repository_name
stringclasses 316
values | func_path_in_repository
stringlengths 6
223
| func_name
stringlengths 1
134
| language
stringclasses 1
value | func_code_string
stringlengths 57
65.5k
| func_documentation_string
stringlengths 1
46.3k
| split_name
stringclasses 1
value | func_code_url
stringlengths 91
315
| called_functions
listlengths 1
156
⌀ | enclosing_scope
stringlengths 2
1.48M
|
|---|---|---|---|---|---|---|---|---|---|
invoice-x/invoice2data
|
src/invoice2data/output/to_csv.py
|
write_to_file
|
python
|
def write_to_file(data, path):
    """Export extracted fields to csv.

    Appends ``.csv`` to *path* if missing and writes one header row
    followed by one data row per invoice.

    Parameters
    ----------
    data : list of dict
        Extracted fields, one dict per invoice.
    path : str
        Target file name; ``.csv`` is appended when absent.

    Notes
    -----
    Do give a file name to the function parameter path.

    Examples
    --------
    >>> from invoice2data.output import to_csv
    >>> to_csv.write_to_file(data, "/exported_csv/invoice.csv")
    >>> to_csv.write_to_file(data, "invoice.csv")
    """
    if path.endswith('.csv'):
        filename = path
    else:
        filename = path + '.csv'
    # Python 2 needs binary mode for the csv module; Python 3 wants text
    # mode with newline='' so the csv writer controls line endings itself.
    if sys.version_info[0] < 3:
        openfile = open(filename, "wb")
    else:
        openfile = open(filename, "w", newline='')
    with openfile as csv_file:
        writer = csv.writer(csv_file, delimiter=',')
        # BUG FIX: the header row used to be emitted once per invoice;
        # write the column names from the first record exactly once.
        if data:
            writer.writerow(list(data[0].keys()))
        for line in data:
            csv_items = []
            for k, v in line.items():
                # Dates are serialized in day/month/year form.
                if k == 'date':
                    v = v.strftime('%d/%m/%Y')
                csv_items.append(v)
            writer.writerow(csv_items)
|
Export extracted fields to csv
Appends .csv to the path if missing and generates the csv file in the specified directory; if none is given, in the root
Parameters
----------
data : dict
Dictionary of extracted fields
path : str
directory to save generated csv file
Notes
----
Do give file name to the function parameter path.
Examples
--------
>>> from invoice2data.output import to_csv
>>> to_csv.write_to_file(data, "/exported_csv/invoice.csv")
>>> to_csv.write_to_file(data, "invoice.csv")
|
train
|
https://github.com/invoice-x/invoice2data/blob/d97fdc5db9c1844fd77fa64f8ea7c42fefd0ba20/src/invoice2data/output/to_csv.py#L5-L54
| null |
import csv
import sys
|
invoice-x/invoice2data
|
src/invoice2data/input/tesseract.py
|
to_text
|
python
|
def to_text(path):
    """Wraps Tesseract OCR.

    Parameters
    ----------
    path : str
        path of electronic invoice in JPG or PNG format

    Returns
    -------
    extracted_str : bytes
        raw text extracted from the image (no decoding is performed here)

    Raises
    ------
    EnvironmentError
        If tesseract or imagemagick is not installed.
    """
    import subprocess
    from distutils import spawn

    # Check for dependencies. Needs Tesseract and Imagemagick installed.
    if not spawn.find_executable('tesseract'):
        raise EnvironmentError('tesseract not installed.')
    if not spawn.find_executable('convert'):
        raise EnvironmentError('imagemagick not installed.')

    # Rasterize the input at 350 DPI and stream it to tesseract via a pipe.
    convert = ['convert', '-density', '350', path, '-depth', '8', 'png:-']
    p1 = subprocess.Popen(convert, stdout=subprocess.PIPE)
    tess = ['tesseract', 'stdin', 'stdout']
    p2 = subprocess.Popen(tess, stdin=p1.stdout, stdout=subprocess.PIPE)
    # FIX: close our copy of the pipe so `convert` receives SIGPIPE if
    # tesseract exits early, instead of blocking forever on a full pipe.
    p1.stdout.close()
    out, err = p2.communicate()
    extracted_str = out
    return extracted_str
|
Wraps Tesseract OCR.
Parameters
----------
path : str
path of electronic invoice in JPG or PNG format
Returns
-------
extracted_str : str
returns extracted text from image in JPG or PNG format
|
train
|
https://github.com/invoice-x/invoice2data/blob/d97fdc5db9c1844fd77fa64f8ea7c42fefd0ba20/src/invoice2data/input/tesseract.py#L4-L38
| null |
# -*- coding: utf-8 -*-
|
invoice-x/invoice2data
|
src/invoice2data/extract/plugins/tables.py
|
extract
|
python
|
def extract(self, content, output):
    """Try to extract tables from an invoice.

    Parameters
    ----------
    content : str
        Pre-processed invoice text to search.
    output : dict
        Field dict updated in place; existing keys are never overwritten.

    Returns
    -------
    None
        Returns ``None`` early when a date field fails to parse.
    """
    for table in self['tables']:
        # First apply default options, then the template's own settings.
        plugin_settings = DEFAULT_OPTIONS.copy()
        plugin_settings.update(table)
        table = plugin_settings
        # Validate settings
        assert 'start' in table, 'Table start regex missing'
        assert 'end' in table, 'Table end regex missing'
        assert 'body' in table, 'Table body regex missing'
        start = re.search(table['start'], content)
        end = re.search(table['end'], content)
        if not start or not end:
            logger.warning('no table body found - start %s, end %s', start, end)
            continue
        # Only the text strictly between the start and end markers is scanned.
        table_body = content[start.end(): end.start()]
        for line in re.split(table['line_separator'], table_body):
            # Skip blank lines inside the table body.
            # FIX: the original used line.strip('') which strips nothing,
            # so whitespace-only lines slipped through.
            if not line.strip():
                continue
            match = re.search(table['body'], line)
            if not match:
                # FIX: previously this debug line was emitted for every
                # line, including the ones that matched.
                logger.debug('ignoring *%s* because it doesn\'t match anything', line)
                continue
            for field, value in match.groupdict().items():
                # If a field name already exists, do not overwrite it
                if field in output:
                    continue
                if field.startswith('date') or field.endswith('date'):
                    output[field] = self.parse_date(value)
                    if not output[field]:
                        logger.error("Date parsing failed on date '%s'", value)
                        return None
                elif field.startswith('amount'):
                    output[field] = self.parse_number(value)
                else:
                    output[field] = value
|
Try to extract tables from an invoice
|
train
|
https://github.com/invoice-x/invoice2data/blob/d97fdc5db9c1844fd77fa64f8ea7c42fefd0ba20/src/invoice2data/extract/plugins/tables.py#L11-L56
| null |
"""
Plugin to extract tables from an invoice.
"""
import re
import logging as logger
DEFAULT_OPTIONS = {'field_separator': r'\s+', 'line_separator': r'\n'}
|
invoice-x/invoice2data
|
src/invoice2data/input/pdftotext.py
|
to_text
|
python
|
def to_text(path):
    """Wrapper around Poppler pdftotext.

    Parameters
    ----------
    path : str
        path of electronic invoice in PDF

    Returns
    -------
    out : bytes
        text extracted from the pdf, UTF-8 encoded

    Raises
    ------
    EnvironmentError
        If the pdftotext binary is not found on the PATH.
    """
    import subprocess
    from distutils import spawn  # py2 compat

    # Fail fast when the external binary is missing.
    if not spawn.find_executable("pdftotext"):  # shutil.which('pdftotext'):
        raise EnvironmentError(
            'pdftotext not installed. Can be downloaded from https://poppler.freedesktop.org/'
        )
    command = ["pdftotext", '-layout', '-enc', 'UTF-8', path, '-']
    proc = subprocess.Popen(command, stdout=subprocess.PIPE)
    out, err = proc.communicate()
    return out
|
Wrapper around Poppler pdftotext.
Parameters
----------
path : str
path of electronic invoice in PDF
Returns
-------
out : str
returns extracted text from pdf
Raises
------
EnvironmentError:
If pdftotext library is not found
|
train
|
https://github.com/invoice-x/invoice2data/blob/d97fdc5db9c1844fd77fa64f8ea7c42fefd0ba20/src/invoice2data/input/pdftotext.py#L2-L31
| null |
# -*- coding: utf-8 -*-
|
invoice-x/invoice2data
|
src/invoice2data/extract/invoice_template.py
|
InvoiceTemplate.prepare_input
|
python
|
def prepare_input(self, extracted_str):
    """Input raw string and do transformations, as set in template file."""
    optimized_str = extracted_str
    # Strip runs of spaces entirely when the template asks for it.
    if self.options['remove_whitespace']:
        optimized_str = re.sub(' +', '', optimized_str)
    # Transliterate accented characters to plain ASCII.
    if self.options['remove_accents']:
        optimized_str = unidecode(optimized_str)
    # Normalize case.
    if self.options['lowercase']:
        optimized_str = optimized_str.lower()
    # Apply template-specific pairwise substitutions.
    for replace in self.options['replace']:
        assert len(replace) == 2, 'A replace should be a list of 2 items'
        optimized_str = optimized_str.replace(replace[0], replace[1])
    return optimized_str
|
Input raw string and do transformations, as set in template file.
|
train
|
https://github.com/invoice-x/invoice2data/blob/d97fdc5db9c1844fd77fa64f8ea7c42fefd0ba20/src/invoice2data/extract/invoice_template.py#L64-L88
| null |
class InvoiceTemplate(OrderedDict):
"""
Represents single template files that live as .yml files on the disk.
Methods
-------
prepare_input(extracted_str)
Input raw string and do transformations, as set in template file.
matches_input(optimized_str)
See if string matches keywords set in template file
parse_number(value)
Parse number, remove decimal separator and add other options
parse_date(value)
Parses date and returns date after parsing
coerce_type(value, target_type)
change type of values
extract(optimized_str)
Given a template file and a string, extract matching data fields.
"""
def __init__(self, *args, **kwargs):
super(InvoiceTemplate, self).__init__(*args, **kwargs)
# Merge template-specific options with defaults
self.options = OPTIONS_DEFAULT.copy()
for lang in self.options['languages']:
assert len(lang) == 2, 'lang code must have 2 letters'
if 'options' in self:
self.options.update(self['options'])
# Set issuer, if it doesn't exist.
if 'issuer' not in self.keys():
self['issuer'] = self['keywords'][0]
def matches_input(self, optimized_str):
"""See if string matches keywords set in template file"""
if all([keyword in optimized_str for keyword in self['keywords']]):
logger.debug('Matched template %s', self['template_name'])
return True
def parse_number(self, value):
assert (
value.count(self.options['decimal_separator']) < 2
), 'Decimal separator cannot be present several times'
# replace decimal separator by a |
amount_pipe = value.replace(self.options['decimal_separator'], '|')
# remove all possible thousands separators
amount_pipe_no_thousand_sep = re.sub(r'[.,\s]', '', amount_pipe)
# put dot as decimal sep
return float(amount_pipe_no_thousand_sep.replace('|', '.'))
def parse_date(self, value):
"""Parses date and returns date after parsing"""
res = dateparser.parse(
value, date_formats=self.options['date_formats'], languages=self.options['languages']
)
logger.debug("result of date parsing=%s", res)
return res
def coerce_type(self, value, target_type):
if target_type == 'int':
if not value.strip():
return 0
return int(self.parse_number(value))
elif target_type == 'float':
if not value.strip():
return 0.0
return float(self.parse_number(value))
elif target_type == 'date':
return self.parse_date(value)
assert False, 'Unknown type'
def extract(self, optimized_str):
"""
Given a template file and a string, extract matching data fields.
"""
logger.debug('START optimized_str ========================')
logger.debug(optimized_str)
logger.debug('END optimized_str ==========================')
logger.debug(
'Date parsing: languages=%s date_formats=%s',
self.options['languages'],
self.options['date_formats'],
)
logger.debug('Float parsing: decimal separator=%s', self.options['decimal_separator'])
logger.debug("keywords=%s", self['keywords'])
logger.debug(self.options)
# Try to find data for each field.
output = {}
output['issuer'] = self['issuer']
for k, v in self['fields'].items():
if k.startswith('static_'):
logger.debug("field=%s | static value=%s", k, v)
output[k.replace('static_', '')] = v
else:
logger.debug("field=%s | regexp=%s", k, v)
sum_field = False
if k.startswith('sum_amount') and type(v) is list:
k = k[4:] # remove 'sum_' prefix
sum_field = True
# Fields can have multiple expressions
if type(v) is list:
res_find = []
for v_option in v:
res_val = re.findall(v_option, optimized_str)
if res_val:
if sum_field:
res_find += res_val
else:
res_find.extend(res_val)
else:
res_find = re.findall(v, optimized_str)
if res_find:
logger.debug("res_find=%s", res_find)
if k.startswith('date') or k.endswith('date'):
output[k] = self.parse_date(res_find[0])
if not output[k]:
logger.error("Date parsing failed on date '%s'", res_find[0])
return None
elif k.startswith('amount'):
if sum_field:
output[k] = 0
for amount_to_parse in res_find:
output[k] += self.parse_number(amount_to_parse)
else:
output[k] = self.parse_number(res_find[0])
else:
res_find = list(set(res_find))
if len(res_find) == 1:
output[k] = res_find[0]
else:
output[k] = res_find
else:
logger.warning("regexp for field %s didn't match", k)
output['currency'] = self.options['currency']
# Run plugins:
for plugin_keyword, plugin_func in PLUGIN_MAPPING.items():
if plugin_keyword in self.keys():
plugin_func.extract(self, optimized_str, output)
# If required fields were found, return output, else log error.
if 'required_fields' not in self.keys():
required_fields = ['date', 'amount', 'invoice_number', 'issuer']
else:
required_fields = []
for v in self['required_fields']:
required_fields.append(v)
if set(required_fields).issubset(output.keys()):
output['desc'] = 'Invoice from %s' % (self['issuer'])
logger.debug(output)
return output
else:
fields = list(set(output.keys()))
logger.error(
'Unable to match all required fields. '
'The required fields are: {0}. '
'Output contains the following fields: {1}.'.format(required_fields, fields)
)
return None
|
invoice-x/invoice2data
|
src/invoice2data/extract/invoice_template.py
|
InvoiceTemplate.matches_input
|
python
|
def matches_input(self, optimized_str):
    """See if string matches keywords set in template file"""
    # Collect the keywords that are absent; an empty list means a match.
    missing = [keyword for keyword in self['keywords'] if keyword not in optimized_str]
    if not missing:
        logger.debug('Matched template %s', self['template_name'])
        return True
|
See if string matches keywords set in template file
|
train
|
https://github.com/invoice-x/invoice2data/blob/d97fdc5db9c1844fd77fa64f8ea7c42fefd0ba20/src/invoice2data/extract/invoice_template.py#L90-L95
| null |
class InvoiceTemplate(OrderedDict):
"""
Represents single template files that live as .yml files on the disk.
Methods
-------
prepare_input(extracted_str)
Input raw string and do transformations, as set in template file.
matches_input(optimized_str)
See if string matches keywords set in template file
parse_number(value)
Parse number, remove decimal separator and add other options
parse_date(value)
Parses date and returns date after parsing
coerce_type(value, target_type)
change type of values
extract(optimized_str)
Given a template file and a string, extract matching data fields.
"""
def __init__(self, *args, **kwargs):
super(InvoiceTemplate, self).__init__(*args, **kwargs)
# Merge template-specific options with defaults
self.options = OPTIONS_DEFAULT.copy()
for lang in self.options['languages']:
assert len(lang) == 2, 'lang code must have 2 letters'
if 'options' in self:
self.options.update(self['options'])
# Set issuer, if it doesn't exist.
if 'issuer' not in self.keys():
self['issuer'] = self['keywords'][0]
def prepare_input(self, extracted_str):
"""
Input raw string and do transformations, as set in template file.
"""
# Remove withspace
if self.options['remove_whitespace']:
optimized_str = re.sub(' +', '', extracted_str)
else:
optimized_str = extracted_str
# Remove accents
if self.options['remove_accents']:
optimized_str = unidecode(optimized_str)
# convert to lower case
if self.options['lowercase']:
optimized_str = optimized_str.lower()
# specific replace
for replace in self.options['replace']:
assert len(replace) == 2, 'A replace should be a list of 2 items'
optimized_str = optimized_str.replace(replace[0], replace[1])
return optimized_str
def parse_number(self, value):
assert (
value.count(self.options['decimal_separator']) < 2
), 'Decimal separator cannot be present several times'
# replace decimal separator by a |
amount_pipe = value.replace(self.options['decimal_separator'], '|')
# remove all possible thousands separators
amount_pipe_no_thousand_sep = re.sub(r'[.,\s]', '', amount_pipe)
# put dot as decimal sep
return float(amount_pipe_no_thousand_sep.replace('|', '.'))
def parse_date(self, value):
"""Parses date and returns date after parsing"""
res = dateparser.parse(
value, date_formats=self.options['date_formats'], languages=self.options['languages']
)
logger.debug("result of date parsing=%s", res)
return res
def coerce_type(self, value, target_type):
if target_type == 'int':
if not value.strip():
return 0
return int(self.parse_number(value))
elif target_type == 'float':
if not value.strip():
return 0.0
return float(self.parse_number(value))
elif target_type == 'date':
return self.parse_date(value)
assert False, 'Unknown type'
def extract(self, optimized_str):
"""
Given a template file and a string, extract matching data fields.
"""
logger.debug('START optimized_str ========================')
logger.debug(optimized_str)
logger.debug('END optimized_str ==========================')
logger.debug(
'Date parsing: languages=%s date_formats=%s',
self.options['languages'],
self.options['date_formats'],
)
logger.debug('Float parsing: decimal separator=%s', self.options['decimal_separator'])
logger.debug("keywords=%s", self['keywords'])
logger.debug(self.options)
# Try to find data for each field.
output = {}
output['issuer'] = self['issuer']
for k, v in self['fields'].items():
if k.startswith('static_'):
logger.debug("field=%s | static value=%s", k, v)
output[k.replace('static_', '')] = v
else:
logger.debug("field=%s | regexp=%s", k, v)
sum_field = False
if k.startswith('sum_amount') and type(v) is list:
k = k[4:] # remove 'sum_' prefix
sum_field = True
# Fields can have multiple expressions
if type(v) is list:
res_find = []
for v_option in v:
res_val = re.findall(v_option, optimized_str)
if res_val:
if sum_field:
res_find += res_val
else:
res_find.extend(res_val)
else:
res_find = re.findall(v, optimized_str)
if res_find:
logger.debug("res_find=%s", res_find)
if k.startswith('date') or k.endswith('date'):
output[k] = self.parse_date(res_find[0])
if not output[k]:
logger.error("Date parsing failed on date '%s'", res_find[0])
return None
elif k.startswith('amount'):
if sum_field:
output[k] = 0
for amount_to_parse in res_find:
output[k] += self.parse_number(amount_to_parse)
else:
output[k] = self.parse_number(res_find[0])
else:
res_find = list(set(res_find))
if len(res_find) == 1:
output[k] = res_find[0]
else:
output[k] = res_find
else:
logger.warning("regexp for field %s didn't match", k)
output['currency'] = self.options['currency']
# Run plugins:
for plugin_keyword, plugin_func in PLUGIN_MAPPING.items():
if plugin_keyword in self.keys():
plugin_func.extract(self, optimized_str, output)
# If required fields were found, return output, else log error.
if 'required_fields' not in self.keys():
required_fields = ['date', 'amount', 'invoice_number', 'issuer']
else:
required_fields = []
for v in self['required_fields']:
required_fields.append(v)
if set(required_fields).issubset(output.keys()):
output['desc'] = 'Invoice from %s' % (self['issuer'])
logger.debug(output)
return output
else:
fields = list(set(output.keys()))
logger.error(
'Unable to match all required fields. '
'The required fields are: {0}. '
'Output contains the following fields: {1}.'.format(required_fields, fields)
)
return None
|
invoice-x/invoice2data
|
src/invoice2data/extract/invoice_template.py
|
InvoiceTemplate.parse_date
|
python
|
def parse_date(self, value):
    """Parses date and returns date after parsing"""
    # Delegate to dateparser with the template-configured formats/languages.
    parsed = dateparser.parse(
        value,
        date_formats=self.options['date_formats'],
        languages=self.options['languages'],
    )
    logger.debug("result of date parsing=%s", parsed)
    return parsed
|
Parses date and returns date after parsing
|
train
|
https://github.com/invoice-x/invoice2data/blob/d97fdc5db9c1844fd77fa64f8ea7c42fefd0ba20/src/invoice2data/extract/invoice_template.py#L108-L114
| null |
class InvoiceTemplate(OrderedDict):
"""
Represents single template files that live as .yml files on the disk.
Methods
-------
prepare_input(extracted_str)
Input raw string and do transformations, as set in template file.
matches_input(optimized_str)
See if string matches keywords set in template file
parse_number(value)
Parse number, remove decimal separator and add other options
parse_date(value)
Parses date and returns date after parsing
coerce_type(value, target_type)
change type of values
extract(optimized_str)
Given a template file and a string, extract matching data fields.
"""
def __init__(self, *args, **kwargs):
super(InvoiceTemplate, self).__init__(*args, **kwargs)
# Merge template-specific options with defaults
self.options = OPTIONS_DEFAULT.copy()
for lang in self.options['languages']:
assert len(lang) == 2, 'lang code must have 2 letters'
if 'options' in self:
self.options.update(self['options'])
# Set issuer, if it doesn't exist.
if 'issuer' not in self.keys():
self['issuer'] = self['keywords'][0]
def prepare_input(self, extracted_str):
"""
Input raw string and do transformations, as set in template file.
"""
# Remove withspace
if self.options['remove_whitespace']:
optimized_str = re.sub(' +', '', extracted_str)
else:
optimized_str = extracted_str
# Remove accents
if self.options['remove_accents']:
optimized_str = unidecode(optimized_str)
# convert to lower case
if self.options['lowercase']:
optimized_str = optimized_str.lower()
# specific replace
for replace in self.options['replace']:
assert len(replace) == 2, 'A replace should be a list of 2 items'
optimized_str = optimized_str.replace(replace[0], replace[1])
return optimized_str
def matches_input(self, optimized_str):
"""See if string matches keywords set in template file"""
if all([keyword in optimized_str for keyword in self['keywords']]):
logger.debug('Matched template %s', self['template_name'])
return True
def parse_number(self, value):
assert (
value.count(self.options['decimal_separator']) < 2
), 'Decimal separator cannot be present several times'
# replace decimal separator by a |
amount_pipe = value.replace(self.options['decimal_separator'], '|')
# remove all possible thousands separators
amount_pipe_no_thousand_sep = re.sub(r'[.,\s]', '', amount_pipe)
# put dot as decimal sep
return float(amount_pipe_no_thousand_sep.replace('|', '.'))
def coerce_type(self, value, target_type):
if target_type == 'int':
if not value.strip():
return 0
return int(self.parse_number(value))
elif target_type == 'float':
if not value.strip():
return 0.0
return float(self.parse_number(value))
elif target_type == 'date':
return self.parse_date(value)
assert False, 'Unknown type'
def extract(self, optimized_str):
"""
Given a template file and a string, extract matching data fields.
"""
logger.debug('START optimized_str ========================')
logger.debug(optimized_str)
logger.debug('END optimized_str ==========================')
logger.debug(
'Date parsing: languages=%s date_formats=%s',
self.options['languages'],
self.options['date_formats'],
)
logger.debug('Float parsing: decimal separator=%s', self.options['decimal_separator'])
logger.debug("keywords=%s", self['keywords'])
logger.debug(self.options)
# Try to find data for each field.
output = {}
output['issuer'] = self['issuer']
for k, v in self['fields'].items():
if k.startswith('static_'):
logger.debug("field=%s | static value=%s", k, v)
output[k.replace('static_', '')] = v
else:
logger.debug("field=%s | regexp=%s", k, v)
sum_field = False
if k.startswith('sum_amount') and type(v) is list:
k = k[4:] # remove 'sum_' prefix
sum_field = True
# Fields can have multiple expressions
if type(v) is list:
res_find = []
for v_option in v:
res_val = re.findall(v_option, optimized_str)
if res_val:
if sum_field:
res_find += res_val
else:
res_find.extend(res_val)
else:
res_find = re.findall(v, optimized_str)
if res_find:
logger.debug("res_find=%s", res_find)
if k.startswith('date') or k.endswith('date'):
output[k] = self.parse_date(res_find[0])
if not output[k]:
logger.error("Date parsing failed on date '%s'", res_find[0])
return None
elif k.startswith('amount'):
if sum_field:
output[k] = 0
for amount_to_parse in res_find:
output[k] += self.parse_number(amount_to_parse)
else:
output[k] = self.parse_number(res_find[0])
else:
res_find = list(set(res_find))
if len(res_find) == 1:
output[k] = res_find[0]
else:
output[k] = res_find
else:
logger.warning("regexp for field %s didn't match", k)
output['currency'] = self.options['currency']
# Run plugins:
for plugin_keyword, plugin_func in PLUGIN_MAPPING.items():
if plugin_keyword in self.keys():
plugin_func.extract(self, optimized_str, output)
# If required fields were found, return output, else log error.
if 'required_fields' not in self.keys():
required_fields = ['date', 'amount', 'invoice_number', 'issuer']
else:
required_fields = []
for v in self['required_fields']:
required_fields.append(v)
if set(required_fields).issubset(output.keys()):
output['desc'] = 'Invoice from %s' % (self['issuer'])
logger.debug(output)
return output
else:
fields = list(set(output.keys()))
logger.error(
'Unable to match all required fields. '
'The required fields are: {0}. '
'Output contains the following fields: {1}.'.format(required_fields, fields)
)
return None
|
invoice-x/invoice2data
|
src/invoice2data/extract/invoice_template.py
|
InvoiceTemplate.extract
|
python
|
def extract(self, optimized_str):
    """Given a template file and a string, extract matching data fields.

    Parameters
    ----------
    optimized_str : str
        Invoice text already pre-processed by ``prepare_input``.

    Returns
    -------
    dict or None
        Extracted fields plus ``issuer``, ``currency`` and ``desc``; ``None``
        when a date fails to parse or a required field is missing.
    """
    logger.debug('START optimized_str ========================')
    logger.debug(optimized_str)
    logger.debug('END optimized_str ==========================')
    logger.debug(
        'Date parsing: languages=%s date_formats=%s',
        self.options['languages'],
        self.options['date_formats'],
    )
    logger.debug('Float parsing: decimal separator=%s', self.options['decimal_separator'])
    logger.debug("keywords=%s", self['keywords'])
    logger.debug(self.options)
    # Try to find data for each field.
    output = {}
    output['issuer'] = self['issuer']
    for k, v in self['fields'].items():
        if k.startswith('static_'):
            # 'static_' fields carry a literal value, not a regex.
            logger.debug("field=%s | static value=%s", k, v)
            output[k.replace('static_', '')] = v
        else:
            logger.debug("field=%s | regexp=%s", k, v)
            sum_field = False
            # 'sum_amount*' fields with a list of regexes accumulate all
            # matched amounts instead of taking only the first one.
            if k.startswith('sum_amount') and type(v) is list:
                k = k[4:]  # remove 'sum_' prefix
                sum_field = True
            # Fields can have multiple expressions
            if type(v) is list:
                res_find = []
                for v_option in v:
                    res_val = re.findall(v_option, optimized_str)
                    if res_val:
                        if sum_field:
                            res_find += res_val
                        else:
                            res_find.extend(res_val)
            else:
                res_find = re.findall(v, optimized_str)
            if res_find:
                logger.debug("res_find=%s", res_find)
                # Fields named *date* are parsed; an unparseable date aborts
                # the whole extraction.
                if k.startswith('date') or k.endswith('date'):
                    output[k] = self.parse_date(res_find[0])
                    if not output[k]:
                        logger.error("Date parsing failed on date '%s'", res_find[0])
                        return None
                elif k.startswith('amount'):
                    if sum_field:
                        # Sum every matched amount into the field.
                        output[k] = 0
                        for amount_to_parse in res_find:
                            output[k] += self.parse_number(amount_to_parse)
                    else:
                        output[k] = self.parse_number(res_find[0])
                else:
                    # Deduplicate plain string matches; a single unique match
                    # is stored as a scalar, several as a list.
                    res_find = list(set(res_find))
                    if len(res_find) == 1:
                        output[k] = res_find[0]
                    else:
                        output[k] = res_find
            else:
                logger.warning("regexp for field %s didn't match", k)
    output['currency'] = self.options['currency']
    # Run plugins:
    for plugin_keyword, plugin_func in PLUGIN_MAPPING.items():
        if plugin_keyword in self.keys():
            plugin_func.extract(self, optimized_str, output)
    # If required fields were found, return output, else log error.
    if 'required_fields' not in self.keys():
        # Default required set when the template declares none.
        required_fields = ['date', 'amount', 'invoice_number', 'issuer']
    else:
        required_fields = []
        for v in self['required_fields']:
            required_fields.append(v)
    if set(required_fields).issubset(output.keys()):
        output['desc'] = 'Invoice from %s' % (self['issuer'])
        logger.debug(output)
        return output
    else:
        fields = list(set(output.keys()))
        logger.error(
            'Unable to match all required fields. '
            'The required fields are: {0}. '
            'Output contains the following fields: {1}.'.format(required_fields, fields)
        )
        return None
|
Given a template file and a string, extract matching data fields.
|
train
|
https://github.com/invoice-x/invoice2data/blob/d97fdc5db9c1844fd77fa64f8ea7c42fefd0ba20/src/invoice2data/extract/invoice_template.py#L129-L222
| null |
class InvoiceTemplate(OrderedDict):
"""
Represents single template files that live as .yml files on the disk.
Methods
-------
prepare_input(extracted_str)
Input raw string and do transformations, as set in template file.
matches_input(optimized_str)
See if string matches keywords set in template file
parse_number(value)
Parse number, remove decimal separator and add other options
parse_date(value)
Parses date and returns date after parsing
coerce_type(value, target_type)
change type of values
extract(optimized_str)
Given a template file and a string, extract matching data fields.
"""
def __init__(self, *args, **kwargs):
super(InvoiceTemplate, self).__init__(*args, **kwargs)
# Merge template-specific options with defaults
self.options = OPTIONS_DEFAULT.copy()
for lang in self.options['languages']:
assert len(lang) == 2, 'lang code must have 2 letters'
if 'options' in self:
self.options.update(self['options'])
# Set issuer, if it doesn't exist.
if 'issuer' not in self.keys():
self['issuer'] = self['keywords'][0]
def prepare_input(self, extracted_str):
"""
Input raw string and do transformations, as set in template file.
"""
# Remove withspace
if self.options['remove_whitespace']:
optimized_str = re.sub(' +', '', extracted_str)
else:
optimized_str = extracted_str
# Remove accents
if self.options['remove_accents']:
optimized_str = unidecode(optimized_str)
# convert to lower case
if self.options['lowercase']:
optimized_str = optimized_str.lower()
# specific replace
for replace in self.options['replace']:
assert len(replace) == 2, 'A replace should be a list of 2 items'
optimized_str = optimized_str.replace(replace[0], replace[1])
return optimized_str
def matches_input(self, optimized_str):
"""See if string matches keywords set in template file"""
if all([keyword in optimized_str for keyword in self['keywords']]):
logger.debug('Matched template %s', self['template_name'])
return True
def parse_number(self, value):
assert (
value.count(self.options['decimal_separator']) < 2
), 'Decimal separator cannot be present several times'
# replace decimal separator by a |
amount_pipe = value.replace(self.options['decimal_separator'], '|')
# remove all possible thousands separators
amount_pipe_no_thousand_sep = re.sub(r'[.,\s]', '', amount_pipe)
# put dot as decimal sep
return float(amount_pipe_no_thousand_sep.replace('|', '.'))
def parse_date(self, value):
"""Parses date and returns date after parsing"""
res = dateparser.parse(
value, date_formats=self.options['date_formats'], languages=self.options['languages']
)
logger.debug("result of date parsing=%s", res)
return res
def coerce_type(self, value, target_type):
if target_type == 'int':
if not value.strip():
return 0
return int(self.parse_number(value))
elif target_type == 'float':
if not value.strip():
return 0.0
return float(self.parse_number(value))
elif target_type == 'date':
return self.parse_date(value)
assert False, 'Unknown type'
|
invoice-x/invoice2data
|
src/invoice2data/output/to_json.py
|
write_to_file
|
python
|
def write_to_file(data, path):
    """Export extracted fields to json.

    Appends ``.json`` to *path* if missing and writes the invoices as a
    single JSON document.

    Parameters
    ----------
    data : list of dict
        Extracted fields, one dict per invoice.
    path : str
        Target file name; ``.json`` is appended when absent.

    Notes
    -----
    Do give a file name to the function parameter path.
    NOTE(review): mutates the caller's dicts — each ``date`` value is
    replaced in place by its ``dd/mm/YYYY`` string form.

    Examples
    --------
    >>> from invoice2data.output import to_json
    >>> to_json.write_to_file(data, "/exported_json/invoice.json")
    >>> to_json.write_to_file(data, "invoice.json")
    """
    if path.endswith('.json'):
        filename = path
    else:
        filename = path + '.json'
    with codecs.open(filename, "w", encoding='utf-8') as json_file:
        for line in data:
            line['date'] = line['date'].strftime('%d/%m/%Y')
        # FIX: removed stray debug statements `print(type(json))` and
        # `print(json)` that polluted stdout on every export.
        json.dump(
            data, json_file, indent=4, sort_keys=True, default=myconverter, ensure_ascii=False
        )
|
Export extracted fields to json
Appends .json to the path if missing and generates the json file in the specified directory; if none is given, in the root
Parameters
----------
data : dict
Dictionary of extracted fields
path : str
directory to save generated json file
Notes
----
Do give file name to the function parameter path.
Examples
--------
>>> from invoice2data.output import to_json
>>> to_json.write_to_file(data, "/exported_json/invoice.json")
>>> to_json.write_to_file(data, "invoice.json")
|
train
|
https://github.com/invoice-x/invoice2data/blob/d97fdc5db9c1844fd77fa64f8ea7c42fefd0ba20/src/invoice2data/output/to_json.py#L12-L47
| null |
import json
import datetime
import codecs
def myconverter(o):
"""function to serialise datetime"""
if isinstance(o, datetime.datetime):
return o.__str__()
|
invoice-x/invoice2data
|
src/invoice2data/main.py
|
extract_data
|
python
|
def extract_data(invoicefile, templates=None, input_module=pdftotext):
    """Extracts structured data from PDF/image invoices.

    Uses the text extracted from the invoice file and pre-defined regex
    templates to find structured data; loads the built-in templates when
    none are supplied.

    Parameters
    ----------
    invoicefile : str
        path of electronic invoice file in PDF, JPEG or PNG
    templates : list of InvoiceTemplate, optional
        templates loaded via ``read_templates``; defaults to the built-ins
    input_module : module, optional
        text-extraction backend (pdftotext, pdfminer or tesseract)

    Returns
    -------
    dict or False
        extracted and matched fields, or False if no template matches
    """
    if templates is None:
        templates = read_templates()

    extracted_str = input_module.to_text(invoicefile).decode('utf-8')

    logger.debug('START pdftotext result ===========================')
    logger.debug(extracted_str)
    logger.debug('END pdftotext result =============================')

    logger.debug('Testing {} template files'.format(len(templates)))
    for template in templates:
        optimized_str = template.prepare_input(extracted_str)
        if template.matches_input(optimized_str):
            return template.extract(optimized_str)

    logger.error('No template for %s', invoicefile)
    return False
|
Extracts structured data from PDF/image invoices.
This function uses the text extracted from a PDF file or image and
pre-defined regex templates to find structured data.
Reads template if no template assigned
Required fields are matches from templates
Parameters
----------
invoicefile : str
path of electronic invoice file in PDF,JPEG,PNG (example: "/home/duskybomb/pdf/invoice.pdf")
templates : list of instances of class `InvoiceTemplate`, optional
Templates are loaded using `read_template` function in `loader.py`
input_module : {'pdftotext', 'pdfminer', 'tesseract'}, optional
library to be used to extract text from given `invoicefile`,
Returns
-------
dict or False
extracted and matched fields or False if no template matches
Notes
-----
Import required `input_module` when using invoice2data as a library
See Also
--------
read_template : Function where templates are loaded
InvoiceTemplate : Class representing single template files that live as .yml files on the disk
Examples
--------
When using `invoice2data` as a library
>>> from invoice2data.input import pdftotext
>>> extract_data("invoice2data/test/pdfs/oyo.pdf", None, pdftotext)
{'issuer': 'OYO', 'amount': 1939.0, 'date': datetime.datetime(2017, 12, 31, 0, 0), 'invoice_number': 'IBZY2087',
'currency': 'INR', 'desc': 'Invoice IBZY2087 from OYO'}
|
train
|
https://github.com/invoice-x/invoice2data/blob/d97fdc5db9c1844fd77fa64f8ea7c42fefd0ba20/src/invoice2data/main.py#L36-L96
|
[
"def to_text(path):\n \"\"\"Wrapper around Poppler pdftotext.\n\n Parameters\n ----------\n path : str\n path of electronic invoice in PDF\n\n Returns\n -------\n out : str\n returns extracted text from pdf\n\n Raises\n ------\n EnvironmentError:\n If pdftotext library is not found\n \"\"\"\n import subprocess\n from distutils import spawn # py2 compat\n\n if spawn.find_executable(\"pdftotext\"): # shutil.which('pdftotext'):\n out, err = subprocess.Popen(\n [\"pdftotext\", '-layout', '-enc', 'UTF-8', path, '-'], stdout=subprocess.PIPE\n ).communicate()\n return out\n else:\n raise EnvironmentError(\n 'pdftotext not installed. Can be downloaded from https://poppler.freedesktop.org/'\n )\n",
"def to_text(path):\n \"\"\"Wrapper around `pdfminer`.\n\n Parameters\n ----------\n path : str\n path of electronic invoice in PDF\n\n Returns\n -------\n str : str\n returns extracted text from pdf\n\n \"\"\"\n\n try:\n # python 2\n from StringIO import StringIO\n import sys\n\n reload(sys) # noqa: F821\n sys.setdefaultencoding('utf8')\n except ImportError:\n from io import StringIO\n\n from pdfminer.pdfinterp import PDFResourceManager, PDFPageInterpreter\n from pdfminer.converter import TextConverter\n from pdfminer.layout import LAParams\n from pdfminer.pdfpage import PDFPage\n\n rsrcmgr = PDFResourceManager()\n retstr = StringIO()\n codec = 'utf-8'\n laparams = LAParams()\n laparams.all_texts = True\n device = TextConverter(rsrcmgr, retstr, codec=codec, laparams=laparams)\n with open(path, 'rb') as fp:\n interpreter = PDFPageInterpreter(rsrcmgr, device)\n password = \"\"\n maxpages = 0\n caching = True\n pagenos = set()\n pages = PDFPage.get_pages(\n fp,\n pagenos,\n maxpages=maxpages,\n password=password,\n caching=caching,\n check_extractable=True,\n )\n for page in pages:\n interpreter.process_page(page)\n device.close()\n str = retstr.getvalue()\n retstr.close()\n return str.encode('utf-8')\n",
"def read_templates(folder=None):\n \"\"\"\n Load yaml templates from template folder. Return list of dicts.\n\n Use built-in templates if no folder is set.\n\n Parameters\n ----------\n folder : str\n user defined folder where they stores their files, if None uses built-in templates\n\n Returns\n -------\n output : Instance of `InvoiceTemplate`\n template which match based on keywords\n\n Examples\n --------\n\n >>> read_template(\"home/duskybomb/invoice-templates/\")\n InvoiceTemplate([('issuer', 'OYO'), ('fields', OrderedDict([('amount', 'GrandTotalRs(\\\\d+)'),\n ('date', 'Date:(\\\\d{1,2}\\\\/\\\\d{1,2}\\\\/\\\\d{1,4})'), ('invoice_number', '([A-Z0-9]+)CashatHotel')])),\n ('keywords', ['OYO', 'Oravel', 'Stays']), ('options', OrderedDict([('currency', 'INR'), ('decimal_separator', '.'),\n ('remove_whitespace', True)])), ('template_name', 'com.oyo.invoice.yml')])\n\n After reading the template you can use the result as an instance of `InvoiceTemplate` to extract fields from\n `extract_data()`\n\n >>> my_template = InvoiceTemplate([('issuer', 'OYO'), ('fields', OrderedDict([('amount', 'GrandTotalRs(\\\\d+)'),\n ('date', 'Date:(\\\\d{1,2}\\\\/\\\\d{1,2}\\\\/\\\\d{1,4})'), ('invoice_number', '([A-Z0-9]+)CashatHotel')])),\n ('keywords', ['OYO', 'Oravel', 'Stays']), ('options', OrderedDict([('currency', 'INR'), ('decimal_separator', '.'),\n ('remove_whitespace', True)])), ('template_name', 'com.oyo.invoice.yml')])\n >>> extract_data(\"invoice2data/test/pdfs/oyo.pdf\", my_template, pdftotext)\n {'issuer': 'OYO', 'amount': 1939.0, 'date': datetime.datetime(2017, 12, 31, 0, 0), 'invoice_number': 'IBZY2087',\n 'currency': 'INR', 'desc': 'Invoice IBZY2087 from OYO'}\n\n \"\"\"\n\n output = []\n\n if folder is None:\n folder = pkg_resources.resource_filename(__name__, 'templates')\n\n for path, subdirs, files in os.walk(folder):\n for name in sorted(files):\n if name.endswith('.yml'):\n with open(os.path.join(path, name), 'rb') as f:\n encoding = 
chardet.detect(f.read())['encoding']\n with codecs.open(os.path.join(path, name), encoding=encoding) as template_file:\n tpl = ordered_load(template_file.read())\n tpl['template_name'] = name\n\n # Test if all required fields are in template:\n assert 'keywords' in tpl.keys(), 'Missing keywords field.'\n\n # Keywords as list, if only one.\n if type(tpl['keywords']) is not list:\n tpl['keywords'] = [tpl['keywords']]\n\n output.append(InvoiceTemplate(tpl))\n return output\n"
] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import argparse
import shutil
import os
from os.path import join
import logging
from .input import pdftotext
from .input import pdfminer_wrapper
from .input import tesseract
from .input import tesseract4
from .input import gvision
from invoice2data.extract.loader import read_templates
from .output import to_csv
from .output import to_json
from .output import to_xml
logger = logging.getLogger(__name__)
input_mapping = {
'pdftotext': pdftotext,
'tesseract': tesseract,
'tesseract4': tesseract4,
'pdfminer': pdfminer_wrapper,
'gvision': gvision,
}
output_mapping = {'csv': to_csv, 'json': to_json, 'xml': to_xml, 'none': None}
def create_parser():
"""Returns argument parser """
parser = argparse.ArgumentParser(
description='Extract structured data from PDF files and save to CSV or JSON.'
)
parser.add_argument(
'--input-reader',
choices=input_mapping.keys(),
default='pdftotext',
help='Choose text extraction function. Default: pdftotext',
)
parser.add_argument(
'--output-format',
choices=output_mapping.keys(),
default='none',
help='Choose output format. Default: none',
)
parser.add_argument(
'--output-name',
'-o',
dest='output_name',
default='invoices-output',
help='Custom name for output file. Extension is added based on chosen format.',
)
parser.add_argument(
'--debug', dest='debug', action='store_true', help='Enable debug information.'
)
parser.add_argument(
'--copy', '-c', dest='copy', help='Copy and rename processed PDFs to specified folder.'
)
parser.add_argument(
'--move', '-m', dest='move', help='Move and rename processed PDFs to specified folder.'
)
parser.add_argument(
'--filename-format',
dest='filename',
default="{date} {invoice_number} {desc}.pdf",
help='Filename format to use when moving or copying processed PDFs.'
'Default: "{date} {invoice_number} {desc}.pdf"',
)
parser.add_argument(
'--template-folder',
'-t',
dest='template_folder',
help='Folder containing invoice templates in yml file. Always adds built-in templates.',
)
parser.add_argument(
'--exclude-built-in-templates',
dest='exclude_built_in_templates',
default=False,
help='Ignore built-in templates.',
action="store_true",
)
parser.add_argument(
'input_files', type=argparse.FileType('r'), nargs='+', help='File or directory to analyze.'
)
return parser
def main(args=None):
"""Take folder or single file and analyze each."""
if args is None:
parser = create_parser()
args = parser.parse_args()
if args.debug:
logging.basicConfig(level=logging.DEBUG)
else:
logging.basicConfig(level=logging.INFO)
input_module = input_mapping[args.input_reader]
output_module = output_mapping[args.output_format]
templates = []
# Load templates from external folder if set.
if args.template_folder:
templates += read_templates(os.path.abspath(args.template_folder))
# Load internal templates, if not disabled.
if not args.exclude_built_in_templates:
templates += read_templates()
output = []
for f in args.input_files:
res = extract_data(f.name, templates=templates, input_module=input_module)
if res:
logger.info(res)
output.append(res)
if args.copy:
filename = args.filename.format(
date=res['date'].strftime('%Y-%m-%d'),
invoice_number=res['invoice_number'],
desc=res['desc'],
)
shutil.copyfile(f.name, join(args.copy, filename))
if args.move:
filename = args.filename.format(
date=res['date'].strftime('%Y-%m-%d'),
invoice_number=res['invoice_number'],
desc=res['desc'],
)
shutil.move(f.name, join(args.move, filename))
f.close()
if output_module is not None:
output_module.write_to_file(output, args.output_name)
if __name__ == '__main__':
main()
|
invoice-x/invoice2data
|
src/invoice2data/main.py
|
create_parser
|
python
|
def create_parser():
    """Build and return the command-line argument parser.

    Returns
    -------
    argparse.ArgumentParser
        Parser exposing all invoice2data command-line options.
    """
    parser = argparse.ArgumentParser(
        description='Extract structured data from PDF files and save to CSV or JSON.'
    )

    # (flags, keyword arguments) pairs, registered in the exact order they
    # should appear in --help output.
    option_specs = [
        (('--input-reader',),
         dict(choices=input_mapping.keys(), default='pdftotext',
              help='Choose text extraction function. Default: pdftotext')),
        (('--output-format',),
         dict(choices=output_mapping.keys(), default='none',
              help='Choose output format. Default: none')),
        (('--output-name', '-o'),
         dict(dest='output_name', default='invoices-output',
              help='Custom name for output file. Extension is added based on chosen format.')),
        (('--debug',),
         dict(dest='debug', action='store_true',
              help='Enable debug information.')),
        (('--copy', '-c'),
         dict(dest='copy',
              help='Copy and rename processed PDFs to specified folder.')),
        (('--move', '-m'),
         dict(dest='move',
              help='Move and rename processed PDFs to specified folder.')),
        (('--filename-format',),
         dict(dest='filename', default="{date} {invoice_number} {desc}.pdf",
              help='Filename format to use when moving or copying processed PDFs.'
                   'Default: "{date} {invoice_number} {desc}.pdf"')),
        (('--template-folder', '-t'),
         dict(dest='template_folder',
              help='Folder containing invoice templates in yml file. Always adds built-in templates.')),
        (('--exclude-built-in-templates',),
         dict(dest='exclude_built_in_templates', default=False,
              help='Ignore built-in templates.', action="store_true")),
        (('input_files',),
         dict(type=argparse.FileType('r'), nargs='+',
              help='File or directory to analyze.')),
    ]
    for flags, kwargs in option_specs:
        parser.add_argument(*flags, **kwargs)

    return parser
|
Returns argument parser
|
train
|
https://github.com/invoice-x/invoice2data/blob/d97fdc5db9c1844fd77fa64f8ea7c42fefd0ba20/src/invoice2data/main.py#L99-L167
| null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import argparse
import shutil
import os
from os.path import join
import logging
from .input import pdftotext
from .input import pdfminer_wrapper
from .input import tesseract
from .input import tesseract4
from .input import gvision
from invoice2data.extract.loader import read_templates
from .output import to_csv
from .output import to_json
from .output import to_xml
logger = logging.getLogger(__name__)
input_mapping = {
'pdftotext': pdftotext,
'tesseract': tesseract,
'tesseract4': tesseract4,
'pdfminer': pdfminer_wrapper,
'gvision': gvision,
}
output_mapping = {'csv': to_csv, 'json': to_json, 'xml': to_xml, 'none': None}
def extract_data(invoicefile, templates=None, input_module=pdftotext):
"""Extracts structured data from PDF/image invoices.
This function uses the text extracted from a PDF file or image and
pre-defined regex templates to find structured data.
Reads template if no template assigned
Required fields are matches from templates
Parameters
----------
invoicefile : str
path of electronic invoice file in PDF,JPEG,PNG (example: "/home/duskybomb/pdf/invoice.pdf")
templates : list of instances of class `InvoiceTemplate`, optional
Templates are loaded using `read_template` function in `loader.py`
input_module : {'pdftotext', 'pdfminer', 'tesseract'}, optional
library to be used to extract text from given `invoicefile`,
Returns
-------
dict or False
extracted and matched fields or False if no template matches
Notes
-----
Import required `input_module` when using invoice2data as a library
See Also
--------
read_template : Function where templates are loaded
InvoiceTemplate : Class representing single template files that live as .yml files on the disk
Examples
--------
When using `invoice2data` as an library
>>> from invoice2data.input import pdftotext
>>> extract_data("invoice2data/test/pdfs/oyo.pdf", None, pdftotext)
{'issuer': 'OYO', 'amount': 1939.0, 'date': datetime.datetime(2017, 12, 31, 0, 0), 'invoice_number': 'IBZY2087',
'currency': 'INR', 'desc': 'Invoice IBZY2087 from OYO'}
"""
if templates is None:
templates = read_templates()
# print(templates[0])
extracted_str = input_module.to_text(invoicefile).decode('utf-8')
logger.debug('START pdftotext result ===========================')
logger.debug(extracted_str)
logger.debug('END pdftotext result =============================')
logger.debug('Testing {} template files'.format(len(templates)))
for t in templates:
optimized_str = t.prepare_input(extracted_str)
if t.matches_input(optimized_str):
return t.extract(optimized_str)
logger.error('No template for %s', invoicefile)
return False
def main(args=None):
"""Take folder or single file and analyze each."""
if args is None:
parser = create_parser()
args = parser.parse_args()
if args.debug:
logging.basicConfig(level=logging.DEBUG)
else:
logging.basicConfig(level=logging.INFO)
input_module = input_mapping[args.input_reader]
output_module = output_mapping[args.output_format]
templates = []
# Load templates from external folder if set.
if args.template_folder:
templates += read_templates(os.path.abspath(args.template_folder))
# Load internal templates, if not disabled.
if not args.exclude_built_in_templates:
templates += read_templates()
output = []
for f in args.input_files:
res = extract_data(f.name, templates=templates, input_module=input_module)
if res:
logger.info(res)
output.append(res)
if args.copy:
filename = args.filename.format(
date=res['date'].strftime('%Y-%m-%d'),
invoice_number=res['invoice_number'],
desc=res['desc'],
)
shutil.copyfile(f.name, join(args.copy, filename))
if args.move:
filename = args.filename.format(
date=res['date'].strftime('%Y-%m-%d'),
invoice_number=res['invoice_number'],
desc=res['desc'],
)
shutil.move(f.name, join(args.move, filename))
f.close()
if output_module is not None:
output_module.write_to_file(output, args.output_name)
if __name__ == '__main__':
main()
|
invoice-x/invoice2data
|
src/invoice2data/main.py
|
main
|
python
|
def main(args=None):
    """Take folder or single file and analyze each.

    Parameters
    ----------
    args : argparse.Namespace, optional
        Pre-parsed arguments; when None, parses sys.argv via create_parser().

    Side effects: configures logging, optionally copies/moves each
    successfully-parsed PDF, and writes the collected results with the
    selected output module.
    """
    if args is None:
        parser = create_parser()
        args = parser.parse_args()

    if args.debug:
        logging.basicConfig(level=logging.DEBUG)
    else:
        logging.basicConfig(level=logging.INFO)

    input_module = input_mapping[args.input_reader]
    output_module = output_mapping[args.output_format]

    templates = []
    # Load templates from external folder if set.
    if args.template_folder:
        templates += read_templates(os.path.abspath(args.template_folder))
    # Load internal templates, if not disabled.
    if not args.exclude_built_in_templates:
        templates += read_templates()

    output = []
    for f in args.input_files:
        res = extract_data(f.name, templates=templates, input_module=input_module)
        if res:
            logger.info(res)
            output.append(res)
            if args.copy or args.move:
                # Render the target filename once; previously this format
                # call was duplicated verbatim for the copy and move paths.
                # Requires res to contain 'date', 'invoice_number', 'desc'.
                processed_name = args.filename.format(
                    date=res['date'].strftime('%Y-%m-%d'),
                    invoice_number=res['invoice_number'],
                    desc=res['desc'],
                )
            if args.copy:
                shutil.copyfile(f.name, join(args.copy, processed_name))
            if args.move:
                shutil.move(f.name, join(args.move, processed_name))
        f.close()
    if output_module is not None:
        output_module.write_to_file(output, args.output_name)
|
Take folder or single file and analyze each.
|
train
|
https://github.com/invoice-x/invoice2data/blob/d97fdc5db9c1844fd77fa64f8ea7c42fefd0ba20/src/invoice2data/main.py#L170-L215
|
[
"def extract_data(invoicefile, templates=None, input_module=pdftotext):\n \"\"\"Extracts structured data from PDF/image invoices.\n\n This function uses the text extracted from a PDF file or image and\n pre-defined regex templates to find structured data.\n\n Reads template if no template assigned\n Required fields are matches from templates\n\n Parameters\n ----------\n invoicefile : str\n path of electronic invoice file in PDF,JPEG,PNG (example: \"/home/duskybomb/pdf/invoice.pdf\")\n templates : list of instances of class `InvoiceTemplate`, optional\n Templates are loaded using `read_template` function in `loader.py`\n input_module : {'pdftotext', 'pdfminer', 'tesseract'}, optional\n library to be used to extract text from given `invoicefile`,\n\n Returns\n -------\n dict or False\n extracted and matched fields or False if no template matches\n\n Notes\n -----\n Import required `input_module` when using invoice2data as a library\n\n See Also\n --------\n read_template : Function where templates are loaded\n InvoiceTemplate : Class representing single template files that live as .yml files on the disk\n\n Examples\n --------\n When using `invoice2data` as an library\n\n >>> from invoice2data.input import pdftotext\n >>> extract_data(\"invoice2data/test/pdfs/oyo.pdf\", None, pdftotext)\n {'issuer': 'OYO', 'amount': 1939.0, 'date': datetime.datetime(2017, 12, 31, 0, 0), 'invoice_number': 'IBZY2087',\n 'currency': 'INR', 'desc': 'Invoice IBZY2087 from OYO'}\n\n \"\"\"\n if templates is None:\n templates = read_templates()\n\n # print(templates[0])\n extracted_str = input_module.to_text(invoicefile).decode('utf-8')\n\n logger.debug('START pdftotext result ===========================')\n logger.debug(extracted_str)\n logger.debug('END pdftotext result =============================')\n\n logger.debug('Testing {} template files'.format(len(templates)))\n for t in templates:\n optimized_str = t.prepare_input(extracted_str)\n\n if t.matches_input(optimized_str):\n return 
t.extract(optimized_str)\n\n logger.error('No template for %s', invoicefile)\n return False\n",
"def create_parser():\n \"\"\"Returns argument parser \"\"\"\n\n parser = argparse.ArgumentParser(\n description='Extract structured data from PDF files and save to CSV or JSON.'\n )\n\n parser.add_argument(\n '--input-reader',\n choices=input_mapping.keys(),\n default='pdftotext',\n help='Choose text extraction function. Default: pdftotext',\n )\n\n parser.add_argument(\n '--output-format',\n choices=output_mapping.keys(),\n default='none',\n help='Choose output format. Default: none',\n )\n\n parser.add_argument(\n '--output-name',\n '-o',\n dest='output_name',\n default='invoices-output',\n help='Custom name for output file. Extension is added based on chosen format.',\n )\n\n parser.add_argument(\n '--debug', dest='debug', action='store_true', help='Enable debug information.'\n )\n\n parser.add_argument(\n '--copy', '-c', dest='copy', help='Copy and rename processed PDFs to specified folder.'\n )\n\n parser.add_argument(\n '--move', '-m', dest='move', help='Move and rename processed PDFs to specified folder.'\n )\n\n parser.add_argument(\n '--filename-format',\n dest='filename',\n default=\"{date} {invoice_number} {desc}.pdf\",\n help='Filename format to use when moving or copying processed PDFs.'\n 'Default: \"{date} {invoice_number} {desc}.pdf\"',\n )\n\n parser.add_argument(\n '--template-folder',\n '-t',\n dest='template_folder',\n help='Folder containing invoice templates in yml file. Always adds built-in templates.',\n )\n\n parser.add_argument(\n '--exclude-built-in-templates',\n dest='exclude_built_in_templates',\n default=False,\n help='Ignore built-in templates.',\n action=\"store_true\",\n )\n\n parser.add_argument(\n 'input_files', type=argparse.FileType('r'), nargs='+', help='File or directory to analyze.'\n )\n\n return parser\n",
"def read_templates(folder=None):\n \"\"\"\n Load yaml templates from template folder. Return list of dicts.\n\n Use built-in templates if no folder is set.\n\n Parameters\n ----------\n folder : str\n user defined folder where they stores their files, if None uses built-in templates\n\n Returns\n -------\n output : Instance of `InvoiceTemplate`\n template which match based on keywords\n\n Examples\n --------\n\n >>> read_template(\"home/duskybomb/invoice-templates/\")\n InvoiceTemplate([('issuer', 'OYO'), ('fields', OrderedDict([('amount', 'GrandTotalRs(\\\\d+)'),\n ('date', 'Date:(\\\\d{1,2}\\\\/\\\\d{1,2}\\\\/\\\\d{1,4})'), ('invoice_number', '([A-Z0-9]+)CashatHotel')])),\n ('keywords', ['OYO', 'Oravel', 'Stays']), ('options', OrderedDict([('currency', 'INR'), ('decimal_separator', '.'),\n ('remove_whitespace', True)])), ('template_name', 'com.oyo.invoice.yml')])\n\n After reading the template you can use the result as an instance of `InvoiceTemplate` to extract fields from\n `extract_data()`\n\n >>> my_template = InvoiceTemplate([('issuer', 'OYO'), ('fields', OrderedDict([('amount', 'GrandTotalRs(\\\\d+)'),\n ('date', 'Date:(\\\\d{1,2}\\\\/\\\\d{1,2}\\\\/\\\\d{1,4})'), ('invoice_number', '([A-Z0-9]+)CashatHotel')])),\n ('keywords', ['OYO', 'Oravel', 'Stays']), ('options', OrderedDict([('currency', 'INR'), ('decimal_separator', '.'),\n ('remove_whitespace', True)])), ('template_name', 'com.oyo.invoice.yml')])\n >>> extract_data(\"invoice2data/test/pdfs/oyo.pdf\", my_template, pdftotext)\n {'issuer': 'OYO', 'amount': 1939.0, 'date': datetime.datetime(2017, 12, 31, 0, 0), 'invoice_number': 'IBZY2087',\n 'currency': 'INR', 'desc': 'Invoice IBZY2087 from OYO'}\n\n \"\"\"\n\n output = []\n\n if folder is None:\n folder = pkg_resources.resource_filename(__name__, 'templates')\n\n for path, subdirs, files in os.walk(folder):\n for name in sorted(files):\n if name.endswith('.yml'):\n with open(os.path.join(path, name), 'rb') as f:\n encoding = 
chardet.detect(f.read())['encoding']\n with codecs.open(os.path.join(path, name), encoding=encoding) as template_file:\n tpl = ordered_load(template_file.read())\n tpl['template_name'] = name\n\n # Test if all required fields are in template:\n assert 'keywords' in tpl.keys(), 'Missing keywords field.'\n\n # Keywords as list, if only one.\n if type(tpl['keywords']) is not list:\n tpl['keywords'] = [tpl['keywords']]\n\n output.append(InvoiceTemplate(tpl))\n return output\n"
] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import argparse
import shutil
import os
from os.path import join
import logging
from .input import pdftotext
from .input import pdfminer_wrapper
from .input import tesseract
from .input import tesseract4
from .input import gvision
from invoice2data.extract.loader import read_templates
from .output import to_csv
from .output import to_json
from .output import to_xml
logger = logging.getLogger(__name__)
input_mapping = {
'pdftotext': pdftotext,
'tesseract': tesseract,
'tesseract4': tesseract4,
'pdfminer': pdfminer_wrapper,
'gvision': gvision,
}
output_mapping = {'csv': to_csv, 'json': to_json, 'xml': to_xml, 'none': None}
def extract_data(invoicefile, templates=None, input_module=pdftotext):
    """Extracts structured data from PDF/image invoices.

    This function uses the text extracted from a PDF file or image and
    pre-defined regex templates to find structured data.

    Reads templates if no template is assigned; required fields are
    matched from the templates.

    Parameters
    ----------
    invoicefile : str
        path of electronic invoice file in PDF,JPEG,PNG (example: "/home/duskybomb/pdf/invoice.pdf")
    templates : list of instances of class `InvoiceTemplate`, optional
        Templates are loaded using `read_template` function in `loader.py`
    input_module : {'pdftotext', 'pdfminer', 'tesseract'}, optional
        library to be used to extract text from given `invoicefile`,

    Returns
    -------
    dict or False
        extracted and matched fields or False if no template matches

    Notes
    -----
    Import the required `input_module` when using invoice2data as a library

    See Also
    --------
    read_template : Function where templates are loaded
    InvoiceTemplate : Class representing single template files that live as .yml files on the disk

    Examples
    --------
    When using `invoice2data` as a library

    >>> from invoice2data.input import pdftotext
    >>> extract_data("invoice2data/test/pdfs/oyo.pdf", None, pdftotext)
    {'issuer': 'OYO', 'amount': 1939.0, 'date': datetime.datetime(2017, 12, 31, 0, 0), 'invoice_number': 'IBZY2087',
    'currency': 'INR', 'desc': 'Invoice IBZY2087 from OYO'}
    """
    if templates is None:
        templates = read_templates()
    # print(templates[0])
    # Input modules return bytes; normalise to unicode before matching.
    extracted_str = input_module.to_text(invoicefile).decode('utf-8')
    logger.debug('START pdftotext result ===========================')
    logger.debug(extracted_str)
    logger.debug('END pdftotext result =============================')
    logger.debug('Testing {} template files'.format(len(templates)))
    # First template whose keywords match wins; order follows read_templates().
    for t in templates:
        optimized_str = t.prepare_input(extracted_str)
        if t.matches_input(optimized_str):
            return t.extract(optimized_str)
    logger.error('No template for %s', invoicefile)
    return False
def create_parser():
"""Returns argument parser """
parser = argparse.ArgumentParser(
description='Extract structured data from PDF files and save to CSV or JSON.'
)
parser.add_argument(
'--input-reader',
choices=input_mapping.keys(),
default='pdftotext',
help='Choose text extraction function. Default: pdftotext',
)
parser.add_argument(
'--output-format',
choices=output_mapping.keys(),
default='none',
help='Choose output format. Default: none',
)
parser.add_argument(
'--output-name',
'-o',
dest='output_name',
default='invoices-output',
help='Custom name for output file. Extension is added based on chosen format.',
)
parser.add_argument(
'--debug', dest='debug', action='store_true', help='Enable debug information.'
)
parser.add_argument(
'--copy', '-c', dest='copy', help='Copy and rename processed PDFs to specified folder.'
)
parser.add_argument(
'--move', '-m', dest='move', help='Move and rename processed PDFs to specified folder.'
)
parser.add_argument(
'--filename-format',
dest='filename',
default="{date} {invoice_number} {desc}.pdf",
help='Filename format to use when moving or copying processed PDFs.'
'Default: "{date} {invoice_number} {desc}.pdf"',
)
parser.add_argument(
'--template-folder',
'-t',
dest='template_folder',
help='Folder containing invoice templates in yml file. Always adds built-in templates.',
)
parser.add_argument(
'--exclude-built-in-templates',
dest='exclude_built_in_templates',
default=False,
help='Ignore built-in templates.',
action="store_true",
)
parser.add_argument(
'input_files', type=argparse.FileType('r'), nargs='+', help='File or directory to analyze.'
)
return parser
if __name__ == '__main__':
main()
|
invoice-x/invoice2data
|
src/invoice2data/output/to_xml.py
|
write_to_file
|
python
|
def write_to_file(data, path):
    """Export extracted fields to xml.

    Appends .xml to path if missing and generates the xml file in the
    specified directory, if not then in root.

    Parameters
    ----------
    data : list of dict
        Extracted fields, one dict per invoice. Only `date`, `desc`,
        `currency` and `amount` are exported.
    path : str
        Target file name, with or without the .xml extension.

    Examples
    --------
    >>> from invoice2data.output import to_xml
    >>> to_xml.write_to_file(data, "invoice.xml")
    """
    if path.endswith('.xml'):
        filename = path
    else:
        filename = path + '.xml'

    tag_data = ET.Element('data')
    # Sequential 1-based ids, matching the original manual counter.
    for i, line in enumerate(data, start=1):
        tag_item = ET.SubElement(tag_data, 'item')
        tag_date = ET.SubElement(tag_item, 'date')
        tag_desc = ET.SubElement(tag_item, 'desc')
        tag_currency = ET.SubElement(tag_item, 'currency')
        tag_amount = ET.SubElement(tag_item, 'amount')
        tag_item.set('id', str(i))
        tag_date.text = line['date'].strftime('%d/%m/%Y')
        tag_desc.text = line['desc']
        tag_currency.text = line['currency']
        tag_amount.text = str(line['amount'])

    # Context manager guarantees the handle is closed even if prettify()
    # or the write raises (the original leaked the file object on error).
    with open(filename, "w") as xml_file:
        xml_file.write(prettify(tag_data))
|
Export extracted fields to xml
Appends .xml to path if missing and generates xml file in specified directory, if not then in root
Parameters
----------
data : dict
Dictionary of extracted fields
path : str
directory to save generated xml file
Notes
----
Do give file name to the function parameter path.
Only `date`, `desc`, `amount` and `currency` are exported
Examples
--------
>>> from invoice2data.output import to_xml
>>> to_xml.write_to_file(data, "/exported_xml/invoice.xml")
>>> to_xml.write_to_file(data, "invoice.xml")
|
train
|
https://github.com/invoice-x/invoice2data/blob/d97fdc5db9c1844fd77fa64f8ea7c42fefd0ba20/src/invoice2data/output/to_xml.py#L12-L59
|
[
"def prettify(elem):\n \"\"\"Return a pretty-printed XML string for the Element.\"\"\"\n rough_string = ET.tostring(elem, 'utf-8')\n reparsed = minidom.parseString(rough_string)\n return reparsed.toprettyxml(indent=\" \")\n"
] |
import xml.etree.ElementTree as ET
from xml.dom import minidom
def prettify(elem):
    """Return a pretty-printed XML string for the Element."""
    # Round-trip through minidom, which knows how to indent a DOM tree.
    return minidom.parseString(ET.tostring(elem, 'utf-8')).toprettyxml(indent=" ")
|
invoice-x/invoice2data
|
src/invoice2data/extract/loader.py
|
read_templates
|
python
|
def read_templates(folder=None):
    """Load yaml templates from a template folder; return a list of
    `InvoiceTemplate` instances.

    Uses the built-in templates when no folder is given.

    Parameters
    ----------
    folder : str, optional
        User-defined folder containing .yml template files; when None,
        the package's bundled templates are used.
    """
    if folder is None:
        folder = pkg_resources.resource_filename(__name__, 'templates')

    templates = []
    for dirpath, _subdirs, filenames in os.walk(folder):
        for filename in sorted(filenames):
            if not filename.endswith('.yml'):
                continue
            full_path = os.path.join(dirpath, filename)
            # Sniff the file's encoding before decoding its contents.
            with open(full_path, 'rb') as raw:
                encoding = chardet.detect(raw.read())['encoding']
            with codecs.open(full_path, encoding=encoding) as template_file:
                tpl = ordered_load(template_file.read())
            tpl['template_name'] = filename

            # Every template must declare at least one keyword.
            assert 'keywords' in tpl.keys(), 'Missing keywords field.'

            # Normalise a lone keyword into a one-element list.
            if type(tpl['keywords']) is not list:
                tpl['keywords'] = [tpl['keywords']]

            templates.append(InvoiceTemplate(tpl))
    return templates
|
Load yaml templates from template folder. Return list of dicts.
Use built-in templates if no folder is set.
Parameters
----------
folder : str
user defined folder where they stores their files, if None uses built-in templates
Returns
-------
output : Instance of `InvoiceTemplate`
template which match based on keywords
Examples
--------
>>> read_template("home/duskybomb/invoice-templates/")
InvoiceTemplate([('issuer', 'OYO'), ('fields', OrderedDict([('amount', 'GrandTotalRs(\\d+)'),
('date', 'Date:(\\d{1,2}\\/\\d{1,2}\\/\\d{1,4})'), ('invoice_number', '([A-Z0-9]+)CashatHotel')])),
('keywords', ['OYO', 'Oravel', 'Stays']), ('options', OrderedDict([('currency', 'INR'), ('decimal_separator', '.'),
('remove_whitespace', True)])), ('template_name', 'com.oyo.invoice.yml')])
After reading the template you can use the result as an instance of `InvoiceTemplate` to extract fields from
`extract_data()`
>>> my_template = InvoiceTemplate([('issuer', 'OYO'), ('fields', OrderedDict([('amount', 'GrandTotalRs(\\d+)'),
('date', 'Date:(\\d{1,2}\\/\\d{1,2}\\/\\d{1,4})'), ('invoice_number', '([A-Z0-9]+)CashatHotel')])),
('keywords', ['OYO', 'Oravel', 'Stays']), ('options', OrderedDict([('currency', 'INR'), ('decimal_separator', '.'),
('remove_whitespace', True)])), ('template_name', 'com.oyo.invoice.yml')])
>>> extract_data("invoice2data/test/pdfs/oyo.pdf", my_template, pdftotext)
{'issuer': 'OYO', 'amount': 1939.0, 'date': datetime.datetime(2017, 12, 31, 0, 0), 'invoice_number': 'IBZY2087',
'currency': 'INR', 'desc': 'Invoice IBZY2087 from OYO'}
|
train
|
https://github.com/invoice-x/invoice2data/blob/d97fdc5db9c1844fd77fa64f8ea7c42fefd0ba20/src/invoice2data/extract/loader.py#L39-L99
|
[
"def ordered_load(stream, Loader=yaml.Loader, object_pairs_hook=OrderedDict):\n \"\"\"load mappings and ordered mappings\n\n loader to load mappings and ordered mappings into the Python 2.7+ OrderedDict type,\n instead of the vanilla dict and the list of pairs it currently uses.\n \"\"\"\n\n class OrderedLoader(Loader):\n pass\n\n def construct_mapping(loader, node):\n loader.flatten_mapping(node)\n return object_pairs_hook(loader.construct_pairs(node))\n\n OrderedLoader.add_constructor(yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG, construct_mapping)\n\n return yaml.load(stream, OrderedLoader)\n"
] |
"""
This module abstracts templates for invoice providers.
Templates are initially read from .yml files and then kept as class.
"""
import os
import yaml
import pkg_resources
from collections import OrderedDict
import logging as logger
from .invoice_template import InvoiceTemplate
import codecs
import chardet
logger.getLogger('chardet').setLevel(logger.WARNING)
# borrowed from http://stackoverflow.com/a/21912744
def ordered_load(stream, Loader=yaml.Loader, object_pairs_hook=OrderedDict):
    """Load YAML with every mapping built through `object_pairs_hook`.

    Loads mappings and ordered mappings into the Python 2.7+ OrderedDict
    type, instead of the vanilla dict, so key order from the file is kept.

    Parameters
    ----------
    stream : str or file-like
        YAML document to parse.
    Loader : yaml.Loader subclass, optional
        Base loader class to extend.
    object_pairs_hook : callable, optional
        Factory receiving (key, value) pairs; defaults to ``OrderedDict``.

    Returns
    -------
    object
        Parsed YAML content with mappings as ``object_pairs_hook`` instances.
    """

    # Subclass so the custom constructor does not leak into the global Loader.
    class OrderedLoader(Loader):
        pass

    def construct_mapping(loader, node):
        loader.flatten_mapping(node)
        return object_pairs_hook(loader.construct_pairs(node))

    # Route every plain YAML mapping tag through the ordered constructor.
    OrderedLoader.add_constructor(yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG, construct_mapping)

    return yaml.load(stream, OrderedLoader)
|
invoice-x/invoice2data
|
src/invoice2data/input/pdfminer_wrapper.py
|
to_text
|
python
|
def to_text(path):
    """Wrapper around `pdfminer`.

    Parameters
    ----------
    path : str
        path of electronic invoice in PDF

    Returns
    -------
    bytes
        UTF-8 encoded text extracted from the pdf
    """

    try:
        # Python 2: force a UTF-8 default encoding so extracted text decodes.
        from StringIO import StringIO
        import sys

        reload(sys)  # noqa: F821
        sys.setdefaultencoding('utf8')
    except ImportError:
        from io import StringIO

    from pdfminer.pdfinterp import PDFResourceManager, PDFPageInterpreter
    from pdfminer.converter import TextConverter
    from pdfminer.layout import LAParams
    from pdfminer.pdfpage import PDFPage

    rsrcmgr = PDFResourceManager()
    retstr = StringIO()
    codec = 'utf-8'
    laparams = LAParams()
    laparams.all_texts = True
    device = TextConverter(rsrcmgr, retstr, codec=codec, laparams=laparams)
    try:
        with open(path, 'rb') as fp:
            interpreter = PDFPageInterpreter(rsrcmgr, device)
            password = ""
            maxpages = 0  # 0 means no page limit
            caching = True
            pagenos = set()
            pages = PDFPage.get_pages(
                fp,
                pagenos,
                maxpages=maxpages,
                password=password,
                caching=caching,
                check_extractable=True,
            )
            for page in pages:
                interpreter.process_page(page)
    finally:
        # Close the converter even if a page fails to parse; the original
        # leaked it on error. Closing flushes any buffered output to retstr.
        device.close()
    # Renamed from `str`, which shadowed the builtin.
    text = retstr.getvalue()
    retstr.close()
    return text.encode('utf-8')
|
Wrapper around `pdfminer`.
Parameters
----------
path : str
path of electronic invoice in PDF
Returns
-------
str : str
returns extracted text from pdf
|
train
|
https://github.com/invoice-x/invoice2data/blob/d97fdc5db9c1844fd77fa64f8ea7c42fefd0ba20/src/invoice2data/input/pdfminer_wrapper.py#L2-L57
| null |
# -*- coding: utf-8 -*-
|
invoice-x/invoice2data
|
src/invoice2data/extract/plugins/lines.py
|
extract
|
python
|
def extract(self, content, output):
    """Try to extract invoice line items from *content* into ``output['lines']``.

    ``self`` is used as a dict holding the template settings; the
    ``self['lines']`` sub-dict must provide ``start``, ``end`` and ``line``
    regexes and may provide ``first_line``, ``last_line``, ``types`` and the
    separators from DEFAULT_OPTIONS.  Rows are accumulated into dicts built
    from the regexes' named groups; nothing is written to *output* when no
    rows match.
    """
    # First apply default options.
    plugin_settings = DEFAULT_OPTIONS.copy()
    plugin_settings.update(self['lines'])
    self['lines'] = plugin_settings
    # Validate settings
    assert 'start' in self['lines'], 'Lines start regex missing'
    assert 'end' in self['lines'], 'Lines end regex missing'
    assert 'line' in self['lines'], 'Line regex missing'
    # Narrow the text down to the region between the start and end markers.
    start = re.search(self['lines']['start'], content)
    end = re.search(self['lines']['end'], content)
    if not start or not end:
        logger.warning('no lines found - start %s, end %s', start, end)
        return
    content = content[start.end(): end.start()]
    lines = []
    current_row = {}
    # Without first_line/last_line markers, every 'line' match starts a row.
    if 'first_line' not in self['lines'] and 'last_line' not in self['lines']:
        self['lines']['first_line'] = self['lines']['line']
    for line in re.split(self['lines']['line_separator'], content):
        # skip blank or whitespace-only lines
        if not line.strip('').strip('\n') or not line:
            continue
        if 'first_line' in self['lines']:
            # A first_line match opens a new row (flushing any pending one).
            match = re.search(self['lines']['first_line'], line)
            if match:
                if 'last_line' not in self['lines']:
                    if current_row:
                        lines.append(current_row)
                    current_row = {}
                if current_row:
                    lines.append(current_row)
                current_row = {
                    field: value.strip() if value else ''
                    for field, value in match.groupdict().items()
                }
                continue
        if 'last_line' in self['lines']:
            # A last_line match appends its groups and closes the row.
            match = re.search(self['lines']['last_line'], line)
            if match:
                for field, value in match.groupdict().items():
                    # Append to any existing value, newline-separated.
                    current_row[field] = '%s%s%s' % (
                        current_row.get(field, ''),
                        current_row.get(field, '') and '\n' or '',
                        value.strip() if value else '',
                    )
                if current_row:
                    lines.append(current_row)
                current_row = {}
                continue
        # A plain 'line' match continues the current row.
        match = re.search(self['lines']['line'], line)
        if match:
            for field, value in match.groupdict().items():
                current_row[field] = '%s%s%s' % (
                    current_row.get(field, ''),
                    current_row.get(field, '') and '\n' or '',
                    value.strip() if value else '',
                )
            continue
        logger.debug('ignoring *%s* because it doesn\'t match anything', line)
    if current_row:
        lines.append(current_row)
    # Coerce typed fields (e.g. numeric amounts) via the template helper.
    types = self['lines'].get('types', [])
    for row in lines:
        for name in row.keys():
            if name in types:
                row[name] = self.coerce_type(row[name], types[name])
    if lines:
        output['lines'] = lines
|
Try to extract lines from the invoice
|
train
|
https://github.com/invoice-x/invoice2data/blob/d97fdc5db9c1844fd77fa64f8ea7c42fefd0ba20/src/invoice2data/extract/plugins/lines.py#L13-L87
| null |
"""
Plugin to extract individual lines from an invoice.
Initial work and maintenance by Holger Brunn @hbrunn
"""
import re
import logging as logger
DEFAULT_OPTIONS = {'field_separator': r'\s+', 'line_separator': r'\n'}
|
bulkan/robotframework-requests
|
src/RequestsLibrary/RequestsKeywords.py
|
RequestsKeywords._create_session
|
python
|
def _create_session(
        self,
        alias,
        url,
        headers,
        cookies,
        auth,
        timeout,
        max_retries,
        backoff_factor,
        proxies,
        verify,
        debug,
        disable_warnings):
    """Build a ``requests.Session``, configure it, and register it in the cache.

    Applies *headers*, *auth* and *proxies* to the session, optionally mounts
    retrying HTTP adapters, normalizes *verify* (boolean or CA_BUNDLE path
    string), stores *timeout*/*cookies*/*verify* on ``self`` for later
    requests, and registers the session under *alias*.  Returns the session.
    """
    self.builtin.log('Creating session: %s' % alias, 'DEBUG')
    s = session = requests.Session()
    s.headers.update(headers)
    # Keep the session defaults when auth/proxies are falsy.
    s.auth = auth if auth else s.auth
    s.proxies = proxies if proxies else s.proxies
    try:
        max_retries = int(max_retries)
    except ValueError as err:
        raise ValueError("Error converting max_retries parameter: %s" % err)
    if max_retries > 0:
        http = requests.adapters.HTTPAdapter(max_retries=Retry(total=max_retries, backoff_factor=backoff_factor))
        https = requests.adapters.HTTPAdapter(max_retries=Retry(total=max_retries, backoff_factor=backoff_factor))
        # Replace the session's original adapters
        s.mount('http://', http)
        s.mount('https://', https)
    # Disable requests warnings, useful when you have large number of testcase
    # you will observe drastical changes in Robot log.html and output.xml files size
    if disable_warnings:
        logging.basicConfig()  # you need to initialize logging, otherwise you will not see anything from requests
        logging.getLogger().setLevel(logging.ERROR)
        requests_log = logging.getLogger("requests")
        requests_log.setLevel(logging.ERROR)
        requests_log.propagate = True
        if not verify:
            requests.packages.urllib3.disable_warnings()
    # verify can be a Boolean or a String
    if isinstance(verify, bool):
        s.verify = verify
    elif isinstance(verify, str) or isinstance(verify, unicode):
        if verify.lower() == 'true' or verify.lower() == 'false':
            s.verify = self.builtin.convert_to_boolean(verify)
        else:
            # String for CA_BUNDLE, not a Boolean String
            s.verify = verify
    else:
        # not a Boolean nor a String
        s.verify = verify
    # cant pass these into the Session anymore
    self.timeout = float(timeout) if timeout is not None else None
    self.cookies = cookies
    # NOTE(review): self.verify is None whenever verify is truthy, so only a
    # falsy/False verify is kept here while s.verify holds the real value --
    # confirm this asymmetry is intentional.
    self.verify = verify if self.builtin.convert_to_boolean(verify) != True else None
    s.url = url
    # Enable http verbosity
    if int(debug) >= 1:
        self.debug = int(debug)
        httplib.HTTPConnection.debuglevel = self.debug
    self._cache.register(session, alias=alias)
    return session
|
Create Session: create a HTTP session to a server
``url`` Base url of the server
``alias`` Robot Framework alias to identify the session
``headers`` Dictionary of default headers
``cookies`` Dictionary of cookies
``auth`` List of username & password for HTTP Basic Auth
``timeout`` Connection timeout
``max_retries`` The maximum number of retries each connection should attempt.
``backoff_factor`` The pause between for each retry
``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication
``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.
``debug`` Enable http verbosity option more information
https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel
``disable_warnings`` Disable requests warning useful when you have large number of testcases
|
train
|
https://github.com/bulkan/robotframework-requests/blob/11baa3277f1cb728712e26d996200703c15254a8/src/RequestsLibrary/RequestsKeywords.py#L57-L155
| null |
class RequestsKeywords(object):
"""``RequestsLibrary`` is a [http://code.google.com/p/robotframework/|Robot Framework] test library that uses the [https://github.com/kennethreitz/requests|Requests] HTTP client.
Here is an example testcase
| ***** Settings ***** | | | | |
| Library | Collections | | | |
| Library | RequestsLibrary | | | |
| ***** Test Cases ***** | | | | |
| Get Requests | | | | |
| | Create Session | github | http://api.github.com | |
| | Create Session | google | http://www.google.com | |
| | ${resp}= | Get Request | google | / |
| | Should Be Equal As Strings | ${resp.status_code} | 200 | |
| | ${resp}= | Get Request | github | /users/bulkan |
| | Should Be Equal As Strings | ${resp.status_code} | 200 | |
| | Dictionary Should Contain Value | ${resp.json()} | Bulkan Savun Evcimen | |
"""
ROBOT_LIBRARY_SCOPE = 'Global'
def __init__(self):
self._cache = robot.utils.ConnectionCache('No sessions created')
self.builtin = BuiltIn()
self.debug = 0
def create_session(self, alias, url, headers={}, cookies={},
auth=None, timeout=None, proxies=None,
verify=False, debug=0, max_retries=3, backoff_factor=0.10, disable_warnings=0):
""" Create Session: create a HTTP session to a server
``url`` Base url of the server
``alias`` Robot Framework alias to identify the session
``headers`` Dictionary of default headers
``cookies`` Dictionary of cookies
``auth`` List of username & password for HTTP Basic Auth
``timeout`` Connection timeout
``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication
``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.
Defaults to False.
``debug`` Enable http verbosity option more information
https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel
``max_retries`` The maximum number of retries each connection should attempt.
``backoff_factor`` The pause between for each retry
``disable_warnings`` Disable requests warning useful when you have large number of testcases
"""
auth = requests.auth.HTTPBasicAuth(*auth) if auth else None
logger.info('Creating Session using : alias=%s, url=%s, headers=%s, \
cookies=%s, auth=%s, timeout=%s, proxies=%s, verify=%s, \
debug=%s ' % (alias, url, headers, cookies, auth, timeout,
proxies, verify, debug))
return self._create_session(
alias,
url,
headers,
cookies,
auth,
timeout,
max_retries,
backoff_factor,
proxies,
verify,
debug,
disable_warnings)
def create_custom_session(
self,
alias,
url,
auth,
headers={},
cookies={},
timeout=None,
proxies=None,
verify=False,
debug=0,
max_retries=3,
backoff_factor=0.10,
disable_warnings=0):
""" Create Session: create a HTTP session to a server
``url`` Base url of the server
``alias`` Robot Framework alias to identify the session
``headers`` Dictionary of default headers
``cookies`` Dictionary of cookies
``auth`` A Custom Authentication object to be passed on to the reqests library.
http://docs.python-requests.org/en/master/user/advanced/#custom-authentication
``timeout`` Connection timeout
``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication
``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.
Defaults to False.
``debug`` Enable http verbosity option more information
https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel
``max_retries`` The maximum number of retries each connection should attempt.
``backoff_factor`` The pause between for each retry
``disable_warnings`` Disable requests warning useful when you have large number of testcases
"""
logger.info('Creating Custom Authenticated Session using : alias=%s, url=%s, headers=%s, \
cookies=%s, auth=%s, timeout=%s, proxies=%s, verify=%s, \
debug=%s ' % (alias, url, headers, cookies, auth, timeout,
proxies, verify, debug))
return self._create_session(
alias,
url,
headers,
cookies,
auth,
timeout,
max_retries,
backoff_factor,
proxies,
verify,
debug,
disable_warnings)
def create_ntlm_session(
self,
alias,
url,
auth,
headers={},
cookies={},
timeout=None,
proxies=None,
verify=False,
debug=0,
max_retries=3,
backoff_factor=0.10,
disable_warnings=0):
""" Create Session: create a HTTP session to a server
``url`` Base url of the server
``alias`` Robot Framework alias to identify the session
``headers`` Dictionary of default headers
``cookies`` Dictionary of cookies
``auth`` ['DOMAIN', 'username', 'password'] for NTLM Authentication
``timeout`` Connection timeout
``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication
``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.
Defaults to False.
``debug`` Enable http verbosity option more information
https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel
``max_retries`` The maximum number of retries each connection should attempt.
``backoff_factor`` The pause between for each retry
``disable_warnings`` Disable requests warning useful when you have large number of testcases
"""
if not HttpNtlmAuth:
raise AssertionError('Requests NTLM module not loaded')
elif len(auth) != 3:
raise AssertionError('Incorrect number of authentication arguments'
' - expected 3, got {}'.format(len(auth)))
else:
ntlm_auth = HttpNtlmAuth('{}\\{}'.format(auth[0], auth[1]),
auth[2])
logger.info('Creating NTLM Session using : alias=%s, url=%s, \
headers=%s, cookies=%s, ntlm_auth=%s, timeout=%s, \
proxies=%s, verify=%s, debug=%s '
% (alias, url, headers, cookies, ntlm_auth,
timeout, proxies, verify, debug))
return self._create_session(
alias,
url,
headers,
cookies,
ntlm_auth,
timeout,
max_retries,
backoff_factor,
proxies,
verify,
debug,
disable_warnings)
def create_digest_session(self, alias, url, auth, headers={}, cookies={},
timeout=None, proxies=None, verify=False,
debug=0, max_retries=3,backoff_factor=0.10, disable_warnings=0):
""" Create Session: create a HTTP session to a server
``url`` Base url of the server
``alias`` Robot Framework alias to identify the session
``headers`` Dictionary of default headers
``cookies`` Dictionary of cookies
``auth`` ['DOMAIN', 'username', 'password'] for NTLM Authentication
``timeout`` Connection timeout
``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication
``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.
Defaults to False.
``debug`` Enable http verbosity option more information
https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel
``max_retries`` The maximum number of retries each connection should attempt.
``backoff_factor`` The pause between for each retry
``disable_warnings`` Disable requests warning useful when you have large number of testcases
"""
digest_auth = requests.auth.HTTPDigestAuth(*auth) if auth else None
return self._create_session(
alias,
url,
headers,
cookies,
digest_auth,
timeout,
max_retries,
backoff_factor,
proxies,
verify,
debug,
disable_warnings)
def create_client_cert_session(self, alias, url, headers={}, cookies={},
client_certs=None, timeout=None, proxies=None,
verify=False, debug=0, max_retries=3, backoff_factor=0.10, disable_warnings=0):
""" Create Session: create a HTTP session to a server
``url`` Base url of the server
``alias`` Robot Framework alias to identify the session
``headers`` Dictionary of default headers
``cookies`` Dictionary of cookies
``client_certs`` ['client certificate', 'client key'] PEM files containing the client key and certificate
``timeout`` Connection timeout
``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication
``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.
Defaults to False.
``debug`` Enable http verbosity option more information
https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel
``max_retries`` The maximum number of retries each connection should attempt.
``backoff_factor`` The pause between for each retry
``disable_warnings`` Disable requests warning useful when you have large number of testcases
"""
logger.info('Creating Session using : alias=%s, url=%s, headers=%s, \
cookies=%s, client_certs=%s, timeout=%s, proxies=%s, verify=%s, \
debug=%s ' % (alias, url, headers, cookies, client_certs, timeout,
proxies, verify, debug))
session = self._create_session(
alias,
url,
headers,
cookies,
None,
timeout,
max_retries,
backoff_factor,
proxies,
verify,
debug,
disable_warnings)
session.cert = tuple(client_certs)
return session
def delete_all_sessions(self):
""" Removes all the session objects """
logger.info('Delete All Sessions')
self._cache.empty_cache()
def update_session(self, alias, headers=None, cookies=None):
"""Update Session Headers: update a HTTP Session Headers
``alias`` Robot Framework alias to identify the session
``headers`` Dictionary of headers merge into session
"""
session = self._cache.switch(alias)
session.headers = merge_setting(headers, session.headers)
session.cookies = merge_cookies(session.cookies, cookies)
def to_json(self, content, pretty_print=False):
""" Convert a string to a JSON object
``content`` String content to convert into JSON
``pretty_print`` If defined, will output JSON is pretty print format
"""
if PY3:
if isinstance(content, bytes):
content = content.decode(encoding='utf-8')
if pretty_print:
json_ = self._json_pretty_print(content)
else:
json_ = json.loads(content)
logger.info('To JSON using : content=%s ' % (content))
logger.info('To JSON using : pretty_print=%s ' % (pretty_print))
return json_
def get_request(
self,
alias,
uri,
headers=None,
json=None,
params=None,
allow_redirects=None,
timeout=None):
""" Send a GET request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the GET request to
``params`` url parameters to append to the uri
``headers`` a dictionary of headers to use with the request
``json`` json data to send in the body of the :class:`Request`.
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``timeout`` connection timeout
"""
session = self._cache.switch(alias)
redir = True if allow_redirects is None else allow_redirects
response = self._get_request(
session, uri, params, headers, json, redir, timeout)
logger.info(
'Get Request using : alias=%s, uri=%s, headers=%s json=%s' %
(alias, uri, headers, json))
return response
def get(
self,
alias,
uri,
params=None,
headers=None,
allow_redirects=None,
timeout=None):
""" **Deprecated- See Get Request now**
Send a GET request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the GET request to
``headers`` a dictionary of headers to use with the request
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``timeout`` connection timeout
"""
logger.warn("Deprecation Warning: Use Get Request in the future")
session = self._cache.switch(alias)
redir = True if allow_redirects is None else allow_redirects
response = self._get_request(
session, uri, params, headers, redir, timeout, json)
return response
def post_request(
self,
alias,
uri,
data=None,
json=None,
params=None,
headers=None,
files=None,
allow_redirects=None,
timeout=None):
""" Send a POST request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the POST request to
``data`` a dictionary of key-value pairs that will be urlencoded
and sent as POST data
or binary data that is sent as the raw body content
or passed as such for multipart form data if ``files`` is also
defined
``json`` a value that will be json encoded
and sent as POST data if files or data is not specified
``params`` url parameters to append to the uri
``headers`` a dictionary of headers to use with the request
``files`` a dictionary of file names containing file data to POST to the server
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``timeout`` connection timeout
"""
session = self._cache.switch(alias)
if not files:
data = self._format_data_according_to_header(session, data, headers)
redir = True if allow_redirects is None else allow_redirects
response = self._body_request(
"post",
session,
uri,
data,
json,
params,
files,
headers,
redir,
timeout)
dataStr = self._format_data_to_log_string_according_to_header(data, headers)
logger.info('Post Request using : alias=%s, uri=%s, data=%s, headers=%s, files=%s, allow_redirects=%s '
% (alias, uri, dataStr, headers, files, redir))
return response
def post(
self,
alias,
uri,
data={},
headers=None,
files=None,
allow_redirects=None,
timeout=None):
""" **Deprecated- See Post Request now**
Send a POST request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the GET request to
``data`` a dictionary of key-value pairs that will be urlencoded
and sent as POST data
or binary data that is sent as the raw body content
``headers`` a dictionary of headers to use with the request
``files`` a dictionary of file names containing file data to POST to the server
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``timeout`` connection timeout
"""
logger.warn("Deprecation Warning: Use Post Request in the future")
session = self._cache.switch(alias)
data = self._utf8_urlencode(data)
redir = True if allow_redirects is None else allow_redirects
response = self._body_request(
"post",
session,
uri,
data,
None,
None,
files,
headers,
redir,
timeout)
return response
def patch_request(
self,
alias,
uri,
data=None,
json=None,
params=None,
headers=None,
files=None,
allow_redirects=None,
timeout=None):
""" Send a PATCH request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the PATCH request to
``data`` a dictionary of key-value pairs that will be urlencoded
and sent as PATCH data
or binary data that is sent as the raw body content
``json`` a value that will be json encoded
and sent as PATCH data if data is not specified
``headers`` a dictionary of headers to use with the request
``files`` a dictionary of file names containing file data to PATCH to the server
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``params`` url parameters to append to the uri
``timeout`` connection timeout
"""
session = self._cache.switch(alias)
data = self._format_data_according_to_header(session, data, headers)
redir = True if allow_redirects is None else allow_redirects
response = self._body_request(
"patch",
session,
uri,
data,
json,
params,
files,
headers,
redir,
timeout)
if isinstance(data, bytes):
data = data.decode('utf-8')
logger.info('Patch Request using : alias=%s, uri=%s, data=%s, \
headers=%s, files=%s, allow_redirects=%s '
% (alias, uri, data, headers, files, redir))
return response
def patch(
self,
alias,
uri,
data={},
headers=None,
files={},
allow_redirects=None,
timeout=None):
""" **Deprecated- See Patch Request now**
Send a PATCH request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the PATCH request to
``data`` a dictionary of key-value pairs that will be urlencoded
and sent as PATCH data
or binary data that is sent as the raw body content
``headers`` a dictionary of headers to use with the request
``files`` a dictionary of file names containing file data to PATCH to the server
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``timeout`` connection timeout
"""
logger.warn("Deprecation Warning: Use Patch Request in the future")
session = self._cache.switch(alias)
data = self._utf8_urlencode(data)
redir = True if allow_redirects is None else allow_redirects
response = self._body_request(
"patch",
session,
uri,
data,
None,
None,
files,
headers,
redir,
timeout)
return response
def put_request(
self,
alias,
uri,
data=None,
json=None,
params=None,
files=None,
headers=None,
allow_redirects=None,
timeout=None):
""" Send a PUT request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the PUT request to
``data`` a dictionary of key-value pairs that will be urlencoded
and sent as PUT data
or binary data that is sent as the raw body content
``json`` a value that will be json encoded
and sent as PUT data if data is not specified
``headers`` a dictionary of headers to use with the request
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``params`` url parameters to append to the uri
``timeout`` connection timeout
"""
session = self._cache.switch(alias)
data = self._format_data_according_to_header(session, data, headers)
redir = True if allow_redirects is None else allow_redirects
response = self._body_request(
"put",
session,
uri,
data,
json,
params,
files,
headers,
redir,
timeout)
if isinstance(data, bytes):
data = data.decode('utf-8')
logger.info('Put Request using : alias=%s, uri=%s, data=%s, \
headers=%s, allow_redirects=%s ' % (alias, uri, data, headers, redir))
return response
def put(
self,
alias,
uri,
data=None,
headers=None,
allow_redirects=None,
timeout=None):
""" **Deprecated- See Put Request now**
Send a PUT request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the PUT request to
``headers`` a dictionary of headers to use with the request
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``timeout`` connection timeout
"""
logger.warn("Deprecation Warning: Use Put Request in the future")
session = self._cache.switch(alias)
data = self._utf8_urlencode(data)
redir = True if allow_redirects is None else allow_redirects
response = self._body_request(
"put",
session,
uri,
data,
None,
None,
None,
headers,
redir,
timeout)
return response
def delete_request(
self,
alias,
uri,
data=None,
json=None,
params=None,
headers=None,
allow_redirects=None,
timeout=None):
""" Send a DELETE request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the DELETE request to
``json`` a value that will be json encoded
and sent as request data if data is not specified
``headers`` a dictionary of headers to use with the request
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``timeout`` connection timeout
"""
session = self._cache.switch(alias)
data = self._format_data_according_to_header(session, data, headers)
redir = True if allow_redirects is None else allow_redirects
response = self._delete_request(
session, uri, data, json, params, headers, redir, timeout)
if isinstance(data, bytes):
data = data.decode('utf-8')
logger.info('Delete Request using : alias=%s, uri=%s, data=%s, \
headers=%s, allow_redirects=%s ' % (alias, uri, data, headers, redir))
return response
def delete(
self,
alias,
uri,
data=(),
headers=None,
allow_redirects=None,
timeout=None):
""" * * * Deprecated- See Delete Request now * * *
Send a DELETE request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the DELETE request to
``headers`` a dictionary of headers to use with the request
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``timeout`` connection timeout
"""
logger.warn("Deprecation Warning: Use Delete Request in the future")
session = self._cache.switch(alias)
data = self._utf8_urlencode(data)
redir = True if allow_redirects is None else allow_redirects
response = self._delete_request(
session, uri, data, json, None, headers, redir, timeout)
return response
def head_request(
self,
alias,
uri,
headers=None,
allow_redirects=None,
timeout=None):
""" Send a HEAD request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the HEAD request to
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``headers`` a dictionary of headers to use with the request
"""
session = self._cache.switch(alias)
redir = False if allow_redirects is None else allow_redirects
response = self._head_request(session, uri, headers, redir, timeout)
logger.info('Head Request using : alias=%s, uri=%s, headers=%s, \
allow_redirects=%s ' % (alias, uri, headers, redir))
return response
def head(
self,
alias,
uri,
headers=None,
allow_redirects=None,
timeout=None):
""" **Deprecated- See Head Request now**
Send a HEAD request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the HEAD request to
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``headers`` a dictionary of headers to use with the request
"""
logger.warn("Deprecation Warning: Use Head Request in the future")
session = self._cache.switch(alias)
redir = False if allow_redirects is None else allow_redirects
response = self._head_request(session, uri, headers, redir, timeout)
return response
def options_request(
self,
alias,
uri,
headers=None,
allow_redirects=None,
timeout=None):
""" Send an OPTIONS request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the OPTIONS request to
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``headers`` a dictionary of headers to use with the request
"""
session = self._cache.switch(alias)
redir = True if allow_redirects is None else allow_redirects
response = self._options_request(session, uri, headers, redir, timeout)
logger.info(
'Options Request using : alias=%s, uri=%s, headers=%s, allow_redirects=%s ' %
(alias, uri, headers, redir))
return response
def options(
self,
alias,
uri,
headers=None,
allow_redirects=None,
timeout=None):
""" **Deprecated- See Options Request now**
Send an OPTIONS request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the OPTIONS request to
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``headers`` a dictionary of headers to use with the request
"""
logger.warn("Deprecation Warning: Use Options Request in the future")
session = self._cache.switch(alias)
redir = True if allow_redirects is None else allow_redirects
response = self._options_request(session, uri, headers, redir, timeout)
return response
def _get_request(
self,
session,
uri,
params,
headers,
json,
allow_redirects,
timeout):
self._capture_output()
resp = session.get(self._get_url(session, uri),
headers=headers,
json=json,
params=self._utf8_urlencode(params),
allow_redirects=allow_redirects,
timeout=self._get_timeout(timeout),
cookies=self.cookies,
verify=self.verify)
self._print_debug()
# Store the last session object
session.last_resp = resp
return resp
def _body_request(
self,
method_name,
session,
uri,
data,
json,
params,
files,
headers,
allow_redirects,
timeout):
self._capture_output()
method = getattr(session, method_name)
resp = method(self._get_url(session, uri),
data=data,
json=json,
params=self._utf8_urlencode(params),
files=files,
headers=headers,
allow_redirects=allow_redirects,
timeout=self._get_timeout(timeout),
cookies=self.cookies,
verify=self.verify)
self._print_debug()
# Store the last session object
session.last_resp = resp
self.builtin.log(method_name + ' response: ' + resp.text, 'DEBUG')
return resp
def _delete_request(
self,
session,
uri,
data,
json,
params,
headers,
allow_redirects,
timeout):
self._capture_output()
resp = session.delete(self._get_url(session, uri),
data=data,
json=json,
params=self._utf8_urlencode(params),
headers=headers,
allow_redirects=allow_redirects,
timeout=self._get_timeout(timeout),
cookies=self.cookies,
verify=self.verify)
self._print_debug()
# Store the last session object
session.last_resp = resp
return resp
def _head_request(self, session, uri, headers, allow_redirects, timeout):
self._capture_output()
resp = session.head(self._get_url(session, uri),
headers=headers,
allow_redirects=allow_redirects,
timeout=self._get_timeout(timeout),
cookies=self.cookies,
verify=self.verify)
self._print_debug()
# Store the last session object
session.last_resp = resp
return resp
def _options_request(
self,
session,
uri,
headers,
allow_redirects,
timeout):
self._capture_output()
resp = session.options(self._get_url(session, uri),
headers=headers,
cookies=self.cookies,
allow_redirects=allow_redirects,
timeout=self._get_timeout(timeout),
verify=self.verify)
self._print_debug()
# Store the last session object
session.last_resp = resp
return resp
def _get_url(self, session, uri):
    """Build the absolute URL for ``uri`` relative to ``session.url``.

    A falsy ``uri`` yields the session base URL unchanged; otherwise
    exactly one slash separates base and path.
    """
    if not uri:
        return session.url
    slash = '/' if not uri.startswith('/') else ''
    return "%s%s%s" % (session.url, slash, uri)
def _get_timeout(self, timeout):
    """Return ``timeout`` coerced to float, or the session-wide default
    (``self.timeout``) when ``timeout`` is None."""
    if timeout is None:
        return self.timeout
    return float(timeout)
def _capture_output(self):
    # When http debugging is enabled, swap sys.stdout for an in-memory
    # sink so httplib's set_debuglevel output can be collected and later
    # logged by _print_debug.
    # NOTE(review): WritableObject is defined elsewhere in this module.
    if self.debug >= 1:
        self.http_log = WritableObject()
        sys.stdout = self.http_log
def _print_debug(self):
    """Restore stdout and emit the http debug text captured by
    ``_capture_output`` to the Robot Framework DEBUG log."""
    if self.debug >= 1:
        sys.stdout = sys.__stdout__  # Restore stdout
        # Strip literal "\r" sequences and quote characters from the raw
        # httplib dump; on Python 2 the bytes also need string_escape
        # decoding first.
        if PY3:
            debug_info = ''.join(
                self.http_log.content).replace(
                '\\r',
                '').replace(
                '\'',
                '')
        else:
            debug_info = ''.join(
                self.http_log.content).replace(
                '\\r',
                '').decode('string_escape').replace(
                '\'',
                '')
        # Remove empty lines
        debug_info = "\n".join(
            [ll.rstrip() for ll in debug_info.splitlines() if ll.strip()])
        self.builtin.log(debug_info, 'DEBUG')
def _json_pretty_print(self, content):
    """Return ``content`` (a JSON document string) re-serialized in
    pretty-printed form: sorted keys, 4-space indentation."""
    parsed = json.loads(content)
    return json.dumps(parsed,
                      sort_keys=True,
                      indent=4,
                      separators=(',', ': '))
def _utf8_urlencode(self, data):
    """UTF-8 encode ``data`` for transmission.

    Text strings are returned as UTF-8 bytes; dicts are url-encoded with
    their string values UTF-8 encoded first; anything else is passed
    through untouched.
    """
    if self._is_string_type(data):
        return data.encode('utf-8')
    if not isinstance(data, dict):
        return data
    encoded = {
        k: (v.encode('utf-8') if self._is_string_type(v) else v)
        for k, v in data.items()
    }
    return urlencode(encoded)
def _format_data_according_to_header(self, session, data, headers):
    """Shape request ``data`` to match the effective Content-Type header."""
    # Merge per-request headers with the session defaults before
    # inspecting Content-Type (session values win — see _merge_headers).
    headers = self._merge_headers(session, headers)
    # Data that is already a JSON document is sent as-is.
    if data is not None and headers is not None and 'Content-Type' in headers and not self._is_json(data):
        if headers['Content-Type'].find("application/json") != -1:
            # Generators are streamed untouched; other values get
            # serialized to JSON.
            if not isinstance(data, types.GeneratorType):
                data = json.dumps(data)
        elif headers['Content-Type'].find("application/x-www-form-urlencoded") != -1:
            data = self._utf8_urlencode(data)
        else:
            # NOTE(review): every other Content-Type is also url-encoded,
            # identical to the form-encoded branch — confirm intended.
            data = self._utf8_urlencode(data)
    return data
def _format_data_to_log_string_according_to_header(self, data, headers):
    """Render request ``data`` for logging based on the Content-Type.

    JSON and form-encoded payloads are shown verbatim (bytes decoded as
    UTF-8); other content types are elided to "<content-type>"; missing
    data or headers yield "<empty>".
    """
    if data is None or headers is None or 'Content-Type' not in headers:
        return "<empty>"
    ctype = headers['Content-Type']
    loggable = (ctype.find("application/json") != -1 or
                ctype.find("application/x-www-form-urlencoded") != -1)
    if not loggable:
        return "<" + ctype + ">"
    if isinstance(data, bytes):
        return data.decode('utf-8')
    return data
@staticmethod
def _merge_headers(session, headers):
    """Combine per-request ``headers`` with ``session.headers`` into a
    new dict.

    The caller's dict is never mutated; on key clashes the session's
    value wins.
    """
    merged = {} if headers is None else headers.copy()
    merged.update(session.headers)
    return merged
@staticmethod
def _is_json(data):
    """Return True when ``data`` parses as a JSON document."""
    try:
        json.loads(data)
        return True
    except (TypeError, ValueError):
        return False
@staticmethod
def _is_string_type(data):
    """Return True for text strings: ``str`` on Python 3, ``unicode``
    on Python 2 (bytes are not considered text)."""
    if PY3:
        return isinstance(data, str)
    return isinstance(data, unicode)
|
bulkan/robotframework-requests
|
src/RequestsLibrary/RequestsKeywords.py
|
RequestsKeywords.create_session
|
python
|
def create_session(self, alias, url, headers={}, cookies={},
auth=None, timeout=None, proxies=None,
verify=False, debug=0, max_retries=3, backoff_factor=0.10, disable_warnings=0):
auth = requests.auth.HTTPBasicAuth(*auth) if auth else None
logger.info('Creating Session using : alias=%s, url=%s, headers=%s, \
cookies=%s, auth=%s, timeout=%s, proxies=%s, verify=%s, \
debug=%s ' % (alias, url, headers, cookies, auth, timeout,
proxies, verify, debug))
return self._create_session(
alias,
url,
headers,
cookies,
auth,
timeout,
max_retries,
backoff_factor,
proxies,
verify,
debug,
disable_warnings)
|
Create Session: create a HTTP session to a server
``url`` Base url of the server
``alias`` Robot Framework alias to identify the session
``headers`` Dictionary of default headers
``cookies`` Dictionary of cookies
``auth`` List of username & password for HTTP Basic Auth
``timeout`` Connection timeout
``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication
``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.
Defaults to False.
``debug`` Enable http verbosity option more information
https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel
``max_retries`` The maximum number of retries each connection should attempt.
``backoff_factor`` The pause between for each retry
``disable_warnings`` Disable requests warning useful when you have large number of testcases
|
train
|
https://github.com/bulkan/robotframework-requests/blob/11baa3277f1cb728712e26d996200703c15254a8/src/RequestsLibrary/RequestsKeywords.py#L157-L207
|
[
"def _create_session(\n self,\n alias,\n url,\n headers,\n cookies,\n auth,\n timeout,\n max_retries,\n backoff_factor,\n proxies,\n verify,\n debug,\n disable_warnings):\n \"\"\" Create Session: create a HTTP session to a server\n\n ``url`` Base url of the server\n\n ``alias`` Robot Framework alias to identify the session\n\n ``headers`` Dictionary of default headers\n\n ``cookies`` Dictionary of cookies\n\n ``auth`` List of username & password for HTTP Basic Auth\n\n ``timeout`` Connection timeout\n\n ``max_retries`` The maximum number of retries each connection should attempt.\n\n ``backoff_factor`` The pause between for each retry\n\n ``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication\n\n ``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.\n\n ``debug`` Enable http verbosity option more information\n https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel\n\n ``disable_warnings`` Disable requests warning useful when you have large number of testcases\n \"\"\"\n\n self.builtin.log('Creating session: %s' % alias, 'DEBUG')\n s = session = requests.Session()\n s.headers.update(headers)\n s.auth = auth if auth else s.auth\n s.proxies = proxies if proxies else s.proxies\n\n try:\n max_retries = int(max_retries)\n except ValueError as err:\n raise ValueError(\"Error converting max_retries parameter: %s\" % err)\n\n if max_retries > 0:\n http = requests.adapters.HTTPAdapter(max_retries=Retry(total=max_retries, backoff_factor=backoff_factor))\n https = requests.adapters.HTTPAdapter(max_retries=Retry(total=max_retries, backoff_factor=backoff_factor))\n\n # Replace the session's original adapters\n s.mount('http://', http)\n s.mount('https://', https)\n\n # Disable requests warnings, useful when you have large number of testcase\n # you will observe drastical changes in Robot log.html and output.xml files size\n if disable_warnings:\n logging.basicConfig() # you need to 
initialize logging, otherwise you will not see anything from requests\n logging.getLogger().setLevel(logging.ERROR)\n requests_log = logging.getLogger(\"requests\")\n requests_log.setLevel(logging.ERROR)\n requests_log.propagate = True\n if not verify:\n requests.packages.urllib3.disable_warnings()\n\n # verify can be a Boolean or a String\n if isinstance(verify, bool):\n s.verify = verify\n elif isinstance(verify, str) or isinstance(verify, unicode):\n if verify.lower() == 'true' or verify.lower() == 'false':\n s.verify = self.builtin.convert_to_boolean(verify)\n else:\n # String for CA_BUNDLE, not a Boolean String\n s.verify = verify\n else:\n # not a Boolean nor a String\n s.verify = verify\n\n # cant pass these into the Session anymore\n self.timeout = float(timeout) if timeout is not None else None\n self.cookies = cookies\n self.verify = verify if self.builtin.convert_to_boolean(verify) != True else None\n\n s.url = url\n\n # Enable http verbosity\n if int(debug) >= 1:\n self.debug = int(debug)\n httplib.HTTPConnection.debuglevel = self.debug\n\n self._cache.register(session, alias=alias)\n return session\n"
] |
class RequestsKeywords(object):
"""``RequestsLibrary`` is a [http://code.google.com/p/robotframework/|Robot Framework] test library that uses the [https://github.com/kennethreitz/requests|Requests] HTTP client.
Here is an example testcase
| ***** Settings ***** | | | | |
| Library | Collections | | | |
| Library | RequestsLibrary | | | |
| ***** Test Cases ***** | | | | |
| Get Requests | | | | |
| | Create Session | github | http://api.github.com | |
| | Create Session | google | http://www.google.com | |
| | ${resp}= | Get Request | google | / |
| | Should Be Equal As Strings | ${resp.status_code} | 200 | |
| | ${resp}= | Get Request | github | /users/bulkan |
| | Should Be Equal As Strings | ${resp.status_code} | 200 | |
| | Dictionary Should Contain Value | ${resp.json()} | Bulkan Savun Evcimen | |
"""
ROBOT_LIBRARY_SCOPE = 'Global'
def __init__(self):
    # Named cache of requests.Session objects, keyed by user-chosen alias.
    self._cache = robot.utils.ConnectionCache('No sessions created')
    # Handle to Robot Framework's BuiltIn library for logging/conversions.
    self.builtin = BuiltIn()
    # httplib debug verbosity; raised per session in _create_session.
    self.debug = 0
def _create_session(
        self,
        alias,
        url,
        headers,
        cookies,
        auth,
        timeout,
        max_retries,
        backoff_factor,
        proxies,
        verify,
        debug,
        disable_warnings):
    """ Create Session: create a HTTP session to a server

    ``url`` Base url of the server

    ``alias`` Robot Framework alias to identify the session

    ``headers`` Dictionary of default headers

    ``cookies`` Dictionary of cookies

    ``auth`` List of username & password for HTTP Basic Auth

    ``timeout`` Connection timeout

    ``max_retries`` The maximum number of retries each connection should attempt.

    ``backoff_factor`` The pause between for each retry

    ``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication

    ``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.

    ``debug`` Enable http verbosity option more information
    https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel

    ``disable_warnings`` Disable requests warning useful when you have large number of testcases
    """
    self.builtin.log('Creating session: %s' % alias, 'DEBUG')
    # Two names for the same object: `s` is used locally, `session` is
    # what gets registered and returned.
    s = session = requests.Session()
    s.headers.update(headers)
    s.auth = auth if auth else s.auth
    s.proxies = proxies if proxies else s.proxies
    try:
        max_retries = int(max_retries)
    except ValueError as err:
        raise ValueError("Error converting max_retries parameter: %s" % err)
    if max_retries > 0:
        http = requests.adapters.HTTPAdapter(max_retries=Retry(total=max_retries, backoff_factor=backoff_factor))
        https = requests.adapters.HTTPAdapter(max_retries=Retry(total=max_retries, backoff_factor=backoff_factor))
        # Replace the session's original adapters
        s.mount('http://', http)
        s.mount('https://', https)
    # Disable requests warnings, useful when you have large number of testcase
    # you will observe drastical changes in Robot log.html and output.xml files size
    if disable_warnings:
        logging.basicConfig()  # you need to initialize logging, otherwise you will not see anything from requests
        logging.getLogger().setLevel(logging.ERROR)
        requests_log = logging.getLogger("requests")
        requests_log.setLevel(logging.ERROR)
        requests_log.propagate = True
        if not verify:
            requests.packages.urllib3.disable_warnings()
    # verify can be a Boolean or a String
    if isinstance(verify, bool):
        s.verify = verify
    elif isinstance(verify, str) or isinstance(verify, unicode):
        if verify.lower() == 'true' or verify.lower() == 'false':
            s.verify = self.builtin.convert_to_boolean(verify)
        else:
            # String for CA_BUNDLE, not a Boolean String
            s.verify = verify
    else:
        # not a Boolean nor a String
        s.verify = verify
    # cant pass these into the Session anymore
    self.timeout = float(timeout) if timeout is not None else None
    self.cookies = cookies
    # NOTE(review): when verify is truthy, the per-request override is
    # None so individual requests fall back to the session's setting.
    self.verify = verify if self.builtin.convert_to_boolean(verify) != True else None
    s.url = url
    # Enable http verbosity
    # NOTE(review): this mutates a global — httplib debug level affects
    # every HTTPConnection in the process, not just this session.
    if int(debug) >= 1:
        self.debug = int(debug)
        httplib.HTTPConnection.debuglevel = self.debug
    self._cache.register(session, alias=alias)
    return session
def create_custom_session(
        self,
        alias,
        url,
        auth,
        headers={},
        cookies={},
        timeout=None,
        proxies=None,
        verify=False,
        debug=0,
        max_retries=3,
        backoff_factor=0.10,
        disable_warnings=0):
    """Open a new HTTP session identified by ``alias`` that authenticates
    with a custom requests auth object.

    ``url`` base url of the server; ``auth`` a custom authentication
    object passed straight to requests
    (http://docs.python-requests.org/en/master/user/advanced/#custom-authentication);
    ``headers``/``cookies`` session defaults; ``timeout`` connection
    timeout; ``proxies`` proxy url mapping; ``verify`` SSL verification
    flag or CA_BUNDLE path (default False); ``debug`` httplib verbosity;
    ``max_retries``/``backoff_factor`` retry policy;
    ``disable_warnings`` silence requests warnings.
    """
    logger.info('Creating Custom Authenticated Session using : alias=%s, url=%s, headers=%s, \
    cookies=%s, auth=%s, timeout=%s, proxies=%s, verify=%s, \
    debug=%s ' % (alias, url, headers, cookies, auth, timeout,
                  proxies, verify, debug))
    return self._create_session(
        alias, url, headers, cookies, auth, timeout,
        max_retries, backoff_factor, proxies, verify,
        debug, disable_warnings)
def create_ntlm_session(
        self,
        alias,
        url,
        auth,
        headers={},
        cookies={},
        timeout=None,
        proxies=None,
        verify=False,
        debug=0,
        max_retries=3,
        backoff_factor=0.10,
        disable_warnings=0):
    """Open a new HTTP session identified by ``alias`` that authenticates
    via NTLM.

    ``auth`` is ['DOMAIN', 'username', 'password']; the remaining
    parameters match `Create Session`. Raises AssertionError when the
    requests-ntlm module is missing or ``auth`` is malformed.
    """
    # Guard clauses replace the original if/elif/else ladder.
    if not HttpNtlmAuth:
        raise AssertionError('Requests NTLM module not loaded')
    if len(auth) != 3:
        raise AssertionError('Incorrect number of authentication arguments'
                             ' - expected 3, got {}'.format(len(auth)))
    ntlm_auth = HttpNtlmAuth('{}\\{}'.format(auth[0], auth[1]),
                             auth[2])
    logger.info('Creating NTLM Session using : alias=%s, url=%s, \
    headers=%s, cookies=%s, ntlm_auth=%s, timeout=%s, \
    proxies=%s, verify=%s, debug=%s '
                % (alias, url, headers, cookies, ntlm_auth,
                   timeout, proxies, verify, debug))
    return self._create_session(
        alias, url, headers, cookies, ntlm_auth, timeout,
        max_retries, backoff_factor, proxies, verify,
        debug, disable_warnings)
def create_digest_session(self, alias, url, auth, headers={}, cookies={},
                          timeout=None, proxies=None, verify=False,
                          debug=0, max_retries=3, backoff_factor=0.10, disable_warnings=0):
    """Open a new HTTP session identified by ``alias`` that authenticates
    with HTTP Digest auth.

    ``auth`` is a (username, password) pair; the remaining parameters
    match `Create Session`.
    """
    digest_auth = None
    if auth:
        digest_auth = requests.auth.HTTPDigestAuth(*auth)
    return self._create_session(
        alias, url, headers, cookies, digest_auth, timeout,
        max_retries, backoff_factor, proxies, verify,
        debug, disable_warnings)
def create_client_cert_session(self, alias, url, headers={}, cookies={},
                               client_certs=None, timeout=None, proxies=None,
                               verify=False, debug=0, max_retries=3, backoff_factor=0.10, disable_warnings=0):
    """Open a new HTTP session identified by ``alias`` that authenticates
    with a TLS client certificate.

    ``client_certs`` is a ['client certificate', 'client key'] pair of
    PEM file paths; the remaining parameters match `Create Session`.
    """
    logger.info('Creating Session using : alias=%s, url=%s, headers=%s, \
    cookies=%s, client_certs=%s, timeout=%s, proxies=%s, verify=%s, \
    debug=%s ' % (alias, url, headers, cookies, client_certs, timeout,
                  proxies, verify, debug))
    session = self._create_session(
        alias, url, headers, cookies, None, timeout,
        max_retries, backoff_factor, proxies, verify,
        debug, disable_warnings)
    # requests expects the client cert as a (cert, key) tuple.
    session.cert = tuple(client_certs)
    return session
def delete_all_sessions(self):
    """ Removes all the session objects """
    logger.info('Delete All Sessions')
    # Forget every registered session; existing aliases become invalid.
    self._cache.empty_cache()
def update_session(self, alias, headers=None, cookies=None):
    """Update Session Headers: update a HTTP Session Headers

    ``alias`` Robot Framework alias to identify the session

    ``headers`` Dictionary of headers merge into session

    ``cookies`` Dictionary of cookies merged into the session's jar
    """
    session = self._cache.switch(alias)
    # merge_setting/merge_cookies come from the requests library.
    # NOTE(review): merge_setting appears to favor its first argument,
    # so new headers should override existing ones — confirm.
    session.headers = merge_setting(headers, session.headers)
    session.cookies = merge_cookies(session.cookies, cookies)
def to_json(self, content, pretty_print=False):
    """Parse ``content`` into a JSON object.

    ``content`` the JSON document as a string (UTF-8 bytes are decoded
    first on Python 3)

    ``pretty_print`` when truthy, return a pretty-printed JSON string
    instead of the parsed object
    """
    if PY3 and isinstance(content, bytes):
        content = content.decode(encoding='utf-8')
    if pretty_print:
        json_ = self._json_pretty_print(content)
    else:
        json_ = json.loads(content)
    logger.info('To JSON using : content=%s ' % (content))
    logger.info('To JSON using : pretty_print=%s ' % (pretty_print))
    return json_
def get_request(
        self,
        alias,
        uri,
        headers=None,
        json=None,
        params=None,
        allow_redirects=None,
        timeout=None):
    """Send a GET on the session registered under ``alias``.

    ``uri`` path appended to the session base url; ``params`` query
    parameters; ``headers`` extra request headers; ``json`` json payload
    for the request body; ``allow_redirects`` follow redirects (defaults
    to True); ``timeout`` per-request connection timeout.
    """
    session = self._cache.switch(alias)
    redir = allow_redirects if allow_redirects is not None else True
    response = self._get_request(
        session, uri, params, headers, json, redir, timeout)
    logger.info(
        'Get Request using : alias=%s, uri=%s, headers=%s json=%s' %
        (alias, uri, headers, json))
    return response
def get(
        self,
        alias,
        uri,
        params=None,
        headers=None,
        allow_redirects=None,
        timeout=None):
    """ **Deprecated- See Get Request now**

    Send a GET request on the session object found using the
    given `alias`

    ``alias`` that will be used to identify the Session object in the cache

    ``uri`` to send the GET request to

    ``params`` url parameters to append to the uri

    ``headers`` a dictionary of headers to use with the request

    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.

    ``timeout`` connection timeout
    """
    logger.warn("Deprecation Warning: Use Get Request in the future")
    session = self._cache.switch(alias)
    redir = True if allow_redirects is None else allow_redirects
    # BUG FIX: arguments were previously passed to _get_request in the
    # wrong order — (params, headers, redir, timeout, json) — so `redir`
    # landed in the `json` slot, `timeout` in `allow_redirects`, and the
    # `json` module itself in `timeout`. Match the helper's signature:
    # (session, uri, params, headers, json, allow_redirects, timeout).
    response = self._get_request(
        session, uri, params, headers, None, redir, timeout)
    return response
def post_request(
        self,
        alias,
        uri,
        data=None,
        json=None,
        params=None,
        headers=None,
        files=None,
        allow_redirects=None,
        timeout=None):
    """Send a POST on the session registered under ``alias``.

    ``data`` urlencoded dict, raw body bytes, or multipart fields when
    ``files`` is given; ``json`` value serialized as the JSON body when
    neither ``data`` nor ``files`` is supplied; ``params`` query
    parameters; ``headers`` extra request headers; ``files`` mapping of
    file names to payloads; ``allow_redirects`` follow redirects
    (defaults to True); ``timeout`` per-request connection timeout.
    """
    session = self._cache.switch(alias)
    redir = allow_redirects if allow_redirects is not None else True
    if not files:
        # Shape the body to match the effective Content-Type header.
        data = self._format_data_according_to_header(session, data, headers)
    response = self._body_request(
        "post", session, uri, data, json, params, files, headers,
        redir, timeout)
    data_log = self._format_data_to_log_string_according_to_header(data, headers)
    logger.info('Post Request using : alias=%s, uri=%s, data=%s, headers=%s, files=%s, allow_redirects=%s '
                % (alias, uri, data_log, headers, files, redir))
    return response
def post(
        self,
        alias,
        uri,
        data={},
        headers=None,
        files=None,
        allow_redirects=None,
        timeout=None):
    """**Deprecated — use Post Request instead.**

    POST ``uri`` on the session registered under ``alias``; ``data`` is
    url-encoded before sending. ``files`` maps file names to payloads;
    ``allow_redirects`` defaults to True; ``timeout`` is the per-request
    connection timeout.
    """
    logger.warn("Deprecation Warning: Use Post Request in the future")
    session = self._cache.switch(alias)
    redir = allow_redirects if allow_redirects is not None else True
    encoded = self._utf8_urlencode(data)
    return self._body_request(
        "post", session, uri, encoded, None, None, files, headers,
        redir, timeout)
def patch_request(
        self,
        alias,
        uri,
        data=None,
        json=None,
        params=None,
        headers=None,
        files=None,
        allow_redirects=None,
        timeout=None):
    """Send a PATCH on the session registered under ``alias``.

    ``data`` urlencoded dict or raw body bytes; ``json`` value
    serialized as the JSON body when ``data`` is not supplied;
    ``params`` query parameters; ``headers`` extra request headers;
    ``files`` mapping of file names to payloads; ``allow_redirects``
    follow redirects (defaults to True); ``timeout`` per-request
    connection timeout.
    """
    session = self._cache.switch(alias)
    redir = allow_redirects if allow_redirects is not None else True
    data = self._format_data_according_to_header(session, data, headers)
    response = self._body_request(
        "patch", session, uri, data, json, params, files, headers,
        redir, timeout)
    if isinstance(data, bytes):
        # Decode so the log line shows readable text, not a bytes repr.
        data = data.decode('utf-8')
    logger.info('Patch Request using : alias=%s, uri=%s, data=%s, \
    headers=%s, files=%s, allow_redirects=%s '
                % (alias, uri, data, headers, files, redir))
    return response
def patch(
        self,
        alias,
        uri,
        data={},
        headers=None,
        files={},
        allow_redirects=None,
        timeout=None):
    """**Deprecated — use Patch Request instead.**

    PATCH ``uri`` on the session registered under ``alias``; ``data`` is
    url-encoded before sending. ``files`` maps file names to payloads;
    ``allow_redirects`` defaults to True; ``timeout`` is the per-request
    connection timeout.
    """
    logger.warn("Deprecation Warning: Use Patch Request in the future")
    session = self._cache.switch(alias)
    redir = allow_redirects if allow_redirects is not None else True
    encoded = self._utf8_urlencode(data)
    return self._body_request(
        "patch", session, uri, encoded, None, None, files, headers,
        redir, timeout)
def put_request(
        self,
        alias,
        uri,
        data=None,
        json=None,
        params=None,
        files=None,
        headers=None,
        allow_redirects=None,
        timeout=None):
    """Send a PUT on the session registered under ``alias``.

    ``data`` urlencoded dict or raw body bytes; ``json`` value
    serialized as the JSON body when ``data`` is not supplied;
    ``params`` query parameters; ``files`` mapping of file names to
    payloads; ``headers`` extra request headers; ``allow_redirects``
    follow redirects (defaults to True); ``timeout`` per-request
    connection timeout.
    """
    session = self._cache.switch(alias)
    redir = allow_redirects if allow_redirects is not None else True
    data = self._format_data_according_to_header(session, data, headers)
    response = self._body_request(
        "put", session, uri, data, json, params, files, headers,
        redir, timeout)
    if isinstance(data, bytes):
        # Decode so the log line shows readable text, not a bytes repr.
        data = data.decode('utf-8')
    logger.info('Put Request using : alias=%s, uri=%s, data=%s, \
    headers=%s, allow_redirects=%s ' % (alias, uri, data, headers, redir))
    return response
def put(
        self,
        alias,
        uri,
        data=None,
        headers=None,
        allow_redirects=None,
        timeout=None):
    """**Deprecated — use Put Request instead.**

    PUT ``uri`` on the session registered under ``alias``; ``data`` is
    url-encoded before sending. ``allow_redirects`` defaults to True;
    ``timeout`` is the per-request connection timeout.
    """
    logger.warn("Deprecation Warning: Use Put Request in the future")
    session = self._cache.switch(alias)
    redir = allow_redirects if allow_redirects is not None else True
    encoded = self._utf8_urlencode(data)
    return self._body_request(
        "put", session, uri, encoded, None, None, None, headers,
        redir, timeout)
def delete_request(
        self,
        alias,
        uri,
        data=None,
        json=None,
        params=None,
        headers=None,
        allow_redirects=None,
        timeout=None):
    """Send a DELETE on the session registered under ``alias``.

    ``data`` request body (shaped to match the Content-Type header);
    ``json`` value serialized as the JSON body when ``data`` is not
    supplied; ``params`` query parameters; ``headers`` extra request
    headers; ``allow_redirects`` follow redirects (defaults to True);
    ``timeout`` per-request connection timeout.
    """
    session = self._cache.switch(alias)
    redir = allow_redirects if allow_redirects is not None else True
    data = self._format_data_according_to_header(session, data, headers)
    response = self._delete_request(
        session, uri, data, json, params, headers, redir, timeout)
    if isinstance(data, bytes):
        # Decode so the log line shows readable text, not a bytes repr.
        data = data.decode('utf-8')
    logger.info('Delete Request using : alias=%s, uri=%s, data=%s, \
    headers=%s, allow_redirects=%s ' % (alias, uri, data, headers, redir))
    return response
def delete(
        self,
        alias,
        uri,
        data=(),
        headers=None,
        allow_redirects=None,
        timeout=None):
    """ * * *   Deprecated- See Delete Request now   * * *

    Send a DELETE request on the session object found using the
    given `alias`

    ``alias`` that will be used to identify the Session object in the cache

    ``uri`` to send the DELETE request to

    ``headers`` a dictionary of headers to use with the request

    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.

    ``timeout`` connection timeout
    """
    logger.warn("Deprecation Warning: Use Delete Request in the future")
    session = self._cache.switch(alias)
    data = self._utf8_urlencode(data)
    redir = True if allow_redirects is None else allow_redirects
    # BUG FIX: the module-level `json` module object was previously
    # passed as the request's json payload; this keyword takes no json
    # argument, so pass None.
    response = self._delete_request(
        session, uri, data, None, None, headers, redir, timeout)
    return response
def head_request(
        self,
        alias,
        uri,
        headers=None,
        allow_redirects=None,
        timeout=None):
    """Send a HEAD on the session registered under ``alias``.

    ``headers`` extra request headers; ``allow_redirects`` follow
    redirects (defaults to False for HEAD); ``timeout`` per-request
    connection timeout.
    """
    session = self._cache.switch(alias)
    # HEAD defaults to NOT following redirects, unlike the other verbs.
    redir = allow_redirects if allow_redirects is not None else False
    response = self._head_request(session, uri, headers, redir, timeout)
    logger.info('Head Request using : alias=%s, uri=%s, headers=%s, \
    allow_redirects=%s ' % (alias, uri, headers, redir))
    return response
def head(
        self,
        alias,
        uri,
        headers=None,
        allow_redirects=None,
        timeout=None):
    """**Deprecated — use Head Request instead.**

    HEAD ``uri`` on the session registered under ``alias``.
    ``allow_redirects`` defaults to False for HEAD; ``timeout`` is the
    per-request connection timeout.
    """
    logger.warn("Deprecation Warning: Use Head Request in the future")
    session = self._cache.switch(alias)
    redir = allow_redirects if allow_redirects is not None else False
    return self._head_request(session, uri, headers, redir, timeout)
def options_request(
        self,
        alias,
        uri,
        headers=None,
        allow_redirects=None,
        timeout=None):
    """Send an OPTIONS on the session registered under ``alias``.

    ``headers`` extra request headers; ``allow_redirects`` follow
    redirects (defaults to True); ``timeout`` per-request connection
    timeout.
    """
    session = self._cache.switch(alias)
    redir = allow_redirects if allow_redirects is not None else True
    response = self._options_request(session, uri, headers, redir, timeout)
    logger.info(
        'Options Request using : alias=%s, uri=%s, headers=%s, allow_redirects=%s ' %
        (alias, uri, headers, redir))
    return response
def options(
        self,
        alias,
        uri,
        headers=None,
        allow_redirects=None,
        timeout=None):
    """**Deprecated — use Options Request instead.**

    OPTIONS ``uri`` on the session registered under ``alias``.
    ``allow_redirects`` defaults to True; ``timeout`` is the per-request
    connection timeout.
    """
    logger.warn("Deprecation Warning: Use Options Request in the future")
    session = self._cache.switch(alias)
    redir = allow_redirects if allow_redirects is not None else True
    return self._options_request(session, uri, headers, redir, timeout)
def _get_request(
        self,
        session,
        uri,
        params,
        headers,
        json,
        allow_redirects,
        timeout):
    """Perform the actual GET on *session* and return the response.

    stdout is captured while the request runs so httplib's verbose
    debug output can later be re-emitted through Robot's logger.
    """
    self._capture_output()
    resp = session.get(self._get_url(session, uri),
                       headers=headers,
                       json=json,
                       params=self._utf8_urlencode(params),
                       allow_redirects=allow_redirects,
                       timeout=self._get_timeout(timeout),
                       cookies=self.cookies,
                       verify=self.verify)
    self._print_debug()
    # Store the last response on the session for later inspection.
    session.last_resp = resp
    return resp
def _body_request(
        self,
        method_name,
        session,
        uri,
        data,
        json,
        params,
        files,
        headers,
        allow_redirects,
        timeout):
    """Issue a body-carrying request (POST/PUT/PATCH) on *session*.

    ``method_name`` names the requests.Session method to call, letting
    the POST/PUT/PATCH keywords share one implementation.
    """
    self._capture_output()
    # Dynamic dispatch: session.post / session.put / session.patch.
    method = getattr(session, method_name)
    resp = method(self._get_url(session, uri),
                  data=data,
                  json=json,
                  params=self._utf8_urlencode(params),
                  files=files,
                  headers=headers,
                  allow_redirects=allow_redirects,
                  timeout=self._get_timeout(timeout),
                  cookies=self.cookies,
                  verify=self.verify)
    self._print_debug()
    # Store the last response on the session for later inspection.
    session.last_resp = resp
    self.builtin.log(method_name + ' response: ' + resp.text, 'DEBUG')
    return resp
def _delete_request(
        self,
        session,
        uri,
        data,
        json,
        params,
        headers,
        allow_redirects,
        timeout):
    """Issue a DELETE request on *session* and return the response."""
    self._capture_output()
    resp = session.delete(self._get_url(session, uri),
                          data=data,
                          json=json,
                          params=self._utf8_urlencode(params),
                          headers=headers,
                          allow_redirects=allow_redirects,
                          timeout=self._get_timeout(timeout),
                          cookies=self.cookies,
                          verify=self.verify)
    self._print_debug()
    # Store the last response on the session for later inspection.
    session.last_resp = resp
    return resp
def _head_request(self, session, uri, headers, allow_redirects, timeout):
    """Issue a HEAD request on *session* and return the response."""
    self._capture_output()
    resp = session.head(self._get_url(session, uri),
                        headers=headers,
                        allow_redirects=allow_redirects,
                        timeout=self._get_timeout(timeout),
                        cookies=self.cookies,
                        verify=self.verify)
    self._print_debug()
    # Store the last response on the session for later inspection.
    session.last_resp = resp
    return resp
def _options_request(
        self,
        session,
        uri,
        headers,
        allow_redirects,
        timeout):
    """Issue an OPTIONS request on *session* and return the response."""
    self._capture_output()
    resp = session.options(self._get_url(session, uri),
                           headers=headers,
                           cookies=self.cookies,
                           allow_redirects=allow_redirects,
                           timeout=self._get_timeout(timeout),
                           verify=self.verify)
    self._print_debug()
    # Store the last response on the session for later inspection.
    session.last_resp = resp
    return resp
def _get_url(self, session, uri):
"""
Helper method to get the full url
"""
url = session.url
if uri:
slash = '' if uri.startswith('/') else '/'
url = "%s%s%s" % (session.url, slash, uri)
return url
def _get_timeout(self, timeout):
return float(timeout) if timeout is not None else self.timeout
def _capture_output(self):
    # When http debugging is enabled, swap stdout for an in-memory buffer
    # so httplib's debug chatter can later be re-emitted via Robot's logger
    # (see _print_debug, which restores stdout).
    if self.debug >= 1:
        self.http_log = WritableObject()
        sys.stdout = self.http_log
def _print_debug(self):
    """Restore stdout and forward captured httplib debug output to Robot's log."""
    if self.debug >= 1:
        sys.stdout = sys.__stdout__  # Restore stdout
        # httplib emits repr()-style text; strip carriage-return escapes and
        # quotes before logging. PY2 additionally needs string_escape decoding.
        if PY3:
            debug_info = ''.join(
                self.http_log.content).replace(
                '\\r',
                '').replace(
                '\'',
                '')
        else:
            debug_info = ''.join(
                self.http_log.content).replace(
                '\\r',
                '').decode('string_escape').replace(
                '\'',
                '')

        # Remove empty lines
        debug_info = "\n".join(
            [ll.rstrip() for ll in debug_info.splitlines() if ll.strip()])
        self.builtin.log(debug_info, 'DEBUG')
def _json_pretty_print(self, content):
"""
Pretty print a JSON object
``content`` JSON object to pretty print
"""
temp = json.loads(content)
return json.dumps(
temp,
sort_keys=True,
indent=4,
separators=(
',',
': '))
def _utf8_urlencode(self, data):
if self._is_string_type(data):
return data.encode('utf-8')
if not isinstance(data, dict):
return data
utf8_data = {}
for k, v in data.items():
if self._is_string_type(v):
v = v.encode('utf-8')
utf8_data[k] = v
return urlencode(utf8_data)
def _format_data_according_to_header(self, session, data, headers):
    """Encode the request body to match the effective Content-Type.

    Strings that already parse as JSON are passed through untouched.
    JSON content types get ``json.dumps`` (generators are left alone so
    requests can stream them); everything else is utf-8 urlencoded.
    """
    # Session default headers are merged in first (session values win).
    headers = self._merge_headers(session, headers)
    if data is not None and headers is not None and 'Content-Type' in headers and not self._is_json(data):
        if headers['Content-Type'].find("application/json") != -1:
            if not isinstance(data, types.GeneratorType):
                data = json.dumps(data)
        elif headers['Content-Type'].find("application/x-www-form-urlencoded") != -1:
            data = self._utf8_urlencode(data)
    else:
        data = self._utf8_urlencode(data)

    return data
def _format_data_to_log_string_according_to_header(self, data, headers):
dataStr = "<empty>"
if data is not None and headers is not None and 'Content-Type' in headers:
if (headers['Content-Type'].find("application/json") != -1) or \
(headers['Content-Type'].find("application/x-www-form-urlencoded") != -1):
if isinstance(data, bytes):
dataStr = data.decode('utf-8')
else:
dataStr = data
else:
dataStr = "<" + headers['Content-Type'] + ">"
return dataStr
@staticmethod
def _merge_headers(session, headers):
if headers is None:
headers = {}
else:
headers = headers.copy()
headers.update(session.headers)
return headers
@staticmethod
def _is_json(data):
try:
json.loads(data)
except (TypeError, ValueError):
return False
return True
@staticmethod
def _is_string_type(data):
    # Text type differs across interpreters: ``str`` on Python 3,
    # ``unicode`` on Python 2 (PY2 ``str`` is bytes and is deliberately
    # excluded, so raw bytes bodies are never re-encoded).
    if PY3 and isinstance(data, str):
        return True
    elif not PY3 and isinstance(data, unicode):
        return True
    return False
|
bulkan/robotframework-requests
|
src/RequestsLibrary/RequestsKeywords.py
|
RequestsKeywords.create_ntlm_session
|
python
|
def create_ntlm_session(
self,
alias,
url,
auth,
headers={},
cookies={},
timeout=None,
proxies=None,
verify=False,
debug=0,
max_retries=3,
backoff_factor=0.10,
disable_warnings=0):
if not HttpNtlmAuth:
raise AssertionError('Requests NTLM module not loaded')
elif len(auth) != 3:
raise AssertionError('Incorrect number of authentication arguments'
' - expected 3, got {}'.format(len(auth)))
else:
ntlm_auth = HttpNtlmAuth('{}\\{}'.format(auth[0], auth[1]),
auth[2])
logger.info('Creating NTLM Session using : alias=%s, url=%s, \
headers=%s, cookies=%s, ntlm_auth=%s, timeout=%s, \
proxies=%s, verify=%s, debug=%s '
% (alias, url, headers, cookies, ntlm_auth,
timeout, proxies, verify, debug))
return self._create_session(
alias,
url,
headers,
cookies,
ntlm_auth,
timeout,
max_retries,
backoff_factor,
proxies,
verify,
debug,
disable_warnings)
|
Create Session: create a HTTP session to a server
``url`` Base url of the server
``alias`` Robot Framework alias to identify the session
``headers`` Dictionary of default headers
``cookies`` Dictionary of cookies
``auth`` ['DOMAIN', 'username', 'password'] for NTLM Authentication
``timeout`` Connection timeout
``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication
``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.
Defaults to False.
``debug`` Enable http verbosity option more information
https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel
``max_retries`` The maximum number of retries each connection should attempt.
``backoff_factor`` The pause between for each retry
``disable_warnings`` Disable requests warning useful when you have large number of testcases
|
train
|
https://github.com/bulkan/robotframework-requests/blob/11baa3277f1cb728712e26d996200703c15254a8/src/RequestsLibrary/RequestsKeywords.py#L272-L340
|
[
"def _create_session(\n self,\n alias,\n url,\n headers,\n cookies,\n auth,\n timeout,\n max_retries,\n backoff_factor,\n proxies,\n verify,\n debug,\n disable_warnings):\n \"\"\" Create Session: create a HTTP session to a server\n\n ``url`` Base url of the server\n\n ``alias`` Robot Framework alias to identify the session\n\n ``headers`` Dictionary of default headers\n\n ``cookies`` Dictionary of cookies\n\n ``auth`` List of username & password for HTTP Basic Auth\n\n ``timeout`` Connection timeout\n\n ``max_retries`` The maximum number of retries each connection should attempt.\n\n ``backoff_factor`` The pause between for each retry\n\n ``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication\n\n ``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.\n\n ``debug`` Enable http verbosity option more information\n https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel\n\n ``disable_warnings`` Disable requests warning useful when you have large number of testcases\n \"\"\"\n\n self.builtin.log('Creating session: %s' % alias, 'DEBUG')\n s = session = requests.Session()\n s.headers.update(headers)\n s.auth = auth if auth else s.auth\n s.proxies = proxies if proxies else s.proxies\n\n try:\n max_retries = int(max_retries)\n except ValueError as err:\n raise ValueError(\"Error converting max_retries parameter: %s\" % err)\n\n if max_retries > 0:\n http = requests.adapters.HTTPAdapter(max_retries=Retry(total=max_retries, backoff_factor=backoff_factor))\n https = requests.adapters.HTTPAdapter(max_retries=Retry(total=max_retries, backoff_factor=backoff_factor))\n\n # Replace the session's original adapters\n s.mount('http://', http)\n s.mount('https://', https)\n\n # Disable requests warnings, useful when you have large number of testcase\n # you will observe drastical changes in Robot log.html and output.xml files size\n if disable_warnings:\n logging.basicConfig() # you need to 
initialize logging, otherwise you will not see anything from requests\n logging.getLogger().setLevel(logging.ERROR)\n requests_log = logging.getLogger(\"requests\")\n requests_log.setLevel(logging.ERROR)\n requests_log.propagate = True\n if not verify:\n requests.packages.urllib3.disable_warnings()\n\n # verify can be a Boolean or a String\n if isinstance(verify, bool):\n s.verify = verify\n elif isinstance(verify, str) or isinstance(verify, unicode):\n if verify.lower() == 'true' or verify.lower() == 'false':\n s.verify = self.builtin.convert_to_boolean(verify)\n else:\n # String for CA_BUNDLE, not a Boolean String\n s.verify = verify\n else:\n # not a Boolean nor a String\n s.verify = verify\n\n # cant pass these into the Session anymore\n self.timeout = float(timeout) if timeout is not None else None\n self.cookies = cookies\n self.verify = verify if self.builtin.convert_to_boolean(verify) != True else None\n\n s.url = url\n\n # Enable http verbosity\n if int(debug) >= 1:\n self.debug = int(debug)\n httplib.HTTPConnection.debuglevel = self.debug\n\n self._cache.register(session, alias=alias)\n return session\n"
] |
class RequestsKeywords(object):
"""``RequestsLibrary`` is a [http://code.google.com/p/robotframework/|Robot Framework] test library that uses the [https://github.com/kennethreitz/requests|Requests] HTTP client.
Here is an example testcase
| ***** Settings ***** | | | | |
| Library | Collections | | | |
| Library | RequestsLibrary | | | |
| ***** Test Cases ***** | | | | |
| Get Requests | | | | |
| | Create Session | github | http://api.github.com | |
| | Create Session | google | http://www.google.com | |
| | ${resp}= | Get Request | google | / |
| | Should Be Equal As Strings | ${resp.status_code} | 200 | |
| | ${resp}= | Get Request | github | /users/bulkan |
| | Should Be Equal As Strings | ${resp.status_code} | 200 | |
| | Dictionary Should Contain Value | ${resp.json()} | Bulkan Savun Evcimen | |
"""
ROBOT_LIBRARY_SCOPE = 'Global'
def __init__(self):
    # One shared cache of named Session objects for the whole test run;
    # keywords look sessions up by alias.
    self._cache = robot.utils.ConnectionCache('No sessions created')
    self.builtin = BuiltIn()
    # httplib verbosity level; raised per-session via the ``debug`` argument.
    self.debug = 0
def _create_session(
        self,
        alias,
        url,
        headers,
        cookies,
        auth,
        timeout,
        max_retries,
        backoff_factor,
        proxies,
        verify,
        debug,
        disable_warnings):
    """ Create Session: create a HTTP session to a server

    ``url`` Base url of the server

    ``alias`` Robot Framework alias to identify the session

    ``headers`` Dictionary of default headers

    ``cookies`` Dictionary of cookies

    ``auth`` Authentication object (Basic/Digest/NTLM/custom) or None

    ``timeout`` Connection timeout

    ``max_retries`` The maximum number of retries each connection should attempt.

    ``backoff_factor`` The pause between for each retry

    ``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication

    ``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.

    ``debug`` Enable http verbosity option more information
    https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel

    ``disable_warnings`` Disable requests warning useful when you have large number of testcases
    """

    self.builtin.log('Creating session: %s' % alias, 'DEBUG')
    s = session = requests.Session()
    s.headers.update(headers)
    s.auth = auth if auth else s.auth
    s.proxies = proxies if proxies else s.proxies

    try:
        max_retries = int(max_retries)
    except ValueError as err:
        raise ValueError("Error converting max_retries parameter: %s" % err)

    if max_retries > 0:
        # Mount retry-aware transport adapters for both schemes.
        http = requests.adapters.HTTPAdapter(max_retries=Retry(total=max_retries, backoff_factor=backoff_factor))
        https = requests.adapters.HTTPAdapter(max_retries=Retry(total=max_retries, backoff_factor=backoff_factor))

        # Replace the session's original adapters
        s.mount('http://', http)
        s.mount('https://', https)

    # Disable requests warnings, useful when you have large number of testcase
    # you will observe drastical changes in Robot log.html and output.xml files size
    if disable_warnings:
        logging.basicConfig()  # you need to initialize logging, otherwise you will not see anything from requests
        logging.getLogger().setLevel(logging.ERROR)
        requests_log = logging.getLogger("requests")
        requests_log.setLevel(logging.ERROR)
        requests_log.propagate = True
        if not verify:
            requests.packages.urllib3.disable_warnings()

    # verify can be a Boolean or a String
    if isinstance(verify, bool):
        s.verify = verify
    elif isinstance(verify, str) or isinstance(verify, unicode):
        if verify.lower() == 'true' or verify.lower() == 'false':
            s.verify = self.builtin.convert_to_boolean(verify)
        else:
            # String for CA_BUNDLE, not a Boolean String
            s.verify = verify
    else:
        # not a Boolean nor a String
        s.verify = verify

    # cant pass these into the Session anymore
    self.timeout = float(timeout) if timeout is not None else None
    self.cookies = cookies
    # Keep verify on the library only when it is not plain True
    # (e.g. a CA bundle path that must be passed per request).
    self.verify = verify if self.builtin.convert_to_boolean(verify) != True else None

    s.url = url

    # Enable http verbosity
    if int(debug) >= 1:
        self.debug = int(debug)
        httplib.HTTPConnection.debuglevel = self.debug

    self._cache.register(session, alias=alias)
    return session
def create_session(self, alias, url, headers={}, cookies={},
                   auth=None, timeout=None, proxies=None,
                   verify=False, debug=0, max_retries=3, backoff_factor=0.10, disable_warnings=0):
    """ Create Session: create a HTTP session to a server

    ``url`` Base url of the server

    ``alias`` Robot Framework alias to identify the session

    ``headers`` Dictionary of default headers

    ``cookies`` Dictionary of cookies

    ``auth`` List of username & password for HTTP Basic Auth

    ``timeout`` Connection timeout

    ``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication

    ``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.
    Defaults to False.

    ``debug`` Enable http verbosity option more information
    https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel

    ``max_retries`` The maximum number of retries each connection should attempt.

    ``backoff_factor`` The pause between for each retry

    ``disable_warnings`` Disable requests warning useful when you have large number of testcases
    """
    # Wrap the (username, password) pair for HTTP Basic Auth.
    auth = requests.auth.HTTPBasicAuth(*auth) if auth else None

    logger.info('Creating Session using : alias=%s, url=%s, headers=%s, \
                cookies=%s, auth=%s, timeout=%s, proxies=%s, verify=%s, \
                debug=%s ' % (alias, url, headers, cookies, auth, timeout,
                              proxies, verify, debug))

    return self._create_session(
        alias,
        url,
        headers,
        cookies,
        auth,
        timeout,
        max_retries,
        backoff_factor,
        proxies,
        verify,
        debug,
        disable_warnings)
def create_custom_session(
        self,
        alias,
        url,
        auth,
        headers={},
        cookies={},
        timeout=None,
        proxies=None,
        verify=False,
        debug=0,
        max_retries=3,
        backoff_factor=0.10,
        disable_warnings=0):
    """ Create Session: create a HTTP session to a server

    ``url`` Base url of the server

    ``alias`` Robot Framework alias to identify the session

    ``headers`` Dictionary of default headers

    ``cookies`` Dictionary of cookies

    ``auth`` A Custom Authentication object to be passed on to the requests library.
    http://docs.python-requests.org/en/master/user/advanced/#custom-authentication

    ``timeout`` Connection timeout

    ``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication

    ``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.
    Defaults to False.

    ``debug`` Enable http verbosity option more information
    https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel

    ``max_retries`` The maximum number of retries each connection should attempt.

    ``backoff_factor`` The pause between for each retry

    ``disable_warnings`` Disable requests warning useful when you have large number of testcases
    """
    # The auth object is passed straight through to requests unmodified.
    logger.info('Creating Custom Authenticated Session using : alias=%s, url=%s, headers=%s, \
                cookies=%s, auth=%s, timeout=%s, proxies=%s, verify=%s, \
                debug=%s ' % (alias, url, headers, cookies, auth, timeout,
                              proxies, verify, debug))

    return self._create_session(
        alias,
        url,
        headers,
        cookies,
        auth,
        timeout,
        max_retries,
        backoff_factor,
        proxies,
        verify,
        debug,
        disable_warnings)
def create_digest_session(self, alias, url, auth, headers={}, cookies={},
                          timeout=None, proxies=None, verify=False,
                          debug=0, max_retries=3, backoff_factor=0.10, disable_warnings=0):
    """ Create Session: create a HTTP session to a server

    ``url`` Base url of the server

    ``alias`` Robot Framework alias to identify the session

    ``headers`` Dictionary of default headers

    ``cookies`` Dictionary of cookies

    ``auth`` ['username', 'password'] for HTTP Digest Authentication

    ``timeout`` Connection timeout

    ``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication

    ``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.
    Defaults to False.

    ``debug`` Enable http verbosity option more information
    https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel

    ``max_retries`` The maximum number of retries each connection should attempt.

    ``backoff_factor`` The pause between for each retry

    ``disable_warnings`` Disable requests warning useful when you have large number of testcases
    """
    # Wrap the (username, password) pair for HTTP Digest Auth.
    digest_auth = requests.auth.HTTPDigestAuth(*auth) if auth else None

    return self._create_session(
        alias,
        url,
        headers,
        cookies,
        digest_auth,
        timeout,
        max_retries,
        backoff_factor,
        proxies,
        verify,
        debug,
        disable_warnings)
def create_client_cert_session(self, alias, url, headers={}, cookies={},
                               client_certs=None, timeout=None, proxies=None,
                               verify=False, debug=0, max_retries=3, backoff_factor=0.10, disable_warnings=0):
    """ Create Session: create a HTTP session to a server

    ``url`` Base url of the server

    ``alias`` Robot Framework alias to identify the session

    ``headers`` Dictionary of default headers

    ``cookies`` Dictionary of cookies

    ``client_certs`` ['client certificate', 'client key'] PEM files containing the client key and certificate

    ``timeout`` Connection timeout

    ``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication

    ``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.
    Defaults to False.

    ``debug`` Enable http verbosity option more information
    https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel

    ``max_retries`` The maximum number of retries each connection should attempt.

    ``backoff_factor`` The pause between for each retry

    ``disable_warnings`` Disable requests warning useful when you have large number of testcases
    """
    logger.info('Creating Session using : alias=%s, url=%s, headers=%s, \
                cookies=%s, client_certs=%s, timeout=%s, proxies=%s, verify=%s, \
                debug=%s ' % (alias, url, headers, cookies, client_certs, timeout,
                              proxies, verify, debug))

    session = self._create_session(
        alias,
        url,
        headers,
        cookies,
        None,
        timeout,
        max_retries,
        backoff_factor,
        proxies,
        verify,
        debug,
        disable_warnings)

    # FIX: ``client_certs`` defaults to None, and tuple(None) raised
    # TypeError. Only attach the cert pair when one was actually supplied.
    if client_certs:
        session.cert = tuple(client_certs)
    return session
def delete_all_sessions(self):
    """ Removes all the session objects """
    # Clears the alias cache; aliases become unusable afterwards.
    logger.info('Delete All Sessions')
    self._cache.empty_cache()
def update_session(self, alias, headers=None, cookies=None):
    """Update Session Headers: update a HTTP Session Headers

    ``alias`` Robot Framework alias to identify the session

    ``headers`` Dictionary of headers merge into session

    ``cookies`` Dictionary of cookies merge into session
    """
    session = self._cache.switch(alias)
    # merge_setting / merge_cookies are requests' own merge helpers;
    # existing session values are kept unless explicitly overridden.
    session.headers = merge_setting(headers, session.headers)
    session.cookies = merge_cookies(session.cookies, cookies)
def to_json(self, content, pretty_print=False):
    """ Convert a string to a JSON object

    ``content`` String content to convert into JSON

    ``pretty_print`` If defined, will output JSON in pretty print format
    """
    if PY3:
        if isinstance(content, bytes):
            content = content.decode(encoding='utf-8')
    if pretty_print:
        # NOTE: returns a formatted JSON *string*, not a parsed object.
        json_ = self._json_pretty_print(content)
    else:
        json_ = json.loads(content)
    logger.info('To JSON using : content=%s ' % (content))
    logger.info('To JSON using : pretty_print=%s ' % (pretty_print))
    return json_
def get_request(
        self,
        alias,
        uri,
        headers=None,
        json=None,
        params=None,
        allow_redirects=None,
        timeout=None):
    """ Send a GET request on the session object found using the
    given `alias`

    ``alias`` that will be used to identify the Session object in the cache

    ``uri`` to send the GET request to

    ``params`` url parameters to append to the uri

    ``headers`` a dictionary of headers to use with the request

    ``json`` json data to send in the body of the :class:`Request`.

    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.

    ``timeout`` connection timeout
    """
    session = self._cache.switch(alias)
    # GET follows redirects by default.
    redir = True if allow_redirects is None else allow_redirects
    response = self._get_request(
        session, uri, params, headers, json, redir, timeout)
    logger.info(
        'Get Request using : alias=%s, uri=%s, headers=%s json=%s' %
        (alias, uri, headers, json))
    return response
def get(
        self,
        alias,
        uri,
        params=None,
        headers=None,
        allow_redirects=None,
        timeout=None):
    """ **Deprecated- See Get Request now**

    Send a GET request on the session object found using the
    given `alias`

    ``alias`` that will be used to identify the Session object in the cache

    ``uri`` to send the GET request to

    ``headers`` a dictionary of headers to use with the request

    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.

    ``timeout`` connection timeout
    """
    logger.warn("Deprecation Warning: Use Get Request in the future")
    session = self._cache.switch(alias)
    redir = True if allow_redirects is None else allow_redirects
    # FIX: arguments were previously passed as (.., redir, timeout, json)
    # against _get_request's signature
    # (session, uri, params, headers, json, allow_redirects, timeout),
    # which passed the ``json`` *module* as the timeout and crashed in
    # float(). This keyword has no json argument, so pass None for the
    # body and keep redir/timeout in their proper positions.
    response = self._get_request(
        session, uri, params, headers, None, redir, timeout)
    return response
def post_request(
        self,
        alias,
        uri,
        data=None,
        json=None,
        params=None,
        headers=None,
        files=None,
        allow_redirects=None,
        timeout=None):
    """ Send a POST request on the session object found using the
    given `alias`

    ``alias`` that will be used to identify the Session object in the cache

    ``uri`` to send the POST request to

    ``data`` a dictionary of key-value pairs that will be urlencoded
    and sent as POST data
    or binary data that is sent as the raw body content
    or passed as such for multipart form data if ``files`` is also
    defined

    ``json`` a value that will be json encoded
    and sent as POST data if files or data is not specified

    ``params`` url parameters to append to the uri

    ``headers`` a dictionary of headers to use with the request

    ``files`` a dictionary of file names containing file data to POST to the server

    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.

    ``timeout`` connection timeout
    """
    session = self._cache.switch(alias)
    # For multipart uploads the body must not be re-encoded here;
    # requests builds the multipart payload itself.
    if not files:
        data = self._format_data_according_to_header(session, data, headers)
    redir = True if allow_redirects is None else allow_redirects

    response = self._body_request(
        "post",
        session,
        uri,
        data,
        json,
        params,
        files,
        headers,
        redir,
        timeout)

    dataStr = self._format_data_to_log_string_according_to_header(data, headers)
    logger.info('Post Request using : alias=%s, uri=%s, data=%s, headers=%s, files=%s, allow_redirects=%s '
                % (alias, uri, dataStr, headers, files, redir))
    return response
def post(
        self,
        alias,
        uri,
        data={},
        headers=None,
        files=None,
        allow_redirects=None,
        timeout=None):
    """ **Deprecated- See Post Request now**

    Send a POST request on the session object found using the
    given `alias`

    ``alias`` that will be used to identify the Session object in the cache

    ``uri`` to send the POST request to

    ``data`` a dictionary of key-value pairs that will be urlencoded
    and sent as POST data
    or binary data that is sent as the raw body content

    ``headers`` a dictionary of headers to use with the request

    ``files`` a dictionary of file names containing file data to POST to the server

    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.

    ``timeout`` connection timeout
    """
    logger.warn("Deprecation Warning: Use Post Request in the future")
    session = self._cache.switch(alias)
    # Legacy behaviour: always urlencode, regardless of Content-Type.
    data = self._utf8_urlencode(data)
    redir = True if allow_redirects is None else allow_redirects

    response = self._body_request(
        "post",
        session,
        uri,
        data,
        None,
        None,
        files,
        headers,
        redir,
        timeout)
    return response
def patch_request(
        self,
        alias,
        uri,
        data=None,
        json=None,
        params=None,
        headers=None,
        files=None,
        allow_redirects=None,
        timeout=None):
    """ Send a PATCH request on the session object found using the
    given `alias`

    ``alias`` that will be used to identify the Session object in the cache

    ``uri`` to send the PATCH request to

    ``data`` a dictionary of key-value pairs that will be urlencoded
    and sent as PATCH data
    or binary data that is sent as the raw body content

    ``json`` a value that will be json encoded
    and sent as PATCH data if data is not specified

    ``headers`` a dictionary of headers to use with the request

    ``files`` a dictionary of file names containing file data to PATCH to the server

    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.

    ``params`` url parameters to append to the uri

    ``timeout`` connection timeout
    """
    session = self._cache.switch(alias)
    data = self._format_data_according_to_header(session, data, headers)
    redir = True if allow_redirects is None else allow_redirects

    response = self._body_request(
        "patch",
        session,
        uri,
        data,
        json,
        params,
        files,
        headers,
        redir,
        timeout)

    # Decode for readable logging only; the request itself already went out.
    if isinstance(data, bytes):
        data = data.decode('utf-8')
    logger.info('Patch Request using : alias=%s, uri=%s, data=%s, \
    headers=%s, files=%s, allow_redirects=%s '
                % (alias, uri, data, headers, files, redir))
    return response
def patch(
        self,
        alias,
        uri,
        data={},
        headers=None,
        files={},
        allow_redirects=None,
        timeout=None):
    """ **Deprecated- See Patch Request now**

    Send a PATCH request on the session object found using the
    given `alias`

    ``alias`` that will be used to identify the Session object in the cache

    ``uri`` to send the PATCH request to

    ``data`` a dictionary of key-value pairs that will be urlencoded
    and sent as PATCH data
    or binary data that is sent as the raw body content

    ``headers`` a dictionary of headers to use with the request

    ``files`` a dictionary of file names containing file data to PATCH to the server

    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.

    ``timeout`` connection timeout
    """
    logger.warn("Deprecation Warning: Use Patch Request in the future")
    session = self._cache.switch(alias)
    # Legacy behaviour: always urlencode, regardless of Content-Type.
    data = self._utf8_urlencode(data)
    redir = True if allow_redirects is None else allow_redirects

    response = self._body_request(
        "patch",
        session,
        uri,
        data,
        None,
        None,
        files,
        headers,
        redir,
        timeout)
    return response
def put_request(
        self,
        alias,
        uri,
        data=None,
        json=None,
        params=None,
        files=None,
        headers=None,
        allow_redirects=None,
        timeout=None):
    """ Send a PUT request on the session object found using the
    given `alias`

    ``alias`` that will be used to identify the Session object in the cache

    ``uri`` to send the PUT request to

    ``data`` a dictionary of key-value pairs that will be urlencoded
    and sent as PUT data
    or binary data that is sent as the raw body content

    ``json`` a value that will be json encoded
    and sent as PUT data if data is not specified

    ``headers`` a dictionary of headers to use with the request

    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.

    ``params`` url parameters to append to the uri

    ``timeout`` connection timeout
    """
    session = self._cache.switch(alias)
    data = self._format_data_according_to_header(session, data, headers)
    redir = True if allow_redirects is None else allow_redirects

    response = self._body_request(
        "put",
        session,
        uri,
        data,
        json,
        params,
        files,
        headers,
        redir,
        timeout)

    # Decode for readable logging only; the request itself already went out.
    if isinstance(data, bytes):
        data = data.decode('utf-8')
    logger.info('Put Request using : alias=%s, uri=%s, data=%s, \
    headers=%s, allow_redirects=%s ' % (alias, uri, data, headers, redir))
    return response
def put(
        self,
        alias,
        uri,
        data=None,
        headers=None,
        allow_redirects=None,
        timeout=None):
    """ **Deprecated- See Put Request now**

    Send a PUT request on the session object found using the
    given `alias`

    ``alias`` that will be used to identify the Session object in the cache

    ``uri`` to send the PUT request to

    ``headers`` a dictionary of headers to use with the request

    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.

    ``timeout`` connection timeout
    """
    logger.warn("Deprecation Warning: Use Put Request in the future")
    session = self._cache.switch(alias)
    # Legacy behaviour: always urlencode, regardless of Content-Type.
    data = self._utf8_urlencode(data)
    redir = True if allow_redirects is None else allow_redirects

    response = self._body_request(
        "put",
        session,
        uri,
        data,
        None,
        None,
        None,
        headers,
        redir,
        timeout)
    return response
def delete_request(
        self,
        alias,
        uri,
        data=None,
        json=None,
        params=None,
        headers=None,
        allow_redirects=None,
        timeout=None):
    """ Send a DELETE request on the session object found using the
    given `alias`

    ``alias`` that will be used to identify the Session object in the cache

    ``uri`` to send the DELETE request to

    ``json`` a value that will be json encoded
    and sent as request data if data is not specified

    ``headers`` a dictionary of headers to use with the request

    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.

    ``timeout`` connection timeout
    """
    session = self._cache.switch(alias)
    data = self._format_data_according_to_header(session, data, headers)
    redir = True if allow_redirects is None else allow_redirects

    response = self._delete_request(
        session, uri, data, json, params, headers, redir, timeout)

    # Decode for readable logging only; the request itself already went out.
    if isinstance(data, bytes):
        data = data.decode('utf-8')
    logger.info('Delete Request using : alias=%s, uri=%s, data=%s, \
    headers=%s, allow_redirects=%s ' % (alias, uri, data, headers, redir))
    return response
def delete(
        self,
        alias,
        uri,
        data=(),
        headers=None,
        allow_redirects=None,
        timeout=None):
    """ **Deprecated- See Delete Request now**

    Send a DELETE request on the session object found using the
    given `alias`

    ``alias`` that will be used to identify the Session object in the cache

    ``uri`` to send the DELETE request to

    ``headers`` a dictionary of headers to use with the request

    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.

    ``timeout`` connection timeout
    """
    logger.warn("Deprecation Warning: Use Delete Request in the future")
    session = self._cache.switch(alias)
    data = self._utf8_urlencode(data)
    redir = True if allow_redirects is None else allow_redirects
    # FIX: the ``json`` *module* was previously passed as the json-body
    # argument of _delete_request, making requests attempt to serialize
    # the module. This keyword has no json argument, so pass None.
    response = self._delete_request(
        session, uri, data, None, None, headers, redir, timeout)
    return response
def head_request(
        self,
        alias,
        uri,
        headers=None,
        allow_redirects=None,
        timeout=None):
    """ Send a HEAD request on the session object found using the
    given `alias`

    ``alias`` that will be used to identify the Session object in the cache

    ``uri`` to send the HEAD request to

    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.

    ``headers`` a dictionary of headers to use with the request

    ``timeout`` connection timeout
    """
    session = self._cache.switch(alias)
    # Unlike the other verbs, HEAD does not follow redirects by default.
    redir = False if allow_redirects is None else allow_redirects
    response = self._head_request(session, uri, headers, redir, timeout)
    logger.info('Head Request using : alias=%s, uri=%s, headers=%s, \
    allow_redirects=%s ' % (alias, uri, headers, redir))
    return response
def head(
        self,
        alias,
        uri,
        headers=None,
        allow_redirects=None,
        timeout=None):
    """ **Deprecated- See Head Request now**

    Send a HEAD request on the session object found using the
    given `alias`

    ``alias`` that will be used to identify the Session object in the cache

    ``uri`` to send the HEAD request to

    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.

    ``headers`` a dictionary of headers to use with the request
    """
    logger.warn("Deprecation Warning: Use Head Request in the future")
    cached_session = self._cache.switch(alias)
    # HEAD does not follow redirects unless explicitly asked to.
    follow = allow_redirects if allow_redirects is not None else False
    return self._head_request(cached_session, uri, headers, follow, timeout)
def options_request(
        self,
        alias,
        uri,
        headers=None,
        allow_redirects=None,
        timeout=None):
    """ Send an OPTIONS request on the session object found using the
    given `alias`

    ``alias`` that will be used to identify the Session object in the cache

    ``uri`` to send the OPTIONS request to

    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.

    ``headers`` a dictionary of headers to use with the request
    """
    cached_session = self._cache.switch(alias)
    # OPTIONS follows redirects by default.
    follow = allow_redirects if allow_redirects is not None else True
    resp = self._options_request(cached_session, uri, headers, follow, timeout)
    logger.info(
        'Options Request using : alias=%s, uri=%s, headers=%s, allow_redirects=%s ' %
        (alias, uri, headers, follow))
    return resp
def options(
        self,
        alias,
        uri,
        headers=None,
        allow_redirects=None,
        timeout=None):
    """ **Deprecated- See Options Request now**

    Send an OPTIONS request on the session object found using the
    given `alias`

    ``alias`` that will be used to identify the Session object in the cache

    ``uri`` to send the OPTIONS request to

    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.

    ``headers`` a dictionary of headers to use with the request
    """
    logger.warn("Deprecation Warning: Use Options Request in the future")
    cached_session = self._cache.switch(alias)
    follow = allow_redirects if allow_redirects is not None else True
    return self._options_request(cached_session, uri, headers, follow, timeout)
def _get_request(
        self,
        session,
        uri,
        params,
        headers,
        json,
        allow_redirects,
        timeout):
    """Perform the actual GET on the cached requests session.

    Returns the requests.Response; it is also stored on ``session.last_resp``.
    """
    # Start capturing httplib verbosity if debug is enabled (see _capture_output).
    self._capture_output()
    resp = session.get(self._get_url(session, uri),
                       headers=headers,
                       json=json,
                       params=self._utf8_urlencode(params),
                       allow_redirects=allow_redirects,
                       timeout=self._get_timeout(timeout),
                       cookies=self.cookies,
                       verify=self.verify)
    # Restore stdout and re-log whatever httplib printed.
    self._print_debug()
    # Store the last session object
    session.last_resp = resp
    return resp
def _body_request(
        self,
        method_name,
        session,
        uri,
        data,
        json,
        params,
        files,
        headers,
        allow_redirects,
        timeout):
    """Dispatch a body-carrying request (``post``/``put``/``patch``) by name.

    ``method_name`` selects the bound session method; the remaining
    arguments are forwarded to requests. The response is returned and
    also stored on ``session.last_resp``.
    """
    self._capture_output()
    # Resolve the bound session method (e.g. session.post) dynamically.
    method = getattr(session, method_name)
    resp = method(self._get_url(session, uri),
                  data=data,
                  json=json,
                  params=self._utf8_urlencode(params),
                  files=files,
                  headers=headers,
                  allow_redirects=allow_redirects,
                  timeout=self._get_timeout(timeout),
                  cookies=self.cookies,
                  verify=self.verify)
    self._print_debug()
    # Store the last session object
    session.last_resp = resp
    self.builtin.log(method_name + ' response: ' + resp.text, 'DEBUG')
    return resp
def _delete_request(
        self,
        session,
        uri,
        data,
        json,
        params,
        headers,
        allow_redirects,
        timeout):
    """Perform the actual DELETE on the cached requests session.

    Returns the requests.Response; it is also stored on ``session.last_resp``.
    """
    self._capture_output()
    resp = session.delete(self._get_url(session, uri),
                          data=data,
                          json=json,
                          params=self._utf8_urlencode(params),
                          headers=headers,
                          allow_redirects=allow_redirects,
                          timeout=self._get_timeout(timeout),
                          cookies=self.cookies,
                          verify=self.verify)
    self._print_debug()
    # Store the last session object
    session.last_resp = resp
    return resp
def _head_request(self, session, uri, headers, allow_redirects, timeout):
    """Perform the actual HEAD on the cached requests session.

    Returns the requests.Response; it is also stored on ``session.last_resp``.
    """
    self._capture_output()
    resp = session.head(self._get_url(session, uri),
                        headers=headers,
                        allow_redirects=allow_redirects,
                        timeout=self._get_timeout(timeout),
                        cookies=self.cookies,
                        verify=self.verify)
    self._print_debug()
    # Store the last session object
    session.last_resp = resp
    return resp
def _options_request(
        self,
        session,
        uri,
        headers,
        allow_redirects,
        timeout):
    """Perform the actual OPTIONS on the cached requests session.

    Returns the requests.Response; it is also stored on ``session.last_resp``.
    """
    self._capture_output()
    resp = session.options(self._get_url(session, uri),
                           headers=headers,
                           cookies=self.cookies,
                           allow_redirects=allow_redirects,
                           timeout=self._get_timeout(timeout),
                           verify=self.verify)
    self._print_debug()
    # Store the last session object
    session.last_resp = resp
    return resp
def _get_url(self, session, uri):
"""
Helper method to get the full url
"""
url = session.url
if uri:
slash = '' if uri.startswith('/') else '/'
url = "%s%s%s" % (session.url, slash, uri)
return url
def _get_timeout(self, timeout):
return float(timeout) if timeout is not None else self.timeout
def _capture_output(self):
    # When http debugging is enabled, redirect stdout into a buffer so
    # httplib's verbose output can be collected and re-logged later by
    # _print_debug().
    if self.debug >= 1:
        self.http_log = WritableObject()
        sys.stdout = self.http_log
def _print_debug(self):
    """Restore stdout and re-emit captured httplib output as a DEBUG log."""
    if self.debug >= 1:
        sys.stdout = sys.__stdout__  # Restore stdout
        # The captured chunks contain escaped carriage returns and quote
        # characters; strip them (additionally decoding string escapes on
        # Python 2) before logging.
        if PY3:
            debug_info = ''.join(
                self.http_log.content).replace(
                '\\r',
                '').replace(
                '\'',
                '')
        else:
            debug_info = ''.join(
                self.http_log.content).replace(
                '\\r',
                '').decode('string_escape').replace(
                '\'',
                '')

        # Remove empty lines
        debug_info = "\n".join(
            [ll.rstrip() for ll in debug_info.splitlines() if ll.strip()])
        self.builtin.log(debug_info, 'DEBUG')
def _json_pretty_print(self, content):
"""
Pretty print a JSON object
``content`` JSON object to pretty print
"""
temp = json.loads(content)
return json.dumps(
temp,
sort_keys=True,
indent=4,
separators=(
',',
': '))
def _utf8_urlencode(self, data):
    """UTF-8-encode *data* for transmission.

    Text strings are returned as UTF-8 bytes; dicts are urlencoded with
    UTF-8-encoded string values; anything else passes through untouched.
    """
    if self._is_string_type(data):
        return data.encode('utf-8')

    if not isinstance(data, dict):
        return data

    utf8_data = {}
    for k, v in data.items():
        if self._is_string_type(v):
            v = v.encode('utf-8')
        utf8_data[k] = v
    return urlencode(utf8_data)
def _format_data_according_to_header(self, session, data, headers):
    """Serialize *data* to match the request's effective Content-Type.

    Session-level headers are merged in first; data that already parses
    as JSON is left alone.
    """
    headers = self._merge_headers(session, headers)

    if data is not None and headers is not None and 'Content-Type' in headers and not self._is_json(data):
        if headers['Content-Type'].find("application/json") != -1:
            # Generators cannot be json-serialized; pass them through so
            # requests can stream them as the raw body.
            if not isinstance(data, types.GeneratorType):
                data = json.dumps(data)
        elif headers['Content-Type'].find("application/x-www-form-urlencoded") != -1:
            data = self._utf8_urlencode(data)
    else:
        data = self._utf8_urlencode(data)

    return data
def _format_data_to_log_string_according_to_header(self, data, headers):
dataStr = "<empty>"
if data is not None and headers is not None and 'Content-Type' in headers:
if (headers['Content-Type'].find("application/json") != -1) or \
(headers['Content-Type'].find("application/x-www-form-urlencoded") != -1):
if isinstance(data, bytes):
dataStr = data.decode('utf-8')
else:
dataStr = data
else:
dataStr = "<" + headers['Content-Type'] + ">"
return dataStr
@staticmethod
def _merge_headers(session, headers):
    """Combine per-request *headers* with the session's default headers.

    NOTE(review): ``session.headers`` is applied *last*, so session-level
    headers override per-request ones when both define the same key —
    confirm this precedence is intentional before relying on per-request
    overrides.
    """
    if headers is None:
        headers = {}
    else:
        # Copy so the caller's dict is never mutated.
        headers = headers.copy()

    headers.update(session.headers)

    return headers
@staticmethod
def _is_json(data):
try:
json.loads(data)
except (TypeError, ValueError):
return False
return True
@staticmethod
def _is_string_type(data):
    # True for text strings on either interpreter: ``str`` on Python 3,
    # ``unicode`` on Python 2 (PY3 is a module-level flag; ``unicode`` is
    # only evaluated on Python 2, where it exists).
    if PY3 and isinstance(data, str):
        return True
    elif not PY3 and isinstance(data, unicode):
        return True
    return False
|
bulkan/robotframework-requests
|
src/RequestsLibrary/RequestsKeywords.py
|
RequestsKeywords.create_digest_session
|
python
|
def create_digest_session(self, alias, url, auth, headers={}, cookies={},
timeout=None, proxies=None, verify=False,
debug=0, max_retries=3,backoff_factor=0.10, disable_warnings=0):
digest_auth = requests.auth.HTTPDigestAuth(*auth) if auth else None
return self._create_session(
alias,
url,
headers,
cookies,
digest_auth,
timeout,
max_retries,
backoff_factor,
proxies,
verify,
debug,
disable_warnings)
|
Create Session: create a HTTP session to a server
``url`` Base url of the server
``alias`` Robot Framework alias to identify the session
``headers`` Dictionary of default headers
``cookies`` Dictionary of cookies
``auth`` ['DOMAIN', 'username', 'password'] for NTLM Authentication
``timeout`` Connection timeout
``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication
``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.
Defaults to False.
``debug`` Enable http verbosity option more information
https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel
``max_retries`` The maximum number of retries each connection should attempt.
``backoff_factor`` The pause between for each retry
``disable_warnings`` Disable requests warning useful when you have large number of testcases
|
train
|
https://github.com/bulkan/robotframework-requests/blob/11baa3277f1cb728712e26d996200703c15254a8/src/RequestsLibrary/RequestsKeywords.py#L342-L387
|
[
"def _create_session(\n self,\n alias,\n url,\n headers,\n cookies,\n auth,\n timeout,\n max_retries,\n backoff_factor,\n proxies,\n verify,\n debug,\n disable_warnings):\n \"\"\" Create Session: create a HTTP session to a server\n\n ``url`` Base url of the server\n\n ``alias`` Robot Framework alias to identify the session\n\n ``headers`` Dictionary of default headers\n\n ``cookies`` Dictionary of cookies\n\n ``auth`` List of username & password for HTTP Basic Auth\n\n ``timeout`` Connection timeout\n\n ``max_retries`` The maximum number of retries each connection should attempt.\n\n ``backoff_factor`` The pause between for each retry\n\n ``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication\n\n ``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.\n\n ``debug`` Enable http verbosity option more information\n https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel\n\n ``disable_warnings`` Disable requests warning useful when you have large number of testcases\n \"\"\"\n\n self.builtin.log('Creating session: %s' % alias, 'DEBUG')\n s = session = requests.Session()\n s.headers.update(headers)\n s.auth = auth if auth else s.auth\n s.proxies = proxies if proxies else s.proxies\n\n try:\n max_retries = int(max_retries)\n except ValueError as err:\n raise ValueError(\"Error converting max_retries parameter: %s\" % err)\n\n if max_retries > 0:\n http = requests.adapters.HTTPAdapter(max_retries=Retry(total=max_retries, backoff_factor=backoff_factor))\n https = requests.adapters.HTTPAdapter(max_retries=Retry(total=max_retries, backoff_factor=backoff_factor))\n\n # Replace the session's original adapters\n s.mount('http://', http)\n s.mount('https://', https)\n\n # Disable requests warnings, useful when you have large number of testcase\n # you will observe drastical changes in Robot log.html and output.xml files size\n if disable_warnings:\n logging.basicConfig() # you need to 
initialize logging, otherwise you will not see anything from requests\n logging.getLogger().setLevel(logging.ERROR)\n requests_log = logging.getLogger(\"requests\")\n requests_log.setLevel(logging.ERROR)\n requests_log.propagate = True\n if not verify:\n requests.packages.urllib3.disable_warnings()\n\n # verify can be a Boolean or a String\n if isinstance(verify, bool):\n s.verify = verify\n elif isinstance(verify, str) or isinstance(verify, unicode):\n if verify.lower() == 'true' or verify.lower() == 'false':\n s.verify = self.builtin.convert_to_boolean(verify)\n else:\n # String for CA_BUNDLE, not a Boolean String\n s.verify = verify\n else:\n # not a Boolean nor a String\n s.verify = verify\n\n # cant pass these into the Session anymore\n self.timeout = float(timeout) if timeout is not None else None\n self.cookies = cookies\n self.verify = verify if self.builtin.convert_to_boolean(verify) != True else None\n\n s.url = url\n\n # Enable http verbosity\n if int(debug) >= 1:\n self.debug = int(debug)\n httplib.HTTPConnection.debuglevel = self.debug\n\n self._cache.register(session, alias=alias)\n return session\n"
] |
class RequestsKeywords(object):
"""``RequestsLibrary`` is a [http://code.google.com/p/robotframework/|Robot Framework] test library that uses the [https://github.com/kennethreitz/requests|Requests] HTTP client.
Here is an example testcase
| ***** Settings ***** | | | | |
| Library | Collections | | | |
| Library | RequestsLibrary | | | |
| ***** Test Cases ***** | | | | |
| Get Requests | | | | |
| | Create Session | github | http://api.github.com | |
| | Create Session | google | http://www.google.com | |
| | ${resp}= | Get Request | google | / |
| | Should Be Equal As Strings | ${resp.status_code} | 200 | |
| | ${resp}= | Get Request | github | /users/bulkan |
| | Should Be Equal As Strings | ${resp.status_code} | 200 | |
| | Dictionary Should Contain Value | ${resp.json()} | Bulkan Savun Evcimen | |
"""
ROBOT_LIBRARY_SCOPE = 'Global'
def __init__(self):
    # Named cache of requests.Session objects, keyed by user-supplied alias.
    self._cache = robot.utils.ConnectionCache('No sessions created')
    # Handle to Robot Framework's BuiltIn library for logging/conversions.
    self.builtin = BuiltIn()
    # httplib debug verbosity; raised per-session in _create_session.
    self.debug = 0
def _create_session(
        self,
        alias,
        url,
        headers,
        cookies,
        auth,
        timeout,
        max_retries,
        backoff_factor,
        proxies,
        verify,
        debug,
        disable_warnings):
    """ Create Session: create a HTTP session to a server

    ``url`` Base url of the server

    ``alias`` Robot Framework alias to identify the session

    ``headers`` Dictionary of default headers

    ``cookies`` Dictionary of cookies

    ``auth`` List of username & password for HTTP Basic Auth

    ``timeout`` Connection timeout

    ``max_retries`` The maximum number of retries each connection should attempt.

    ``backoff_factor`` The pause between for each retry

    ``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication

    ``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.

    ``debug`` Enable http verbosity option more information
    https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel

    ``disable_warnings`` Disable requests warning useful when you have large number of testcases
    """
    self.builtin.log('Creating session: %s' % alias, 'DEBUG')
    s = session = requests.Session()
    s.headers.update(headers)
    s.auth = auth if auth else s.auth
    s.proxies = proxies if proxies else s.proxies

    try:
        max_retries = int(max_retries)
    except ValueError as err:
        raise ValueError("Error converting max_retries parameter: %s" % err)

    if max_retries > 0:
        # Mount retrying adapters for both schemes so transient connection
        # failures are retried with the configured exponential backoff.
        http = requests.adapters.HTTPAdapter(max_retries=Retry(total=max_retries, backoff_factor=backoff_factor))
        https = requests.adapters.HTTPAdapter(max_retries=Retry(total=max_retries, backoff_factor=backoff_factor))

        # Replace the session's original adapters
        s.mount('http://', http)
        s.mount('https://', https)

    # Disable requests warnings, useful when you have large number of testcase
    # you will observe drastical changes in Robot log.html and output.xml files size
    if disable_warnings:
        logging.basicConfig()  # you need to initialize logging, otherwise you will not see anything from requests
        logging.getLogger().setLevel(logging.ERROR)
        requests_log = logging.getLogger("requests")
        requests_log.setLevel(logging.ERROR)
        requests_log.propagate = True
        if not verify:
            requests.packages.urllib3.disable_warnings()

    # verify can be a Boolean or a String
    if isinstance(verify, bool):
        s.verify = verify
    elif isinstance(verify, str) or isinstance(verify, unicode):
        if verify.lower() == 'true' or verify.lower() == 'false':
            # Boolean-like strings ("True"/"False") become real booleans.
            s.verify = self.builtin.convert_to_boolean(verify)
        else:
            # String for CA_BUNDLE, not a Boolean String
            s.verify = verify
    else:
        # not a Boolean nor a String
        s.verify = verify

    # cant pass these into the Session anymore
    self.timeout = float(timeout) if timeout is not None else None
    self.cookies = cookies
    self.verify = verify if self.builtin.convert_to_boolean(verify) != True else None

    s.url = url

    # Enable http verbosity
    if int(debug) >= 1:
        self.debug = int(debug)
        httplib.HTTPConnection.debuglevel = self.debug

    self._cache.register(session, alias=alias)
    return session
def create_session(self, alias, url, headers={}, cookies={},
                   auth=None, timeout=None, proxies=None,
                   verify=False, debug=0, max_retries=3, backoff_factor=0.10, disable_warnings=0):
    """ Create Session: create a HTTP session to a server

    ``url`` Base url of the server

    ``alias`` Robot Framework alias to identify the session

    ``headers`` Dictionary of default headers

    ``cookies`` Dictionary of cookies

    ``auth`` List of username & password for HTTP Basic Auth

    ``timeout`` Connection timeout

    ``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication

    ``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.
    Defaults to False.

    ``debug`` Enable http verbosity option more information
    https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel

    ``max_retries`` The maximum number of retries each connection should attempt.

    ``backoff_factor`` The pause between for each retry

    ``disable_warnings`` Disable requests warning useful when you have large number of testcases
    """
    # Wrap the [user, password] pair in requests' Basic Auth handler.
    auth = requests.auth.HTTPBasicAuth(*auth) if auth else None

    logger.info('Creating Session using : alias=%s, url=%s, headers=%s, \
    cookies=%s, auth=%s, timeout=%s, proxies=%s, verify=%s, \
    debug=%s ' % (alias, url, headers, cookies, auth, timeout,
                  proxies, verify, debug))

    return self._create_session(
        alias,
        url,
        headers,
        cookies,
        auth,
        timeout,
        max_retries,
        backoff_factor,
        proxies,
        verify,
        debug,
        disable_warnings)
def create_custom_session(
        self,
        alias,
        url,
        auth,
        headers={},
        cookies={},
        timeout=None,
        proxies=None,
        verify=False,
        debug=0,
        max_retries=3,
        backoff_factor=0.10,
        disable_warnings=0):
    """ Create Session: create a HTTP session to a server

    ``url`` Base url of the server

    ``alias`` Robot Framework alias to identify the session

    ``headers`` Dictionary of default headers

    ``cookies`` Dictionary of cookies

    ``auth`` A Custom Authentication object to be passed on to the reqests library.
    http://docs.python-requests.org/en/master/user/advanced/#custom-authentication

    ``timeout`` Connection timeout

    ``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication

    ``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.
    Defaults to False.

    ``debug`` Enable http verbosity option more information
    https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel

    ``max_retries`` The maximum number of retries each connection should attempt.

    ``backoff_factor`` The pause between for each retry

    ``disable_warnings`` Disable requests warning useful when you have large number of testcases
    """
    # Unlike create_session, *auth* is passed straight through so callers
    # can supply any requests-compatible AuthBase instance.
    logger.info('Creating Custom Authenticated Session using : alias=%s, url=%s, headers=%s, \
    cookies=%s, auth=%s, timeout=%s, proxies=%s, verify=%s, \
    debug=%s ' % (alias, url, headers, cookies, auth, timeout,
                  proxies, verify, debug))

    return self._create_session(
        alias,
        url,
        headers,
        cookies,
        auth,
        timeout,
        max_retries,
        backoff_factor,
        proxies,
        verify,
        debug,
        disable_warnings)
def create_ntlm_session(
        self,
        alias,
        url,
        auth,
        headers={},
        cookies={},
        timeout=None,
        proxies=None,
        verify=False,
        debug=0,
        max_retries=3,
        backoff_factor=0.10,
        disable_warnings=0):
    """ Create Session: create a HTTP session to a server

    ``url`` Base url of the server

    ``alias`` Robot Framework alias to identify the session

    ``headers`` Dictionary of default headers

    ``cookies`` Dictionary of cookies

    ``auth`` ['DOMAIN', 'username', 'password'] for NTLM Authentication

    ``timeout`` Connection timeout

    ``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication

    ``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.
    Defaults to False.

    ``debug`` Enable http verbosity option more information
    https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel

    ``max_retries`` The maximum number of retries each connection should attempt.

    ``backoff_factor`` The pause between for each retry

    ``disable_warnings`` Disable requests warning useful when you have large number of testcases
    """
    # requests_ntlm is an optional dependency; HttpNtlmAuth is None when
    # the import at module load failed.
    if not HttpNtlmAuth:
        raise AssertionError('Requests NTLM module not loaded')
    elif len(auth) != 3:
        raise AssertionError('Incorrect number of authentication arguments'
                             ' - expected 3, got {}'.format(len(auth)))
    else:
        # NTLM wants the username as DOMAIN\username.
        ntlm_auth = HttpNtlmAuth('{}\\{}'.format(auth[0], auth[1]),
                                 auth[2])
        logger.info('Creating NTLM Session using : alias=%s, url=%s, \
        headers=%s, cookies=%s, ntlm_auth=%s, timeout=%s, \
        proxies=%s, verify=%s, debug=%s '
                    % (alias, url, headers, cookies, ntlm_auth,
                       timeout, proxies, verify, debug))

        return self._create_session(
            alias,
            url,
            headers,
            cookies,
            ntlm_auth,
            timeout,
            max_retries,
            backoff_factor,
            proxies,
            verify,
            debug,
            disable_warnings)
def create_client_cert_session(self, alias, url, headers={}, cookies={},
                               client_certs=None, timeout=None, proxies=None,
                               verify=False, debug=0, max_retries=3, backoff_factor=0.10, disable_warnings=0):
    """ Create Session: create a HTTP session to a server

    ``url`` Base url of the server

    ``alias`` Robot Framework alias to identify the session

    ``headers`` Dictionary of default headers

    ``cookies`` Dictionary of cookies

    ``client_certs`` ['client certificate', 'client key'] PEM files containing the client key and certificate

    ``timeout`` Connection timeout

    ``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication

    ``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.
    Defaults to False.

    ``debug`` Enable http verbosity option more information
    https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel

    ``max_retries`` The maximum number of retries each connection should attempt.

    ``backoff_factor`` The pause between for each retry

    ``disable_warnings`` Disable requests warning useful when you have large number of testcases
    """
    logger.info('Creating Session using : alias=%s, url=%s, headers=%s, \
    cookies=%s, client_certs=%s, timeout=%s, proxies=%s, verify=%s, \
    debug=%s ' % (alias, url, headers, cookies, client_certs, timeout,
                  proxies, verify, debug))

    session = self._create_session(
        alias,
        url,
        headers,
        cookies,
        None,
        timeout,
        max_retries,
        backoff_factor,
        proxies,
        verify,
        debug,
        disable_warnings)

    # requests expects the (cert, key) pair as a tuple on session.cert.
    session.cert = tuple(client_certs)
    return session
def delete_all_sessions(self):
    """ Removes all the session objects """
    # Drop every cached requests.Session; later keywords must re-create them.
    logger.info('Delete All Sessions')
    self._cache.empty_cache()
def update_session(self, alias, headers=None, cookies=None):
    """Update Session Headers: update a HTTP Session Headers

    ``alias`` Robot Framework alias to identify the session

    ``headers`` Dictionary of headers merge into session

    ``cookies`` Dictionary of cookies merge into session
    """
    session = self._cache.switch(alias)
    # merge_setting / merge_cookies come from requests.sessions and fold
    # the new values into the session's existing ones.
    session.headers = merge_setting(headers, session.headers)
    session.cookies = merge_cookies(session.cookies, cookies)
def to_json(self, content, pretty_print=False):
    """ Convert a string to a JSON object

    ``content`` String content to convert into JSON

    ``pretty_print`` If defined, will output JSON is pretty print format
    """
    # On Python 3, accept bytes payloads (e.g. response.content) as UTF-8.
    if PY3 and isinstance(content, bytes):
        content = content.decode(encoding='utf-8')
    result = self._json_pretty_print(content) if pretty_print else json.loads(content)
    logger.info('To JSON using : content=%s ' % (content))
    logger.info('To JSON using : pretty_print=%s ' % (pretty_print))
    return result
def get_request(
        self,
        alias,
        uri,
        headers=None,
        json=None,
        params=None,
        allow_redirects=None,
        timeout=None):
    """ Send a GET request on the session object found using the
    given `alias`

    ``alias`` that will be used to identify the Session object in the cache

    ``uri`` to send the GET request to

    ``params`` url parameters to append to the uri

    ``headers`` a dictionary of headers to use with the request

    ``json`` json data to send in the body of the :class:`Request`.

    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.

    ``timeout`` connection timeout
    """
    cached_session = self._cache.switch(alias)
    # GET follows redirects by default.
    follow = allow_redirects if allow_redirects is not None else True
    resp = self._get_request(
        cached_session, uri, params, headers, json, follow, timeout)
    logger.info(
        'Get Request using : alias=%s, uri=%s, headers=%s json=%s' %
        (alias, uri, headers, json))
    return resp
def get(
        self,
        alias,
        uri,
        params=None,
        headers=None,
        allow_redirects=None,
        timeout=None):
    """ **Deprecated- See Get Request now**

    Send a GET request on the session object found using the
    given `alias`

    ``alias`` that will be used to identify the Session object in the cache

    ``uri`` to send the GET request to

    ``params`` url parameters to append to the uri

    ``headers`` a dictionary of headers to use with the request

    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.

    ``timeout`` connection timeout
    """
    logger.warn("Deprecation Warning: Use Get Request in the future")
    session = self._cache.switch(alias)
    redir = True if allow_redirects is None else allow_redirects
    # BUG FIX: arguments were previously passed positionally in the wrong
    # order — ``redir`` landed in the ``json`` slot, ``timeout`` in
    # ``allow_redirects``, and the global ``json`` module in ``timeout``.
    # _get_request's signature is
    # (session, uri, params, headers, json, allow_redirects, timeout).
    response = self._get_request(
        session, uri, params, headers, None, redir, timeout)
    return response
def post_request(
        self,
        alias,
        uri,
        data=None,
        json=None,
        params=None,
        headers=None,
        files=None,
        allow_redirects=None,
        timeout=None):
    """ Send a POST request on the session object found using the
    given `alias`

    ``alias`` that will be used to identify the Session object in the cache

    ``uri`` to send the POST request to

    ``data`` a dictionary of key-value pairs that will be urlencoded
    and sent as POST data
    or binary data that is sent as the raw body content
    or passed as such for multipart form data if ``files`` is also
    defined

    ``json`` a value that will be json encoded
    and sent as POST data if files or data is not specified

    ``params`` url parameters to append to the uri

    ``headers`` a dictionary of headers to use with the request

    ``files`` a dictionary of file names containing file data to POST to the server

    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.

    ``timeout`` connection timeout
    """
    session = self._cache.switch(alias)
    # For multipart uploads the raw data must be left untouched; otherwise
    # serialize it to match the effective Content-Type.
    if not files:
        data = self._format_data_according_to_header(session, data, headers)
    redir = True if allow_redirects is None else allow_redirects

    response = self._body_request(
        "post",
        session,
        uri,
        data,
        json,
        params,
        files,
        headers,
        redir,
        timeout)
    # Log a printable rendering of the body (bytes decoded, binary elided).
    dataStr = self._format_data_to_log_string_according_to_header(data, headers)
    logger.info('Post Request using : alias=%s, uri=%s, data=%s, headers=%s, files=%s, allow_redirects=%s '
                % (alias, uri, dataStr, headers, files, redir))
    return response
def post(
        self,
        alias,
        uri,
        data={},
        headers=None,
        files=None,
        allow_redirects=None,
        timeout=None):
    """ **Deprecated- See Post Request now**

    Send a POST request on the session object found using the
    given `alias`

    ``alias`` that will be used to identify the Session object in the cache

    ``uri`` to send the GET request to

    ``data`` a dictionary of key-value pairs that will be urlencoded
    and sent as POST data
    or binary data that is sent as the raw body content

    ``headers`` a dictionary of headers to use with the request

    ``files`` a dictionary of file names containing file data to POST to the server

    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.

    ``timeout`` connection timeout
    """
    logger.warn("Deprecation Warning: Use Post Request in the future")
    session = self._cache.switch(alias)
    # Legacy behaviour: always urlencode, regardless of Content-Type.
    data = self._utf8_urlencode(data)
    redir = True if allow_redirects is None else allow_redirects

    response = self._body_request(
        "post",
        session,
        uri,
        data,
        None,
        None,
        files,
        headers,
        redir,
        timeout)
    return response
def patch_request(
        self,
        alias,
        uri,
        data=None,
        json=None,
        params=None,
        headers=None,
        files=None,
        allow_redirects=None,
        timeout=None):
    """ Send a PATCH request on the session object found using the
    given `alias`

    ``alias`` that will be used to identify the Session object in the cache

    ``uri`` to send the PATCH request to

    ``data`` a dictionary of key-value pairs that will be urlencoded
    and sent as PATCH data
    or binary data that is sent as the raw body content

    ``json`` a value that will be json encoded
    and sent as PATCH data if data is not specified

    ``headers`` a dictionary of headers to use with the request

    ``files`` a dictionary of file names containing file data to PATCH to the server

    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.

    ``params`` url parameters to append to the uri

    ``timeout`` connection timeout
    """
    session = self._cache.switch(alias)
    data = self._format_data_according_to_header(session, data, headers)
    redir = True if allow_redirects is None else allow_redirects

    response = self._body_request(
        "patch",
        session,
        uri,
        data,
        json,
        params,
        files,
        headers,
        redir,
        timeout)

    # Decode bytes bodies so the log line below is readable.
    if isinstance(data, bytes):
        data = data.decode('utf-8')
    logger.info('Patch Request using : alias=%s, uri=%s, data=%s, \
    headers=%s, files=%s, allow_redirects=%s '
                % (alias, uri, data, headers, files, redir))

    return response
def patch(
        self,
        alias,
        uri,
        data={},
        headers=None,
        files={},
        allow_redirects=None,
        timeout=None):
    """ **Deprecated- See Patch Request now**

    Send a PATCH request on the session object found using the
    given `alias`

    ``alias`` that will be used to identify the Session object in the cache

    ``uri`` to send the PATCH request to

    ``data`` a dictionary of key-value pairs that will be urlencoded
    and sent as PATCH data
    or binary data that is sent as the raw body content

    ``headers`` a dictionary of headers to use with the request

    ``files`` a dictionary of file names containing file data to PATCH to the server

    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.

    ``timeout`` connection timeout
    """
    logger.warn("Deprecation Warning: Use Patch Request in the future")
    session = self._cache.switch(alias)
    # Legacy behaviour: always urlencode, regardless of Content-Type.
    data = self._utf8_urlencode(data)
    redir = True if allow_redirects is None else allow_redirects

    response = self._body_request(
        "patch",
        session,
        uri,
        data,
        None,
        None,
        files,
        headers,
        redir,
        timeout)
    return response
def put_request(
        self,
        alias,
        uri,
        data=None,
        json=None,
        params=None,
        files=None,
        headers=None,
        allow_redirects=None,
        timeout=None):
    """ Send a PUT request on the session object found using the
    given `alias`

    ``alias`` that will be used to identify the Session object in the cache

    ``uri`` to send the PUT request to

    ``data`` a dictionary of key-value pairs that will be urlencoded
    and sent as PUT data
    or binary data that is sent as the raw body content

    ``json`` a value that will be json encoded
    and sent as PUT data if data is not specified

    ``headers`` a dictionary of headers to use with the request

    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.

    ``params`` url parameters to append to the uri

    ``timeout`` connection timeout
    """
    session = self._cache.switch(alias)
    data = self._format_data_according_to_header(session, data, headers)
    redir = True if allow_redirects is None else allow_redirects

    response = self._body_request(
        "put",
        session,
        uri,
        data,
        json,
        params,
        files,
        headers,
        redir,
        timeout)

    # Decode bytes bodies so the log line below is readable.
    if isinstance(data, bytes):
        data = data.decode('utf-8')
    logger.info('Put Request using : alias=%s, uri=%s, data=%s, \
    headers=%s, allow_redirects=%s ' % (alias, uri, data, headers, redir))

    return response
def put(
        self,
        alias,
        uri,
        data=None,
        headers=None,
        allow_redirects=None,
        timeout=None):
    """ **Deprecated- See Put Request now**

    Send a PUT request on the session object found using the
    given `alias`

    ``alias`` that will be used to identify the Session object in the cache

    ``uri`` to send the PUT request to

    ``data`` a dictionary of key-value pairs that will be urlencoded
    and sent as PUT data, or binary data sent as the raw body content

    ``headers`` a dictionary of headers to use with the request

    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.

    ``timeout`` connection timeout
    """
    logger.warn("Deprecation Warning: Use Put Request in the future")
    session = self._cache.switch(alias)
    # Legacy behaviour: always urlencode, regardless of Content-Type.
    data = self._utf8_urlencode(data)
    redir = True if allow_redirects is None else allow_redirects

    response = self._body_request(
        "put",
        session,
        uri,
        data,
        None,
        None,
        None,
        headers,
        redir,
        timeout)
    return response
def delete_request(
        self,
        alias,
        uri,
        data=None,
        json=None,
        params=None,
        headers=None,
        allow_redirects=None,
        timeout=None):
    """ Send a DELETE request on the session object found using the
    given `alias`

    ``alias`` that will be used to identify the Session object in the cache

    ``uri`` to send the DELETE request to

    ``json`` a value that will be json encoded
    and sent as request data if data is not specified

    ``headers`` a dictionary of headers to use with the request

    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.

    ``timeout`` connection timeout
    """
    session = self._cache.switch(alias)
    data = self._format_data_according_to_header(session, data, headers)
    redir = True if allow_redirects is None else allow_redirects

    response = self._delete_request(
        session, uri, data, json, params, headers, redir, timeout)

    # Decode bytes bodies so the log line below is readable.
    if isinstance(data, bytes):
        data = data.decode('utf-8')
    logger.info('Delete Request using : alias=%s, uri=%s, data=%s, \
    headers=%s, allow_redirects=%s ' % (alias, uri, data, headers, redir))

    return response
def delete(
self,
alias,
uri,
data=(),
headers=None,
allow_redirects=None,
timeout=None):
""" * * * Deprecated- See Delete Request now * * *
Send a DELETE request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the DELETE request to
``headers`` a dictionary of headers to use with the request
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``timeout`` connection timeout
"""
logger.warn("Deprecation Warning: Use Delete Request in the future")
session = self._cache.switch(alias)
data = self._utf8_urlencode(data)
redir = True if allow_redirects is None else allow_redirects
response = self._delete_request(
session, uri, data, json, None, headers, redir, timeout)
return response
def head_request(
self,
alias,
uri,
headers=None,
allow_redirects=None,
timeout=None):
""" Send a HEAD request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the HEAD request to
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``headers`` a dictionary of headers to use with the request
"""
session = self._cache.switch(alias)
redir = False if allow_redirects is None else allow_redirects
response = self._head_request(session, uri, headers, redir, timeout)
logger.info('Head Request using : alias=%s, uri=%s, headers=%s, \
allow_redirects=%s ' % (alias, uri, headers, redir))
return response
def head(
self,
alias,
uri,
headers=None,
allow_redirects=None,
timeout=None):
""" **Deprecated- See Head Request now**
Send a HEAD request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the HEAD request to
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``headers`` a dictionary of headers to use with the request
"""
logger.warn("Deprecation Warning: Use Head Request in the future")
session = self._cache.switch(alias)
redir = False if allow_redirects is None else allow_redirects
response = self._head_request(session, uri, headers, redir, timeout)
return response
def options_request(
self,
alias,
uri,
headers=None,
allow_redirects=None,
timeout=None):
""" Send an OPTIONS request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the OPTIONS request to
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``headers`` a dictionary of headers to use with the request
"""
session = self._cache.switch(alias)
redir = True if allow_redirects is None else allow_redirects
response = self._options_request(session, uri, headers, redir, timeout)
logger.info(
'Options Request using : alias=%s, uri=%s, headers=%s, allow_redirects=%s ' %
(alias, uri, headers, redir))
return response
def options(
self,
alias,
uri,
headers=None,
allow_redirects=None,
timeout=None):
""" **Deprecated- See Options Request now**
Send an OPTIONS request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the OPTIONS request to
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``headers`` a dictionary of headers to use with the request
"""
logger.warn("Deprecation Warning: Use Options Request in the future")
session = self._cache.switch(alias)
redir = True if allow_redirects is None else allow_redirects
response = self._options_request(session, uri, headers, redir, timeout)
return response
def _get_request(
self,
session,
uri,
params,
headers,
json,
allow_redirects,
timeout):
self._capture_output()
resp = session.get(self._get_url(session, uri),
headers=headers,
json=json,
params=self._utf8_urlencode(params),
allow_redirects=allow_redirects,
timeout=self._get_timeout(timeout),
cookies=self.cookies,
verify=self.verify)
self._print_debug()
# Store the last session object
session.last_resp = resp
return resp
def _body_request(
self,
method_name,
session,
uri,
data,
json,
params,
files,
headers,
allow_redirects,
timeout):
self._capture_output()
method = getattr(session, method_name)
resp = method(self._get_url(session, uri),
data=data,
json=json,
params=self._utf8_urlencode(params),
files=files,
headers=headers,
allow_redirects=allow_redirects,
timeout=self._get_timeout(timeout),
cookies=self.cookies,
verify=self.verify)
self._print_debug()
# Store the last session object
session.last_resp = resp
self.builtin.log(method_name + ' response: ' + resp.text, 'DEBUG')
return resp
def _delete_request(
self,
session,
uri,
data,
json,
params,
headers,
allow_redirects,
timeout):
self._capture_output()
resp = session.delete(self._get_url(session, uri),
data=data,
json=json,
params=self._utf8_urlencode(params),
headers=headers,
allow_redirects=allow_redirects,
timeout=self._get_timeout(timeout),
cookies=self.cookies,
verify=self.verify)
self._print_debug()
# Store the last session object
session.last_resp = resp
return resp
def _head_request(self, session, uri, headers, allow_redirects, timeout):
self._capture_output()
resp = session.head(self._get_url(session, uri),
headers=headers,
allow_redirects=allow_redirects,
timeout=self._get_timeout(timeout),
cookies=self.cookies,
verify=self.verify)
self._print_debug()
# Store the last session object
session.last_resp = resp
return resp
def _options_request(
self,
session,
uri,
headers,
allow_redirects,
timeout):
self._capture_output()
resp = session.options(self._get_url(session, uri),
headers=headers,
cookies=self.cookies,
allow_redirects=allow_redirects,
timeout=self._get_timeout(timeout),
verify=self.verify)
self._print_debug()
# Store the last session object
session.last_resp = resp
return resp
def _get_url(self, session, uri):
"""
Helper method to get the full url
"""
url = session.url
if uri:
slash = '' if uri.startswith('/') else '/'
url = "%s%s%s" % (session.url, slash, uri)
return url
def _get_timeout(self, timeout):
return float(timeout) if timeout is not None else self.timeout
def _capture_output(self):
if self.debug >= 1:
self.http_log = WritableObject()
sys.stdout = self.http_log
    def _print_debug(self):
        """Restore stdout and log any httplib debug output captured by
        ``_capture_output`` at Robot Framework DEBUG level."""
        if self.debug >= 1:
            sys.stdout = sys.__stdout__  # Restore stdout
            if PY3:
                # Strip literal "\r" escape sequences and quote characters
                # that httplib's print-based debug output carries.
                debug_info = ''.join(
                    self.http_log.content).replace(
                    '\\r',
                    '').replace(
                    '\'',
                    '')
            else:
                # Python 2: the captured text contains escape sequences that
                # must be decoded ('string_escape') before cleaning quotes.
                debug_info = ''.join(
                    self.http_log.content).replace(
                    '\\r',
                    '').decode('string_escape').replace(
                    '\'',
                    '')
            # Remove empty lines
            debug_info = "\n".join(
                [ll.rstrip() for ll in debug_info.splitlines() if ll.strip()])
            self.builtin.log(debug_info, 'DEBUG')
def _json_pretty_print(self, content):
"""
Pretty print a JSON object
``content`` JSON object to pretty print
"""
temp = json.loads(content)
return json.dumps(
temp,
sort_keys=True,
indent=4,
separators=(
',',
': '))
def _utf8_urlencode(self, data):
if self._is_string_type(data):
return data.encode('utf-8')
if not isinstance(data, dict):
return data
utf8_data = {}
for k, v in data.items():
if self._is_string_type(v):
v = v.encode('utf-8')
utf8_data[k] = v
return urlencode(utf8_data)
def _format_data_according_to_header(self, session, data, headers):
headers = self._merge_headers(session, headers)
if data is not None and headers is not None and 'Content-Type' in headers and not self._is_json(data):
if headers['Content-Type'].find("application/json") != -1:
if not isinstance(data, types.GeneratorType):
data = json.dumps(data)
elif headers['Content-Type'].find("application/x-www-form-urlencoded") != -1:
data = self._utf8_urlencode(data)
else:
data = self._utf8_urlencode(data)
return data
def _format_data_to_log_string_according_to_header(self, data, headers):
dataStr = "<empty>"
if data is not None and headers is not None and 'Content-Type' in headers:
if (headers['Content-Type'].find("application/json") != -1) or \
(headers['Content-Type'].find("application/x-www-form-urlencoded") != -1):
if isinstance(data, bytes):
dataStr = data.decode('utf-8')
else:
dataStr = data
else:
dataStr = "<" + headers['Content-Type'] + ">"
return dataStr
@staticmethod
def _merge_headers(session, headers):
if headers is None:
headers = {}
else:
headers = headers.copy()
headers.update(session.headers)
return headers
@staticmethod
def _is_json(data):
try:
json.loads(data)
except (TypeError, ValueError):
return False
return True
@staticmethod
def _is_string_type(data):
if PY3 and isinstance(data, str):
return True
elif not PY3 and isinstance(data, unicode):
return True
return False
|
bulkan/robotframework-requests
|
src/RequestsLibrary/RequestsKeywords.py
|
RequestsKeywords.create_client_cert_session
|
python
|
def create_client_cert_session(self, alias, url, headers={}, cookies={},
client_certs=None, timeout=None, proxies=None,
verify=False, debug=0, max_retries=3, backoff_factor=0.10, disable_warnings=0):
logger.info('Creating Session using : alias=%s, url=%s, headers=%s, \
cookies=%s, client_certs=%s, timeout=%s, proxies=%s, verify=%s, \
debug=%s ' % (alias, url, headers, cookies, client_certs, timeout,
proxies, verify, debug))
session = self._create_session(
alias,
url,
headers,
cookies,
None,
timeout,
max_retries,
backoff_factor,
proxies,
verify,
debug,
disable_warnings)
session.cert = tuple(client_certs)
return session
|
Create Session: create a HTTP session to a server
``url`` Base url of the server
``alias`` Robot Framework alias to identify the session
``headers`` Dictionary of default headers
``cookies`` Dictionary of cookies
``client_certs`` ['client certificate', 'client key'] PEM files containing the client key and certificate
``timeout`` Connection timeout
``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication
``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.
Defaults to False.
``debug`` Enable http verbosity option more information
https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel
``max_retries`` The maximum number of retries each connection should attempt.
``backoff_factor`` The pause between for each retry
``disable_warnings`` Disable requests warning useful when you have large number of testcases
|
train
|
https://github.com/bulkan/robotframework-requests/blob/11baa3277f1cb728712e26d996200703c15254a8/src/RequestsLibrary/RequestsKeywords.py#L389-L441
|
[
"def _create_session(\n self,\n alias,\n url,\n headers,\n cookies,\n auth,\n timeout,\n max_retries,\n backoff_factor,\n proxies,\n verify,\n debug,\n disable_warnings):\n \"\"\" Create Session: create a HTTP session to a server\n\n ``url`` Base url of the server\n\n ``alias`` Robot Framework alias to identify the session\n\n ``headers`` Dictionary of default headers\n\n ``cookies`` Dictionary of cookies\n\n ``auth`` List of username & password for HTTP Basic Auth\n\n ``timeout`` Connection timeout\n\n ``max_retries`` The maximum number of retries each connection should attempt.\n\n ``backoff_factor`` The pause between for each retry\n\n ``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication\n\n ``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.\n\n ``debug`` Enable http verbosity option more information\n https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel\n\n ``disable_warnings`` Disable requests warning useful when you have large number of testcases\n \"\"\"\n\n self.builtin.log('Creating session: %s' % alias, 'DEBUG')\n s = session = requests.Session()\n s.headers.update(headers)\n s.auth = auth if auth else s.auth\n s.proxies = proxies if proxies else s.proxies\n\n try:\n max_retries = int(max_retries)\n except ValueError as err:\n raise ValueError(\"Error converting max_retries parameter: %s\" % err)\n\n if max_retries > 0:\n http = requests.adapters.HTTPAdapter(max_retries=Retry(total=max_retries, backoff_factor=backoff_factor))\n https = requests.adapters.HTTPAdapter(max_retries=Retry(total=max_retries, backoff_factor=backoff_factor))\n\n # Replace the session's original adapters\n s.mount('http://', http)\n s.mount('https://', https)\n\n # Disable requests warnings, useful when you have large number of testcase\n # you will observe drastical changes in Robot log.html and output.xml files size\n if disable_warnings:\n logging.basicConfig() # you need to 
initialize logging, otherwise you will not see anything from requests\n logging.getLogger().setLevel(logging.ERROR)\n requests_log = logging.getLogger(\"requests\")\n requests_log.setLevel(logging.ERROR)\n requests_log.propagate = True\n if not verify:\n requests.packages.urllib3.disable_warnings()\n\n # verify can be a Boolean or a String\n if isinstance(verify, bool):\n s.verify = verify\n elif isinstance(verify, str) or isinstance(verify, unicode):\n if verify.lower() == 'true' or verify.lower() == 'false':\n s.verify = self.builtin.convert_to_boolean(verify)\n else:\n # String for CA_BUNDLE, not a Boolean String\n s.verify = verify\n else:\n # not a Boolean nor a String\n s.verify = verify\n\n # cant pass these into the Session anymore\n self.timeout = float(timeout) if timeout is not None else None\n self.cookies = cookies\n self.verify = verify if self.builtin.convert_to_boolean(verify) != True else None\n\n s.url = url\n\n # Enable http verbosity\n if int(debug) >= 1:\n self.debug = int(debug)\n httplib.HTTPConnection.debuglevel = self.debug\n\n self._cache.register(session, alias=alias)\n return session\n"
] |
class RequestsKeywords(object):
"""``RequestsLibrary`` is a [http://code.google.com/p/robotframework/|Robot Framework] test library that uses the [https://github.com/kennethreitz/requests|Requests] HTTP client.
Here is an example testcase
| ***** Settings ***** | | | | |
| Library | Collections | | | |
| Library | RequestsLibrary | | | |
| ***** Test Cases ***** | | | | |
| Get Requests | | | | |
| | Create Session | github | http://api.github.com | |
| | Create Session | google | http://www.google.com | |
| | ${resp}= | Get Request | google | / |
| | Should Be Equal As Strings | ${resp.status_code} | 200 | |
| | ${resp}= | Get Request | github | /users/bulkan |
| | Should Be Equal As Strings | ${resp.status_code} | 200 | |
| | Dictionary Should Contain Value | ${resp.json()} | Bulkan Savun Evcimen | |
"""
ROBOT_LIBRARY_SCOPE = 'Global'
    def __init__(self):
        # Cache of named requests.Session objects, keyed by the user alias.
        self._cache = robot.utils.ConnectionCache('No sessions created')
        # Robot Framework BuiltIn library, used for logging and conversions.
        self.builtin = BuiltIn()
        # httplib debug verbosity; raised per session via the ``debug`` option.
        self.debug = 0
    def _create_session(
            self,
            alias,
            url,
            headers,
            cookies,
            auth,
            timeout,
            max_retries,
            backoff_factor,
            proxies,
            verify,
            debug,
            disable_warnings):
        """ Create Session: create a HTTP session to a server

        ``url`` Base url of the server

        ``alias`` Robot Framework alias to identify the session

        ``headers`` Dictionary of default headers

        ``cookies`` Dictionary of cookies

        ``auth`` List of username & password for HTTP Basic Auth

        ``timeout`` Connection timeout

        ``max_retries`` The maximum number of retries each connection should attempt.

        ``backoff_factor`` The pause between for each retry

        ``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication

        ``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.

        ``debug`` Enable http verbosity option more information
        https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel

        ``disable_warnings`` Disable requests warning useful when you have large number of testcases
        """
        self.builtin.log('Creating session: %s' % alias, 'DEBUG')
        s = session = requests.Session()
        s.headers.update(headers)
        s.auth = auth if auth else s.auth
        s.proxies = proxies if proxies else s.proxies

        try:
            max_retries = int(max_retries)
        except ValueError as err:
            raise ValueError("Error converting max_retries parameter: %s" % err)

        if max_retries > 0:
            # Mount retry-capable adapters for both schemes so transient
            # connection failures are retried with the requested backoff.
            http = requests.adapters.HTTPAdapter(max_retries=Retry(total=max_retries, backoff_factor=backoff_factor))
            https = requests.adapters.HTTPAdapter(max_retries=Retry(total=max_retries, backoff_factor=backoff_factor))

            # Replace the session's original adapters
            s.mount('http://', http)
            s.mount('https://', https)

        # Disable requests warnings, useful when you have large number of testcase
        # you will observe drastical changes in Robot log.html and output.xml files size
        if disable_warnings:
            logging.basicConfig()  # you need to initialize logging, otherwise you will not see anything from requests
            logging.getLogger().setLevel(logging.ERROR)
            requests_log = logging.getLogger("requests")
            requests_log.setLevel(logging.ERROR)
            requests_log.propagate = True
            if not verify:
                requests.packages.urllib3.disable_warnings()

        # verify can be a Boolean or a String
        if isinstance(verify, bool):
            s.verify = verify
        elif isinstance(verify, str) or isinstance(verify, unicode):
            if verify.lower() == 'true' or verify.lower() == 'false':
                # Boolean-looking string: convert to a real bool.
                s.verify = self.builtin.convert_to_boolean(verify)
            else:
                # String for CA_BUNDLE, not a Boolean String
                s.verify = verify
        else:
            # not a Boolean nor a String
            s.verify = verify

        # cant pass these into the Session anymore
        self.timeout = float(timeout) if timeout is not None else None
        self.cookies = cookies
        # Kept only when it is a CA bundle path / non-True value; per-request
        # calls read self.verify and fall back to the session's own setting.
        self.verify = verify if self.builtin.convert_to_boolean(verify) != True else None

        s.url = url

        # Enable http verbosity
        if int(debug) >= 1:
            self.debug = int(debug)
            httplib.HTTPConnection.debuglevel = self.debug

        self._cache.register(session, alias=alias)
        return session
def create_session(self, alias, url, headers={}, cookies={},
auth=None, timeout=None, proxies=None,
verify=False, debug=0, max_retries=3, backoff_factor=0.10, disable_warnings=0):
""" Create Session: create a HTTP session to a server
``url`` Base url of the server
``alias`` Robot Framework alias to identify the session
``headers`` Dictionary of default headers
``cookies`` Dictionary of cookies
``auth`` List of username & password for HTTP Basic Auth
``timeout`` Connection timeout
``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication
``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.
Defaults to False.
``debug`` Enable http verbosity option more information
https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel
``max_retries`` The maximum number of retries each connection should attempt.
``backoff_factor`` The pause between for each retry
``disable_warnings`` Disable requests warning useful when you have large number of testcases
"""
auth = requests.auth.HTTPBasicAuth(*auth) if auth else None
logger.info('Creating Session using : alias=%s, url=%s, headers=%s, \
cookies=%s, auth=%s, timeout=%s, proxies=%s, verify=%s, \
debug=%s ' % (alias, url, headers, cookies, auth, timeout,
proxies, verify, debug))
return self._create_session(
alias,
url,
headers,
cookies,
auth,
timeout,
max_retries,
backoff_factor,
proxies,
verify,
debug,
disable_warnings)
def create_custom_session(
self,
alias,
url,
auth,
headers={},
cookies={},
timeout=None,
proxies=None,
verify=False,
debug=0,
max_retries=3,
backoff_factor=0.10,
disable_warnings=0):
""" Create Session: create a HTTP session to a server
``url`` Base url of the server
``alias`` Robot Framework alias to identify the session
``headers`` Dictionary of default headers
``cookies`` Dictionary of cookies
``auth`` A Custom Authentication object to be passed on to the reqests library.
http://docs.python-requests.org/en/master/user/advanced/#custom-authentication
``timeout`` Connection timeout
``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication
``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.
Defaults to False.
``debug`` Enable http verbosity option more information
https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel
``max_retries`` The maximum number of retries each connection should attempt.
``backoff_factor`` The pause between for each retry
``disable_warnings`` Disable requests warning useful when you have large number of testcases
"""
logger.info('Creating Custom Authenticated Session using : alias=%s, url=%s, headers=%s, \
cookies=%s, auth=%s, timeout=%s, proxies=%s, verify=%s, \
debug=%s ' % (alias, url, headers, cookies, auth, timeout,
proxies, verify, debug))
return self._create_session(
alias,
url,
headers,
cookies,
auth,
timeout,
max_retries,
backoff_factor,
proxies,
verify,
debug,
disable_warnings)
def create_ntlm_session(
self,
alias,
url,
auth,
headers={},
cookies={},
timeout=None,
proxies=None,
verify=False,
debug=0,
max_retries=3,
backoff_factor=0.10,
disable_warnings=0):
""" Create Session: create a HTTP session to a server
``url`` Base url of the server
``alias`` Robot Framework alias to identify the session
``headers`` Dictionary of default headers
``cookies`` Dictionary of cookies
``auth`` ['DOMAIN', 'username', 'password'] for NTLM Authentication
``timeout`` Connection timeout
``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication
``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.
Defaults to False.
``debug`` Enable http verbosity option more information
https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel
``max_retries`` The maximum number of retries each connection should attempt.
``backoff_factor`` The pause between for each retry
``disable_warnings`` Disable requests warning useful when you have large number of testcases
"""
if not HttpNtlmAuth:
raise AssertionError('Requests NTLM module not loaded')
elif len(auth) != 3:
raise AssertionError('Incorrect number of authentication arguments'
' - expected 3, got {}'.format(len(auth)))
else:
ntlm_auth = HttpNtlmAuth('{}\\{}'.format(auth[0], auth[1]),
auth[2])
logger.info('Creating NTLM Session using : alias=%s, url=%s, \
headers=%s, cookies=%s, ntlm_auth=%s, timeout=%s, \
proxies=%s, verify=%s, debug=%s '
% (alias, url, headers, cookies, ntlm_auth,
timeout, proxies, verify, debug))
return self._create_session(
alias,
url,
headers,
cookies,
ntlm_auth,
timeout,
max_retries,
backoff_factor,
proxies,
verify,
debug,
disable_warnings)
def create_digest_session(self, alias, url, auth, headers={}, cookies={},
timeout=None, proxies=None, verify=False,
debug=0, max_retries=3,backoff_factor=0.10, disable_warnings=0):
""" Create Session: create a HTTP session to a server
``url`` Base url of the server
``alias`` Robot Framework alias to identify the session
``headers`` Dictionary of default headers
``cookies`` Dictionary of cookies
``auth`` ['DOMAIN', 'username', 'password'] for NTLM Authentication
``timeout`` Connection timeout
``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication
``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.
Defaults to False.
``debug`` Enable http verbosity option more information
https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel
``max_retries`` The maximum number of retries each connection should attempt.
``backoff_factor`` The pause between for each retry
``disable_warnings`` Disable requests warning useful when you have large number of testcases
"""
digest_auth = requests.auth.HTTPDigestAuth(*auth) if auth else None
return self._create_session(
alias,
url,
headers,
cookies,
digest_auth,
timeout,
max_retries,
backoff_factor,
proxies,
verify,
debug,
disable_warnings)
    def delete_all_sessions(self):
        """ Removes all the session objects """
        logger.info('Delete All Sessions')
        # Drops every cached Session and resets the alias cache.
        self._cache.empty_cache()
    def update_session(self, alias, headers=None, cookies=None):
        """Update Session Headers: update a HTTP Session Headers

        ``alias`` Robot Framework alias to identify the session

        ``headers`` Dictionary of headers merge into session
        """
        session = self._cache.switch(alias)
        # merge_setting/merge_cookies come from requests; existing session
        # values are combined with (not replaced by) the new ones.
        session.headers = merge_setting(headers, session.headers)
        session.cookies = merge_cookies(session.cookies, cookies)
def to_json(self, content, pretty_print=False):
""" Convert a string to a JSON object
``content`` String content to convert into JSON
``pretty_print`` If defined, will output JSON is pretty print format
"""
if PY3:
if isinstance(content, bytes):
content = content.decode(encoding='utf-8')
if pretty_print:
json_ = self._json_pretty_print(content)
else:
json_ = json.loads(content)
logger.info('To JSON using : content=%s ' % (content))
logger.info('To JSON using : pretty_print=%s ' % (pretty_print))
return json_
def get_request(
self,
alias,
uri,
headers=None,
json=None,
params=None,
allow_redirects=None,
timeout=None):
""" Send a GET request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the GET request to
``params`` url parameters to append to the uri
``headers`` a dictionary of headers to use with the request
``json`` json data to send in the body of the :class:`Request`.
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``timeout`` connection timeout
"""
session = self._cache.switch(alias)
redir = True if allow_redirects is None else allow_redirects
response = self._get_request(
session, uri, params, headers, json, redir, timeout)
logger.info(
'Get Request using : alias=%s, uri=%s, headers=%s json=%s' %
(alias, uri, headers, json))
return response
def get(
self,
alias,
uri,
params=None,
headers=None,
allow_redirects=None,
timeout=None):
""" **Deprecated- See Get Request now**
Send a GET request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the GET request to
``headers`` a dictionary of headers to use with the request
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``timeout`` connection timeout
"""
logger.warn("Deprecation Warning: Use Get Request in the future")
session = self._cache.switch(alias)
redir = True if allow_redirects is None else allow_redirects
response = self._get_request(
session, uri, params, headers, redir, timeout, json)
return response
def post_request(
self,
alias,
uri,
data=None,
json=None,
params=None,
headers=None,
files=None,
allow_redirects=None,
timeout=None):
""" Send a POST request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the POST request to
``data`` a dictionary of key-value pairs that will be urlencoded
and sent as POST data
or binary data that is sent as the raw body content
or passed as such for multipart form data if ``files`` is also
defined
``json`` a value that will be json encoded
and sent as POST data if files or data is not specified
``params`` url parameters to append to the uri
``headers`` a dictionary of headers to use with the request
``files`` a dictionary of file names containing file data to POST to the server
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``timeout`` connection timeout
"""
session = self._cache.switch(alias)
if not files:
data = self._format_data_according_to_header(session, data, headers)
redir = True if allow_redirects is None else allow_redirects
response = self._body_request(
"post",
session,
uri,
data,
json,
params,
files,
headers,
redir,
timeout)
dataStr = self._format_data_to_log_string_according_to_header(data, headers)
logger.info('Post Request using : alias=%s, uri=%s, data=%s, headers=%s, files=%s, allow_redirects=%s '
% (alias, uri, dataStr, headers, files, redir))
return response
def post(
self,
alias,
uri,
data={},
headers=None,
files=None,
allow_redirects=None,
timeout=None):
""" **Deprecated- See Post Request now**
Send a POST request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the GET request to
``data`` a dictionary of key-value pairs that will be urlencoded
and sent as POST data
or binary data that is sent as the raw body content
``headers`` a dictionary of headers to use with the request
``files`` a dictionary of file names containing file data to POST to the server
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``timeout`` connection timeout
"""
logger.warn("Deprecation Warning: Use Post Request in the future")
session = self._cache.switch(alias)
data = self._utf8_urlencode(data)
redir = True if allow_redirects is None else allow_redirects
response = self._body_request(
"post",
session,
uri,
data,
None,
None,
files,
headers,
redir,
timeout)
return response
def patch_request(
self,
alias,
uri,
data=None,
json=None,
params=None,
headers=None,
files=None,
allow_redirects=None,
timeout=None):
""" Send a PATCH request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the PATCH request to
``data`` a dictionary of key-value pairs that will be urlencoded
and sent as PATCH data
or binary data that is sent as the raw body content
``json`` a value that will be json encoded
and sent as PATCH data if data is not specified
``headers`` a dictionary of headers to use with the request
``files`` a dictionary of file names containing file data to PATCH to the server
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``params`` url parameters to append to the uri
``timeout`` connection timeout
"""
session = self._cache.switch(alias)
data = self._format_data_according_to_header(session, data, headers)
redir = True if allow_redirects is None else allow_redirects
response = self._body_request(
"patch",
session,
uri,
data,
json,
params,
files,
headers,
redir,
timeout)
if isinstance(data, bytes):
data = data.decode('utf-8')
logger.info('Patch Request using : alias=%s, uri=%s, data=%s, \
headers=%s, files=%s, allow_redirects=%s '
% (alias, uri, data, headers, files, redir))
return response
def patch(
self,
alias,
uri,
data={},
headers=None,
files={},
allow_redirects=None,
timeout=None):
""" **Deprecated- See Patch Request now**
Send a PATCH request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the PATCH request to
``data`` a dictionary of key-value pairs that will be urlencoded
and sent as PATCH data
or binary data that is sent as the raw body content
``headers`` a dictionary of headers to use with the request
``files`` a dictionary of file names containing file data to PATCH to the server
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``timeout`` connection timeout
"""
logger.warn("Deprecation Warning: Use Patch Request in the future")
session = self._cache.switch(alias)
data = self._utf8_urlencode(data)
redir = True if allow_redirects is None else allow_redirects
response = self._body_request(
"patch",
session,
uri,
data,
None,
None,
files,
headers,
redir,
timeout)
return response
def put_request(
self,
alias,
uri,
data=None,
json=None,
params=None,
files=None,
headers=None,
allow_redirects=None,
timeout=None):
""" Send a PUT request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the PUT request to
``data`` a dictionary of key-value pairs that will be urlencoded
and sent as PUT data
or binary data that is sent as the raw body content
``json`` a value that will be json encoded
and sent as PUT data if data is not specified
``headers`` a dictionary of headers to use with the request
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``params`` url parameters to append to the uri
``timeout`` connection timeout
"""
session = self._cache.switch(alias)
data = self._format_data_according_to_header(session, data, headers)
redir = True if allow_redirects is None else allow_redirects
response = self._body_request(
"put",
session,
uri,
data,
json,
params,
files,
headers,
redir,
timeout)
if isinstance(data, bytes):
data = data.decode('utf-8')
logger.info('Put Request using : alias=%s, uri=%s, data=%s, \
headers=%s, allow_redirects=%s ' % (alias, uri, data, headers, redir))
return response
def put(
        self,
        alias,
        uri,
        data=None,
        headers=None,
        allow_redirects=None,
        timeout=None):
    """ **Deprecated- See Put Request now**
    Send a PUT request on the session object found using the
    given `alias`
    ``alias`` that will be used to identify the Session object in the cache
    ``uri`` to send the PUT request to
    ``headers`` a dictionary of headers to use with the request
    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
    ``timeout`` connection timeout
    """
    logger.warn("Deprecation Warning: Use Put Request in the future")
    session = self._cache.switch(alias)
    # Legacy behaviour: body is always urlencoded, never json-encoded.
    data = self._utf8_urlencode(data)
    redir = True if allow_redirects is None else allow_redirects
    response = self._body_request(
        "put",
        session,
        uri,
        data,
        None,  # json payload not supported by this deprecated keyword
        None,  # params not supported either
        None,  # no file upload
        headers,
        redir,
        timeout)
    return response
def delete_request(
        self,
        alias,
        uri,
        data=None,
        json=None,
        params=None,
        headers=None,
        allow_redirects=None,
        timeout=None):
    """ Send a DELETE request on the session object found using the
    given `alias`
    ``alias`` that will be used to identify the Session object in the cache
    ``uri`` to send the DELETE request to
    ``json`` a value that will be json encoded
    and sent as request data if data is not specified
    ``headers`` a dictionary of headers to use with the request
    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
    ``timeout`` connection timeout
    """
    session = self._cache.switch(alias)
    # Encode the body to match the effective Content-Type header.
    data = self._format_data_according_to_header(session, data, headers)
    redir = True if allow_redirects is None else allow_redirects
    response = self._delete_request(
        session, uri, data, json, params, headers, redir, timeout)
    if isinstance(data, bytes):
        # Decode for logging only.
        data = data.decode('utf-8')
    logger.info('Delete Request using : alias=%s, uri=%s, data=%s, \
headers=%s, allow_redirects=%s ' % (alias, uri, data, headers, redir))
    return response
def delete(
        self,
        alias,
        uri,
        data=(),
        headers=None,
        allow_redirects=None,
        timeout=None):
    """ * * * Deprecated- See Delete Request now * * *
    Send a DELETE request on the session object found using the
    given `alias`
    ``alias`` that will be used to identify the Session object in the cache
    ``uri`` to send the DELETE request to
    ``headers`` a dictionary of headers to use with the request
    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
    ``timeout`` connection timeout
    """
    logger.warn("Deprecation Warning: Use Delete Request in the future")
    session = self._cache.switch(alias)
    data = self._utf8_urlencode(data)
    redir = True if allow_redirects is None else allow_redirects
    # BUG FIX: the global ``json`` module object used to be passed as the
    # json payload argument; when ``data`` is empty (the default) requests
    # then tries to serialize the module and raises TypeError. Pass None.
    response = self._delete_request(
        session, uri, data, None, None, headers, redir, timeout)
    return response
def head_request(
        self,
        alias,
        uri,
        headers=None,
        allow_redirects=None,
        timeout=None):
    """ Send a HEAD request on the session object found using the
    given `alias`
    ``alias`` that will be used to identify the Session object in the cache
    ``uri`` to send the HEAD request to
    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
    ``headers`` a dictionary of headers to use with the request
    """
    session = self._cache.switch(alias)
    # Unlike the body-carrying verbs, HEAD does NOT follow redirects by default.
    redir = False if allow_redirects is None else allow_redirects
    response = self._head_request(session, uri, headers, redir, timeout)
    logger.info('Head Request using : alias=%s, uri=%s, headers=%s, \
allow_redirects=%s ' % (alias, uri, headers, redir))
    return response
def head(
        self,
        alias,
        uri,
        headers=None,
        allow_redirects=None,
        timeout=None):
    """ **Deprecated- See Head Request now**
    Send a HEAD request on the session object found using the
    given `alias`
    ``alias`` that will be used to identify the Session object in the cache
    ``uri`` to send the HEAD request to
    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
    ``headers`` a dictionary of headers to use with the request
    """
    logger.warn("Deprecation Warning: Use Head Request in the future")
    session = self._cache.switch(alias)
    # HEAD does not follow redirects by default.
    redir = False if allow_redirects is None else allow_redirects
    response = self._head_request(session, uri, headers, redir, timeout)
    return response
def options_request(
        self,
        alias,
        uri,
        headers=None,
        allow_redirects=None,
        timeout=None):
    """ Send an OPTIONS request on the session object found using the
    given `alias`
    ``alias`` that will be used to identify the Session object in the cache
    ``uri`` to send the OPTIONS request to
    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
    ``headers`` a dictionary of headers to use with the request
    """
    session = self._cache.switch(alias)
    # OPTIONS follows redirects by default (contrast with HEAD).
    redir = True if allow_redirects is None else allow_redirects
    response = self._options_request(session, uri, headers, redir, timeout)
    logger.info(
        'Options Request using : alias=%s, uri=%s, headers=%s, allow_redirects=%s ' %
        (alias, uri, headers, redir))
    return response
def options(
        self,
        alias,
        uri,
        headers=None,
        allow_redirects=None,
        timeout=None):
    """ **Deprecated- See Options Request now**
    Send an OPTIONS request on the session object found using the
    given `alias`
    ``alias`` that will be used to identify the Session object in the cache
    ``uri`` to send the OPTIONS request to
    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
    ``headers`` a dictionary of headers to use with the request
    """
    logger.warn("Deprecation Warning: Use Options Request in the future")
    session = self._cache.switch(alias)
    # OPTIONS follows redirects by default.
    redir = True if allow_redirects is None else allow_redirects
    response = self._options_request(session, uri, headers, redir, timeout)
    return response
def _get_request(
        self,
        session,
        uri,
        params,
        headers,
        json,
        allow_redirects,
        timeout):
    # Low-level GET: capture http debug output, send the request with the
    # session-level cookies/verify settings, then restore stdout.
    self._capture_output()
    resp = session.get(self._get_url(session, uri),
                       headers=headers,
                       json=json,
                       params=self._utf8_urlencode(params),
                       allow_redirects=allow_redirects,
                       timeout=self._get_timeout(timeout),
                       cookies=self.cookies,
                       verify=self.verify)
    self._print_debug()
    # Store the last session object
    session.last_resp = resp
    return resp
def _body_request(
        self,
        method_name,
        session,
        uri,
        data,
        json,
        params,
        files,
        headers,
        allow_redirects,
        timeout):
    # Shared implementation for the body-carrying verbs (post/put/patch):
    # ``method_name`` selects the bound requests.Session method to call.
    self._capture_output()
    method = getattr(session, method_name)
    resp = method(self._get_url(session, uri),
                  data=data,
                  json=json,
                  params=self._utf8_urlencode(params),
                  files=files,
                  headers=headers,
                  allow_redirects=allow_redirects,
                  timeout=self._get_timeout(timeout),
                  cookies=self.cookies,
                  verify=self.verify)
    self._print_debug()
    # Store the last session object
    session.last_resp = resp
    self.builtin.log(method_name + ' response: ' + resp.text, 'DEBUG')
    return resp
def _delete_request(
        self,
        session,
        uri,
        data,
        json,
        params,
        headers,
        allow_redirects,
        timeout):
    # Low-level DELETE: capture http debug output, send with session-level
    # cookies/verify settings, then restore stdout.
    self._capture_output()
    resp = session.delete(self._get_url(session, uri),
                          data=data,
                          json=json,
                          params=self._utf8_urlencode(params),
                          headers=headers,
                          allow_redirects=allow_redirects,
                          timeout=self._get_timeout(timeout),
                          cookies=self.cookies,
                          verify=self.verify)
    self._print_debug()
    # Store the last session object
    session.last_resp = resp
    return resp
def _head_request(self, session, uri, headers, allow_redirects, timeout):
    """Issue the HEAD request on ``session`` and record the response."""
    self._capture_output()
    full_url = self._get_url(session, uri)
    response = session.head(full_url,
                            headers=headers,
                            allow_redirects=allow_redirects,
                            timeout=self._get_timeout(timeout),
                            cookies=self.cookies,
                            verify=self.verify)
    self._print_debug()
    # Remember the most recent response on the session for later inspection.
    session.last_resp = response
    return response
def _options_request(
        self,
        session,
        uri,
        headers,
        allow_redirects,
        timeout):
    """Issue the OPTIONS request on ``session`` and record the response."""
    self._capture_output()
    target = self._get_url(session, uri)
    response = session.options(target,
                               headers=headers,
                               cookies=self.cookies,
                               allow_redirects=allow_redirects,
                               timeout=self._get_timeout(timeout),
                               verify=self.verify)
    self._print_debug()
    # Remember the most recent response on the session for later inspection.
    session.last_resp = response
    return response
def _get_url(self, session, uri):
"""
Helper method to get the full url
"""
url = session.url
if uri:
slash = '' if uri.startswith('/') else '/'
url = "%s%s%s" % (session.url, slash, uri)
return url
def _get_timeout(self, timeout):
return float(timeout) if timeout is not None else self.timeout
def _capture_output(self):
    # When http debugging is enabled, redirect stdout into a WritableObject
    # so httplib's debug chatter can be collected and logged later by
    # _print_debug().
    if self.debug >= 1:
        self.http_log = WritableObject()
        sys.stdout = self.http_log
def _print_debug(self):
    # Flush any httplib debug output captured by _capture_output() into the
    # Robot Framework DEBUG log, then restore the real stdout.
    if self.debug >= 1:
        sys.stdout = sys.__stdout__  # Restore stdout
        # httplib wrote repr()-style fragments; strip the literal \r escapes
        # and quote characters so the output reads as plain text.
        if PY3:
            debug_info = ''.join(
                self.http_log.content).replace(
                '\\r',
                '').replace(
                '\'',
                '')
        else:
            # Python 2 additionally needs string_escape decoding.
            debug_info = ''.join(
                self.http_log.content).replace(
                '\\r',
                '').decode('string_escape').replace(
                '\'',
                '')
        # Remove empty lines
        debug_info = "\n".join(
            [ll.rstrip() for ll in debug_info.splitlines() if ll.strip()])
        self.builtin.log(debug_info, 'DEBUG')
def _json_pretty_print(self, content):
"""
Pretty print a JSON object
``content`` JSON object to pretty print
"""
temp = json.loads(content)
return json.dumps(
temp,
sort_keys=True,
indent=4,
separators=(
',',
': '))
def _utf8_urlencode(self, data):
    """UTF-8 encode string payloads and urlencode dict payloads.

    Strings come back as UTF-8 bytes, dicts as an urlencoded string with
    their string values UTF-8 encoded first, anything else unchanged.
    """
    if self._is_string_type(data):
        return data.encode('utf-8')
    if not isinstance(data, dict):
        return data
    encoded = {
        key: (value.encode('utf-8') if self._is_string_type(value) else value)
        for key, value in data.items()
    }
    return urlencode(encoded)
def _format_data_according_to_header(self, session, data, headers):
    # Merge request headers with the session-level defaults before
    # inspecting Content-Type. Data that already parses as JSON is left
    # untouched regardless of the header.
    headers = self._merge_headers(session, headers)
    if data is not None and headers is not None and 'Content-Type' in headers and not self._is_json(data):
        if headers['Content-Type'].find("application/json") != -1:
            # Generators cannot be json.dumps'd; leave them for requests
            # to stream as a chunked body.
            if not isinstance(data, types.GeneratorType):
                data = json.dumps(data)
        elif headers['Content-Type'].find("application/x-www-form-urlencoded") != -1:
            data = self._utf8_urlencode(data)
    else:
        # No usable Content-Type (or body is already JSON text): fall back
        # to urlencoding, which passes non-dict/non-string data through.
        data = self._utf8_urlencode(data)
    return data
def _format_data_to_log_string_according_to_header(self, data, headers):
dataStr = "<empty>"
if data is not None and headers is not None and 'Content-Type' in headers:
if (headers['Content-Type'].find("application/json") != -1) or \
(headers['Content-Type'].find("application/x-www-form-urlencoded") != -1):
if isinstance(data, bytes):
dataStr = data.decode('utf-8')
else:
dataStr = data
else:
dataStr = "<" + headers['Content-Type'] + ">"
return dataStr
@staticmethod
def _merge_headers(session, headers):
    # Combine per-request headers with the session defaults.
    # NOTE(review): session.headers is applied *over* the request headers,
    # so session-level values win on key conflicts — the opposite of
    # requests' own merge order. Kept as-is for behavioural compatibility.
    if headers is None:
        headers = {}
    else:
        # Copy so the caller's dict is never mutated.
        headers = headers.copy()
    headers.update(session.headers)
    return headers
@staticmethod
def _is_json(data):
try:
json.loads(data)
except (TypeError, ValueError):
return False
return True
@staticmethod
def _is_string_type(data):
    # Text check that works on both interpreters: ``unicode`` only exists
    # on Python 2, so the PY3 flag guards the reference.
    if PY3 and isinstance(data, str):
        return True
    elif not PY3 and isinstance(data, unicode):
        return True
    return False
|
bulkan/robotframework-requests
|
src/RequestsLibrary/RequestsKeywords.py
|
RequestsKeywords.update_session
|
python
|
def update_session(self, alias, headers=None, cookies=None):
    """Update an existing session's headers and cookies in place.

    ``alias``   identifies the Session object in the cache
    ``headers`` dictionary of headers merged into the session headers
    ``cookies`` dictionary of cookies merged into the session cookie jar
    """
    session = self._cache.switch(alias)
    # merge_setting / merge_cookies come from requests' session utilities.
    session.headers = merge_setting(headers, session.headers)
    session.cookies = merge_cookies(session.cookies, cookies)
|
Update Session Headers: update a HTTP Session Headers
``alias`` Robot Framework alias to identify the session
``headers`` Dictionary of headers merge into session
|
train
|
https://github.com/bulkan/robotframework-requests/blob/11baa3277f1cb728712e26d996200703c15254a8/src/RequestsLibrary/RequestsKeywords.py#L449-L458
| null |
class RequestsKeywords(object):
"""``RequestsLibrary`` is a [http://code.google.com/p/robotframework/|Robot Framework] test library that uses the [https://github.com/kennethreitz/requests|Requests] HTTP client.
Here is an example testcase
| ***** Settings ***** | | | | |
| Library | Collections | | | |
| Library | RequestsLibrary | | | |
| ***** Test Cases ***** | | | | |
| Get Requests | | | | |
| | Create Session | github | http://api.github.com | |
| | Create Session | google | http://www.google.com | |
| | ${resp}= | Get Request | google | / |
| | Should Be Equal As Strings | ${resp.status_code} | 200 | |
| | ${resp}= | Get Request | github | /users/bulkan |
| | Should Be Equal As Strings | ${resp.status_code} | 200 | |
| | Dictionary Should Contain Value | ${resp.json()} | Bulkan Savun Evcimen | |
"""
ROBOT_LIBRARY_SCOPE = 'Global'
def __init__(self):
    # Named cache of requests.Session objects, keyed by a user-chosen alias.
    self._cache = robot.utils.ConnectionCache('No sessions created')
    # Handle to Robot Framework's BuiltIn library for logging/conversions.
    self.builtin = BuiltIn()
    # httplib debug verbosity; 0 disables stdout capture.
    self.debug = 0
def _create_session(
        self,
        alias,
        url,
        headers,
        cookies,
        auth,
        timeout,
        max_retries,
        backoff_factor,
        proxies,
        verify,
        debug,
        disable_warnings):
    """ Create Session: create a HTTP session to a server
    ``url`` Base url of the server
    ``alias`` Robot Framework alias to identify the session
    ``headers`` Dictionary of default headers
    ``cookies`` Dictionary of cookies
    ``auth`` List of username & password for HTTP Basic Auth
    ``timeout`` Connection timeout
    ``max_retries`` The maximum number of retries each connection should attempt.
    ``backoff_factor`` The pause between for each retry
    ``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication
    ``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.
    ``debug`` Enable http verbosity option more information
    https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel
    ``disable_warnings`` Disable requests warning useful when you have large number of testcases
    """
    self.builtin.log('Creating session: %s' % alias, 'DEBUG')
    s = session = requests.Session()
    s.headers.update(headers)
    s.auth = auth if auth else s.auth
    s.proxies = proxies if proxies else s.proxies
    try:
        max_retries = int(max_retries)
    except ValueError as err:
        raise ValueError("Error converting max_retries parameter: %s" % err)
    if max_retries > 0:
        # Mount retrying adapters for both schemes, replacing the defaults.
        http = requests.adapters.HTTPAdapter(max_retries=Retry(total=max_retries, backoff_factor=backoff_factor))
        https = requests.adapters.HTTPAdapter(max_retries=Retry(total=max_retries, backoff_factor=backoff_factor))
        # Replace the session's original adapters
        s.mount('http://', http)
        s.mount('https://', https)
    # Disable requests warnings, useful when you have large number of testcase
    # you will observe drastical changes in Robot log.html and output.xml files size
    if disable_warnings:
        logging.basicConfig()  # you need to initialize logging, otherwise you will not see anything from requests
        logging.getLogger().setLevel(logging.ERROR)
        requests_log = logging.getLogger("requests")
        requests_log.setLevel(logging.ERROR)
        requests_log.propagate = True
        if not verify:
            requests.packages.urllib3.disable_warnings()
    # verify can be a Boolean or a String
    if isinstance(verify, bool):
        s.verify = verify
    elif isinstance(verify, str) or isinstance(verify, unicode):
        if verify.lower() == 'true' or verify.lower() == 'false':
            s.verify = self.builtin.convert_to_boolean(verify)
        else:
            # String for CA_BUNDLE, not a Boolean String
            s.verify = verify
    else:
        # not a Boolean nor a String
        s.verify = verify
    # cant pass these into the Session anymore
    self.timeout = float(timeout) if timeout is not None else None
    self.cookies = cookies
    self.verify = verify if self.builtin.convert_to_boolean(verify) != True else None
    s.url = url
    # Enable http verbosity
    if int(debug) >= 1:
        self.debug = int(debug)
        httplib.HTTPConnection.debuglevel = self.debug
    self._cache.register(session, alias=alias)
    return session
def create_session(self, alias, url, headers=None, cookies=None,
                   auth=None, timeout=None, proxies=None,
                   verify=False, debug=0, max_retries=3, backoff_factor=0.10, disable_warnings=0):
    """ Create Session: create a HTTP session to a server
    ``url`` Base url of the server
    ``alias`` Robot Framework alias to identify the session
    ``headers`` Dictionary of default headers
    ``cookies`` Dictionary of cookies
    ``auth`` List of username & password for HTTP Basic Auth
    ``timeout`` Connection timeout
    ``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication
    ``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.
    Defaults to False.
    ``debug`` Enable http verbosity option more information
    https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel
    ``max_retries`` The maximum number of retries each connection should attempt.
    ``backoff_factor`` The pause between for each retry
    ``disable_warnings`` Disable requests warning useful when you have large number of testcases
    """
    # FIX: mutable default arguments ({}) were shared across calls; use
    # None sentinels and allocate fresh dicts per call instead. Callers
    # that passed nothing (or pass None) see identical behaviour.
    headers = {} if headers is None else headers
    cookies = {} if cookies is None else cookies
    auth = requests.auth.HTTPBasicAuth(*auth) if auth else None
    logger.info('Creating Session using : alias=%s, url=%s, headers=%s, \
cookies=%s, auth=%s, timeout=%s, proxies=%s, verify=%s, \
debug=%s ' % (alias, url, headers, cookies, auth, timeout,
              proxies, verify, debug))
    return self._create_session(
        alias,
        url,
        headers,
        cookies,
        auth,
        timeout,
        max_retries,
        backoff_factor,
        proxies,
        verify,
        debug,
        disable_warnings)
def create_custom_session(
        self,
        alias,
        url,
        auth,
        headers={},
        cookies={},
        timeout=None,
        proxies=None,
        verify=False,
        debug=0,
        max_retries=3,
        backoff_factor=0.10,
        disable_warnings=0):
    """ Create Session: create a HTTP session to a server
    ``url`` Base url of the server
    ``alias`` Robot Framework alias to identify the session
    ``headers`` Dictionary of default headers
    ``cookies`` Dictionary of cookies
    ``auth`` A Custom Authentication object to be passed on to the reqests library.
    http://docs.python-requests.org/en/master/user/advanced/#custom-authentication
    ``timeout`` Connection timeout
    ``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication
    ``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.
    Defaults to False.
    ``debug`` Enable http verbosity option more information
    https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel
    ``max_retries`` The maximum number of retries each connection should attempt.
    ``backoff_factor`` The pause between for each retry
    ``disable_warnings`` Disable requests warning useful when you have large number of testcases
    """
    # The auth object is handed to requests untouched, so any
    # requests-compatible AuthBase implementation works here.
    logger.info('Creating Custom Authenticated Session using : alias=%s, url=%s, headers=%s, \
cookies=%s, auth=%s, timeout=%s, proxies=%s, verify=%s, \
debug=%s ' % (alias, url, headers, cookies, auth, timeout,
              proxies, verify, debug))
    return self._create_session(
        alias,
        url,
        headers,
        cookies,
        auth,
        timeout,
        max_retries,
        backoff_factor,
        proxies,
        verify,
        debug,
        disable_warnings)
def create_ntlm_session(
        self,
        alias,
        url,
        auth,
        headers={},
        cookies={},
        timeout=None,
        proxies=None,
        verify=False,
        debug=0,
        max_retries=3,
        backoff_factor=0.10,
        disable_warnings=0):
    """ Create Session: create a HTTP session to a server
    ``url`` Base url of the server
    ``alias`` Robot Framework alias to identify the session
    ``headers`` Dictionary of default headers
    ``cookies`` Dictionary of cookies
    ``auth`` ['DOMAIN', 'username', 'password'] for NTLM Authentication
    ``timeout`` Connection timeout
    ``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication
    ``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.
    Defaults to False.
    ``debug`` Enable http verbosity option more information
    https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel
    ``max_retries`` The maximum number of retries each connection should attempt.
    ``backoff_factor`` The pause between for each retry
    ``disable_warnings`` Disable requests warning useful when you have large number of testcases
    """
    # requests_ntlm is an optional dependency; HttpNtlmAuth is falsy when
    # the import failed at module load time.
    if not HttpNtlmAuth:
        raise AssertionError('Requests NTLM module not loaded')
    elif len(auth) != 3:
        raise AssertionError('Incorrect number of authentication arguments'
                             ' - expected 3, got {}'.format(len(auth)))
    else:
        # NTLM expects DOMAIN\username as the login name.
        ntlm_auth = HttpNtlmAuth('{}\\{}'.format(auth[0], auth[1]),
                                 auth[2])
        logger.info('Creating NTLM Session using : alias=%s, url=%s, \
headers=%s, cookies=%s, ntlm_auth=%s, timeout=%s, \
proxies=%s, verify=%s, debug=%s '
                    % (alias, url, headers, cookies, ntlm_auth,
                       timeout, proxies, verify, debug))
        return self._create_session(
            alias,
            url,
            headers,
            cookies,
            ntlm_auth,
            timeout,
            max_retries,
            backoff_factor,
            proxies,
            verify,
            debug,
            disable_warnings)
def create_digest_session(self, alias, url, auth, headers={}, cookies={},
                          timeout=None, proxies=None, verify=False,
                          debug=0, max_retries=3, backoff_factor=0.10, disable_warnings=0):
    """ Create Session: create a HTTP session to a server
    ``url`` Base url of the server
    ``alias`` Robot Framework alias to identify the session
    ``headers`` Dictionary of default headers
    ``cookies`` Dictionary of cookies
    ``auth`` ['DOMAIN', 'username', 'password'] for NTLM Authentication
    ``timeout`` Connection timeout
    ``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication
    ``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.
    Defaults to False.
    ``debug`` Enable http verbosity option more information
    https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel
    ``max_retries`` The maximum number of retries each connection should attempt.
    ``backoff_factor`` The pause between for each retry
    ``disable_warnings`` Disable requests warning useful when you have large number of testcases
    """
    # Wrap the (user, password) pair in requests' digest-auth handler.
    digest_auth = requests.auth.HTTPDigestAuth(*auth) if auth else None
    return self._create_session(
        alias,
        url,
        headers,
        cookies,
        digest_auth,
        timeout,
        max_retries,
        backoff_factor,
        proxies,
        verify,
        debug,
        disable_warnings)
def create_client_cert_session(self, alias, url, headers={}, cookies={},
                               client_certs=None, timeout=None, proxies=None,
                               verify=False, debug=0, max_retries=3, backoff_factor=0.10, disable_warnings=0):
    """ Create Session: create a HTTP session to a server
    ``url`` Base url of the server
    ``alias`` Robot Framework alias to identify the session
    ``headers`` Dictionary of default headers
    ``cookies`` Dictionary of cookies
    ``client_certs`` ['client certificate', 'client key'] PEM files containing the client key and certificate
    ``timeout`` Connection timeout
    ``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication
    ``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.
    Defaults to False.
    ``debug`` Enable http verbosity option more information
    https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel
    ``max_retries`` The maximum number of retries each connection should attempt.
    ``backoff_factor`` The pause between for each retry
    ``disable_warnings`` Disable requests warning useful when you have large number of testcases
    """
    logger.info('Creating Session using : alias=%s, url=%s, headers=%s, \
cookies=%s, client_certs=%s, timeout=%s, proxies=%s, verify=%s, \
debug=%s ' % (alias, url, headers, cookies, client_certs, timeout,
              proxies, verify, debug))
    session = self._create_session(
        alias,
        url,
        headers,
        cookies,
        None,
        timeout,
        max_retries,
        backoff_factor,
        proxies,
        verify,
        debug,
        disable_warnings)
    # requests expects the client cert as a (cert_file, key_file) tuple.
    session.cert = tuple(client_certs)
    return session
def delete_all_sessions(self):
    """ Removes all the session objects """
    # Drops every cached requests.Session; existing aliases become invalid.
    logger.info('Delete All Sessions')
    self._cache.empty_cache()
def to_json(self, content, pretty_print=False):
    """ Convert a string to a JSON object
    ``content`` String content to convert into JSON
    ``pretty_print`` If defined, will output JSON is pretty print format
    """
    if PY3:
        # Response bodies may arrive as bytes on Python 3; decode first.
        if isinstance(content, bytes):
            content = content.decode(encoding='utf-8')
    if pretty_print:
        # NOTE: the pretty-print branch returns a formatted *string*,
        # not a parsed object.
        json_ = self._json_pretty_print(content)
    else:
        json_ = json.loads(content)
    logger.info('To JSON using : content=%s ' % (content))
    logger.info('To JSON using : pretty_print=%s ' % (pretty_print))
    return json_
def get_request(
        self,
        alias,
        uri,
        headers=None,
        json=None,
        params=None,
        allow_redirects=None,
        timeout=None):
    """ Send a GET request on the session object found using the
    given `alias`
    ``alias`` that will be used to identify the Session object in the cache
    ``uri`` to send the GET request to
    ``params`` url parameters to append to the uri
    ``headers`` a dictionary of headers to use with the request
    ``json`` json data to send in the body of the :class:`Request`.
    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
    ``timeout`` connection timeout
    """
    session = self._cache.switch(alias)
    # GET follows redirects by default.
    redir = True if allow_redirects is None else allow_redirects
    response = self._get_request(
        session, uri, params, headers, json, redir, timeout)
    logger.info(
        'Get Request using : alias=%s, uri=%s, headers=%s json=%s' %
        (alias, uri, headers, json))
    return response
def get(
        self,
        alias,
        uri,
        params=None,
        headers=None,
        allow_redirects=None,
        timeout=None):
    """ **Deprecated- See Get Request now**
    Send a GET request on the session object found using the
    given `alias`
    ``alias`` that will be used to identify the Session object in the cache
    ``uri`` to send the GET request to
    ``headers`` a dictionary of headers to use with the request
    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
    ``timeout`` connection timeout
    """
    logger.warn("Deprecation Warning: Use Get Request in the future")
    session = self._cache.switch(alias)
    redir = True if allow_redirects is None else allow_redirects
    # BUG FIX: arguments were previously passed as (..., redir, timeout, json),
    # filling _get_request's (json, allow_redirects, timeout) slots with the
    # wrong values — the ``json`` *module* ended up as the timeout. Pass them
    # in signature order: (session, uri, params, headers, json, allow_redirects,
    # timeout), with no json payload for this deprecated keyword.
    response = self._get_request(
        session, uri, params, headers, None, redir, timeout)
    return response
def post_request(
        self,
        alias,
        uri,
        data=None,
        json=None,
        params=None,
        headers=None,
        files=None,
        allow_redirects=None,
        timeout=None):
    """ Send a POST request on the session object found using the
    given `alias`
    ``alias`` that will be used to identify the Session object in the cache
    ``uri`` to send the POST request to
    ``data`` a dictionary of key-value pairs that will be urlencoded
    and sent as POST data
    or binary data that is sent as the raw body content
    or passed as such for multipart form data if ``files`` is also
    defined
    ``json`` a value that will be json encoded
    and sent as POST data if files or data is not specified
    ``params`` url parameters to append to the uri
    ``headers`` a dictionary of headers to use with the request
    ``files`` a dictionary of file names containing file data to POST to the server
    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
    ``timeout`` connection timeout
    """
    session = self._cache.switch(alias)
    if not files:
        # For multipart uploads requests must see the raw data untouched;
        # otherwise encode it to match the Content-Type header.
        data = self._format_data_according_to_header(session, data, headers)
    redir = True if allow_redirects is None else allow_redirects
    response = self._body_request(
        "post",
        session,
        uri,
        data,
        json,
        params,
        files,
        headers,
        redir,
        timeout)
    dataStr = self._format_data_to_log_string_according_to_header(data, headers)
    logger.info('Post Request using : alias=%s, uri=%s, data=%s, headers=%s, files=%s, allow_redirects=%s '
                % (alias, uri, dataStr, headers, files, redir))
    return response
def post(
        self,
        alias,
        uri,
        data={},
        headers=None,
        files=None,
        allow_redirects=None,
        timeout=None):
    """ **Deprecated- See Post Request now**
    Send a POST request on the session object found using the
    given `alias`
    ``alias`` that will be used to identify the Session object in the cache
    ``uri`` to send the GET request to
    ``data`` a dictionary of key-value pairs that will be urlencoded
    and sent as POST data
    or binary data that is sent as the raw body content
    ``headers`` a dictionary of headers to use with the request
    ``files`` a dictionary of file names containing file data to POST to the server
    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
    ``timeout`` connection timeout
    """
    logger.warn("Deprecation Warning: Use Post Request in the future")
    session = self._cache.switch(alias)
    # Legacy behaviour: body is always urlencoded.
    data = self._utf8_urlencode(data)
    redir = True if allow_redirects is None else allow_redirects
    response = self._body_request(
        "post",
        session,
        uri,
        data,
        None,  # json payload not supported by this deprecated keyword
        None,  # params not supported either
        files,
        headers,
        redir,
        timeout)
    return response
def patch_request(
        self,
        alias,
        uri,
        data=None,
        json=None,
        params=None,
        headers=None,
        files=None,
        allow_redirects=None,
        timeout=None):
    """ Send a PATCH request on the session object found using the
    given `alias`
    ``alias`` that will be used to identify the Session object in the cache
    ``uri`` to send the PATCH request to
    ``data`` a dictionary of key-value pairs that will be urlencoded
    and sent as PATCH data
    or binary data that is sent as the raw body content
    ``json`` a value that will be json encoded
    and sent as PATCH data if data is not specified
    ``headers`` a dictionary of headers to use with the request
    ``files`` a dictionary of file names containing file data to PATCH to the server
    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
    ``params`` url parameters to append to the uri
    ``timeout`` connection timeout
    """
    session = self._cache.switch(alias)
    # Encode the body to match the effective Content-Type header.
    data = self._format_data_according_to_header(session, data, headers)
    redir = True if allow_redirects is None else allow_redirects
    response = self._body_request(
        "patch",
        session,
        uri,
        data,
        json,
        params,
        files,
        headers,
        redir,
        timeout)
    if isinstance(data, bytes):
        # Decode for logging only.
        data = data.decode('utf-8')
    logger.info('Patch Request using : alias=%s, uri=%s, data=%s, \
headers=%s, files=%s, allow_redirects=%s '
                % (alias, uri, data, headers, files, redir))
    return response
def patch(
        self,
        alias,
        uri,
        data={},
        headers=None,
        files={},
        allow_redirects=None,
        timeout=None):
    """ **Deprecated- See Patch Request now**
    Send a PATCH request on the session object found using the
    given `alias`
    ``alias`` that will be used to identify the Session object in the cache
    ``uri`` to send the PATCH request to
    ``data`` a dictionary of key-value pairs that will be urlencoded
    and sent as PATCH data
    or binary data that is sent as the raw body content
    ``headers`` a dictionary of headers to use with the request
    ``files`` a dictionary of file names containing file data to PATCH to the server
    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
    ``timeout`` connection timeout
    """
    logger.warn("Deprecation Warning: Use Patch Request in the future")
    session = self._cache.switch(alias)
    # Legacy behaviour: body is always urlencoded.
    data = self._utf8_urlencode(data)
    redir = True if allow_redirects is None else allow_redirects
    response = self._body_request(
        "patch",
        session,
        uri,
        data,
        None,  # json payload not supported by this deprecated keyword
        None,  # params not supported either
        files,
        headers,
        redir,
        timeout)
    return response
def put_request(
        self,
        alias,
        uri,
        data=None,
        json=None,
        params=None,
        files=None,
        headers=None,
        allow_redirects=None,
        timeout=None):
    """ Send a PUT request on the session object found using the
    given `alias`
    ``alias`` that will be used to identify the Session object in the cache
    ``uri`` to send the PUT request to
    ``data`` a dictionary of key-value pairs that will be urlencoded
    and sent as PUT data
    or binary data that is sent as the raw body content
    ``json`` a value that will be json encoded
    and sent as PUT data if data is not specified
    ``headers`` a dictionary of headers to use with the request
    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
    ``params`` url parameters to append to the uri
    ``timeout`` connection timeout
    """
    session = self._cache.switch(alias)
    # Encode the body to match the effective Content-Type header.
    data = self._format_data_according_to_header(session, data, headers)
    # PUT follows redirects by default unless explicitly disabled.
    redir = True if allow_redirects is None else allow_redirects
    response = self._body_request(
        "put",
        session,
        uri,
        data,
        json,
        params,
        files,
        headers,
        redir,
        timeout)
    if isinstance(data, bytes):
        # Decode for logging only; the request body has already been sent.
        data = data.decode('utf-8')
    logger.info('Put Request using : alias=%s, uri=%s, data=%s, \
headers=%s, allow_redirects=%s ' % (alias, uri, data, headers, redir))
    return response
def put(
        self,
        alias,
        uri,
        data=None,
        headers=None,
        allow_redirects=None,
        timeout=None):
    """ **Deprecated- See Put Request now**
    Send a PUT request on the session object found using the
    given `alias`
    ``alias`` that will be used to identify the Session object in the cache
    ``uri`` to send the PUT request to
    ``headers`` a dictionary of headers to use with the request
    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
    ``timeout`` connection timeout
    """
    logger.warn("Deprecation Warning: Use Put Request in the future")
    session = self._cache.switch(alias)
    # Legacy behaviour: body is always urlencoded, never json-encoded.
    data = self._utf8_urlencode(data)
    redir = True if allow_redirects is None else allow_redirects
    response = self._body_request(
        "put",
        session,
        uri,
        data,
        None,  # json payload not supported by this deprecated keyword
        None,  # params not supported either
        None,  # no file upload
        headers,
        redir,
        timeout)
    return response
def delete_request(
        self,
        alias,
        uri,
        data=None,
        json=None,
        params=None,
        headers=None,
        allow_redirects=None,
        timeout=None):
    """ Send a DELETE request on the session object found using the
    given `alias`

    ``alias`` that will be used to identify the Session object in the cache
    ``uri`` to send the DELETE request to
    ``json`` a value that will be json encoded
    and sent as request data if data is not specified
    ``headers`` a dictionary of headers to use with the request
    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
    ``timeout`` connection timeout
    """
    session = self._cache.switch(alias)
    # Serialize/urlencode the body according to the merged Content-Type header.
    data = self._format_data_according_to_header(session, data, headers)
    # Redirect following defaults to True for DELETE.
    redir = True if allow_redirects is None else allow_redirects
    response = self._delete_request(
        session, uri, data, json, params, headers, redir, timeout)
    # Decode bytes only so the body can be logged as text below.
    if isinstance(data, bytes):
        data = data.decode('utf-8')
    logger.info('Delete Request using : alias=%s, uri=%s, data=%s, \
headers=%s, allow_redirects=%s ' % (alias, uri, data, headers, redir))
    return response
def delete(self, alias, uri, data=(), headers=None,
           allow_redirects=None, timeout=None):
    """ * * * Deprecated- See Delete Request now * * *

    Send a DELETE request on the session object found using the
    given `alias`

    ``alias`` that will be used to identify the Session object in the cache
    ``uri`` to send the DELETE request to
    ``headers`` a dictionary of headers to use with the request
    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
    ``timeout`` connection timeout
    """
    logger.warn("Deprecation Warning: Use Delete Request in the future")
    session = self._cache.switch(alias)
    data = self._utf8_urlencode(data)
    redir = True if allow_redirects is None else allow_redirects
    # Bug fix: this keyword has no ``json`` parameter, so the previous call
    # passed the module-level ``json`` MODULE as the request's JSON body,
    # which requests cannot serialize (TypeError whenever data is falsy).
    # Send no JSON body instead.
    response = self._delete_request(
        session, uri, data, None, None, headers, redir, timeout)
    return response
def head_request(
        self,
        alias,
        uri,
        headers=None,
        allow_redirects=None,
        timeout=None):
    """ Send a HEAD request on the session object found using the
    given `alias`

    ``alias`` that will be used to identify the Session object in the cache
    ``uri`` to send the HEAD request to
    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
    ``headers`` a dictionary of headers to use with the request
    """
    session = self._cache.switch(alias)
    # Unlike the body-carrying keywords, redirect following defaults to
    # False for HEAD.
    redir = False if allow_redirects is None else allow_redirects
    response = self._head_request(session, uri, headers, redir, timeout)
    logger.info('Head Request using : alias=%s, uri=%s, headers=%s, \
allow_redirects=%s ' % (alias, uri, headers, redir))
    return response
def head(self, alias, uri, headers=None, allow_redirects=None, timeout=None):
    """**Deprecated - use `Head Request` instead.**

    Send a HEAD request on the session identified by ``alias``.

    ``alias`` identifies the Session object in the cache
    ``uri`` target of the HEAD request
    ``allow_redirects`` Boolean; defaults to no redirect following
    ``headers`` a dictionary of headers to use with the request
    """
    logger.warn("Deprecation Warning: Use Head Request in the future")
    sess = self._cache.switch(alias)
    follow = allow_redirects if allow_redirects is not None else False
    return self._head_request(sess, uri, headers, follow, timeout)
def options_request(
        self,
        alias,
        uri,
        headers=None,
        allow_redirects=None,
        timeout=None):
    """ Send an OPTIONS request on the session object found using the
    given `alias`

    ``alias`` that will be used to identify the Session object in the cache
    ``uri`` to send the OPTIONS request to
    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
    ``headers`` a dictionary of headers to use with the request
    """
    session = self._cache.switch(alias)
    # Redirect following defaults to True for OPTIONS.
    redir = True if allow_redirects is None else allow_redirects
    response = self._options_request(session, uri, headers, redir, timeout)
    logger.info(
        'Options Request using : alias=%s, uri=%s, headers=%s, allow_redirects=%s ' %
        (alias, uri, headers, redir))
    return response
def options(self, alias, uri, headers=None, allow_redirects=None, timeout=None):
    """**Deprecated - use `Options Request` instead.**

    Send an OPTIONS request on the session identified by ``alias``.

    ``alias`` identifies the Session object in the cache
    ``uri`` target of the OPTIONS request
    ``allow_redirects`` Boolean; defaults to redirect following enabled
    ``headers`` a dictionary of headers to use with the request
    """
    logger.warn("Deprecation Warning: Use Options Request in the future")
    sess = self._cache.switch(alias)
    follow = allow_redirects if allow_redirects is not None else True
    return self._options_request(sess, uri, headers, follow, timeout)
def _get_request(
        self,
        session,
        uri,
        params,
        headers,
        json,
        allow_redirects,
        timeout):
    """Low-level GET: perform the request on *session* and return the response.

    Applies the session-level cookies/verify settings captured at session
    creation; *params* are UTF-8 urlencoded before being sent.
    """
    # Optionally redirect httplib's debug output into self.http_log.
    self._capture_output()
    resp = session.get(self._get_url(session, uri),
                       headers=headers,
                       json=json,
                       params=self._utf8_urlencode(params),
                       allow_redirects=allow_redirects,
                       timeout=self._get_timeout(timeout),
                       cookies=self.cookies,
                       verify=self.verify)
    # Flush captured debug output to the Robot log.
    self._print_debug()
    # Store the last session object
    session.last_resp = resp
    return resp
def _body_request(
        self,
        method_name,
        session,
        uri,
        data,
        json,
        params,
        files,
        headers,
        allow_redirects,
        timeout):
    """Low-level body-carrying request (POST/PUT/PATCH).

    ``method_name`` names the requests.Session method to invoke
    ("post", "put" or "patch"); the remaining arguments are forwarded
    to it largely unchanged.
    """
    self._capture_output()
    # Dispatch dynamically so POST/PUT/PATCH share one implementation.
    method = getattr(session, method_name)
    resp = method(self._get_url(session, uri),
                  data=data,
                  json=json,
                  params=self._utf8_urlencode(params),
                  files=files,
                  headers=headers,
                  allow_redirects=allow_redirects,
                  timeout=self._get_timeout(timeout),
                  cookies=self.cookies,
                  verify=self.verify)
    self._print_debug()
    # Store the last session object
    session.last_resp = resp
    self.builtin.log(method_name + ' response: ' + resp.text, 'DEBUG')
    return resp
def _delete_request(
        self,
        session,
        uri,
        data,
        json,
        params,
        headers,
        allow_redirects,
        timeout):
    """Low-level DELETE: perform the request on *session* and return the response."""
    self._capture_output()
    resp = session.delete(self._get_url(session, uri),
                          data=data,
                          json=json,
                          params=self._utf8_urlencode(params),
                          headers=headers,
                          allow_redirects=allow_redirects,
                          timeout=self._get_timeout(timeout),
                          cookies=self.cookies,
                          verify=self.verify)
    self._print_debug()
    # Store the last session object
    session.last_resp = resp
    return resp
def _head_request(self, session, uri, headers, allow_redirects, timeout):
    """Low-level HEAD: perform the request on *session* and return the response."""
    self._capture_output()
    resp = session.head(self._get_url(session, uri),
                        headers=headers,
                        allow_redirects=allow_redirects,
                        timeout=self._get_timeout(timeout),
                        cookies=self.cookies,
                        verify=self.verify)
    self._print_debug()
    # Store the last session object
    session.last_resp = resp
    return resp
def _options_request(
        self,
        session,
        uri,
        headers,
        allow_redirects,
        timeout):
    """Low-level OPTIONS: perform the request on *session* and return the response."""
    self._capture_output()
    resp = session.options(self._get_url(session, uri),
                           headers=headers,
                           cookies=self.cookies,
                           allow_redirects=allow_redirects,
                           timeout=self._get_timeout(timeout),
                           verify=self.verify)
    self._print_debug()
    # Store the last session object
    session.last_resp = resp
    return resp
def _get_url(self, session, uri):
"""
Helper method to get the full url
"""
url = session.url
if uri:
slash = '' if uri.startswith('/') else '/'
url = "%s%s%s" % (session.url, slash, uri)
return url
def _get_timeout(self, timeout):
return float(timeout) if timeout is not None else self.timeout
def _capture_output(self):
    """When debug is enabled, start capturing stdout (httplib debug output)
    into a fresh WritableObject buffer; _print_debug() restores stdout."""
    if self.debug >= 1:
        self.http_log = WritableObject()
        sys.stdout = self.http_log
def _print_debug(self):
    """Restore stdout and emit the HTTP debug output captured by
    _capture_output() to the Robot log at DEBUG level.

    The captured httplib output contains repr-style artifacts; the
    replace/decode chain strips literal "\\r" sequences and quotes
    (Python 2 additionally needs a 'string_escape' decode).
    """
    if self.debug >= 1:
        sys.stdout = sys.__stdout__  # Restore stdout
        if PY3:
            debug_info = ''.join(
                self.http_log.content).replace(
                '\\r',
                '').replace(
                '\'',
                '')
        else:
            debug_info = ''.join(
                self.http_log.content).replace(
                '\\r',
                '').decode('string_escape').replace(
                '\'',
                '')
        # Remove empty lines
        debug_info = "\n".join(
            [ll.rstrip() for ll in debug_info.splitlines() if ll.strip()])
        self.builtin.log(debug_info, 'DEBUG')
def _json_pretty_print(self, content):
    """Pretty print a JSON object.

    ``content`` JSON string to parse and re-serialize with sorted keys
    and 4-space indentation. Returns the formatted string.
    """
    parsed = json.loads(content)
    return json.dumps(parsed,
                      sort_keys=True,
                      indent=4,
                      separators=(',', ': '))
def _utf8_urlencode(self, data):
    """URL-encode a dict after UTF-8-encoding its string values.

    Bare strings are returned UTF-8 encoded; any non-dict value is
    passed through unchanged.
    """
    if self._is_string_type(data):
        return data.encode('utf-8')
    if not isinstance(data, dict):
        return data
    encoded = {
        key: (value.encode('utf-8') if self._is_string_type(value) else value)
        for key, value in data.items()
    }
    return urlencode(encoded)
def _format_data_according_to_header(self, session, data, headers):
    """Serialize *data* to match the request's Content-Type.

    With a JSON content type the data is json.dumps'd (generators are
    left alone so requests can stream them); with a form content type it
    is UTF-8 urlencoded; in every other case it falls through to
    urlencoding. Data that already parses as JSON is sent untouched.
    """
    # Per-request headers merged with the session defaults (see
    # _merge_headers for the precedence caveat).
    headers = self._merge_headers(session, headers)
    if data is not None and headers is not None and 'Content-Type' in headers and not self._is_json(data):
        if headers['Content-Type'].find("application/json") != -1:
            if not isinstance(data, types.GeneratorType):
                data = json.dumps(data)
        elif headers['Content-Type'].find("application/x-www-form-urlencoded") != -1:
            data = self._utf8_urlencode(data)
    else:
        data = self._utf8_urlencode(data)
    return data
def _format_data_to_log_string_according_to_header(self, data, headers):
    """Render *data* for logging based on the Content-Type header.

    JSON and form-encoded bodies are logged verbatim (bytes decoded as
    UTF-8); other content types are logged as "<content-type>"; missing
    data/headers log as "<empty>".
    """
    if data is None or headers is None or 'Content-Type' not in headers:
        return "<empty>"
    content_type = headers['Content-Type']
    is_textual = (content_type.find("application/json") != -1
                  or content_type.find("application/x-www-form-urlencoded") != -1)
    if not is_textual:
        return "<" + content_type + ">"
    if isinstance(data, bytes):
        return data.decode('utf-8')
    return data
@staticmethod
def _merge_headers(session, headers):
    """Return per-request *headers* merged with the session's defaults.

    The caller's dict is copied, never mutated.

    NOTE(review): session headers are applied LAST, so a session-level
    header silently overrides a per-request header of the same name —
    the opposite of requests' own per-request precedence. Used for
    Content-Type sniffing in _format_data_according_to_header; confirm
    this priority is intentional.
    """
    if headers is None:
        headers = {}
    else:
        # Copy so the caller's dict is not mutated by update() below.
        headers = headers.copy()
    headers.update(session.headers)
    return headers
@staticmethod
def _is_json(data):
    """Return True when *data* parses as JSON, False otherwise."""
    try:
        json.loads(data)
        return True
    except (TypeError, ValueError):
        return False
@staticmethod
def _is_string_type(data):
    """Return True for text strings (``str`` on Py3, ``unicode`` on Py2)."""
    if PY3:
        return isinstance(data, str)
    return isinstance(data, unicode)
|
bulkan/robotframework-requests
|
src/RequestsLibrary/RequestsKeywords.py
|
RequestsKeywords.to_json
|
python
|
def to_json(self, content, pretty_print=False):
    """Convert a string to a JSON object.

    ``content`` String content to convert into JSON
    ``pretty_print`` If truthy, return the pretty-printed JSON *string*
    instead of the parsed object.
    """
    if PY3 and isinstance(content, bytes):
        content = content.decode(encoding='utf-8')
    result = self._json_pretty_print(content) if pretty_print else json.loads(content)
    logger.info('To JSON using : content=%s ' % (content))
    logger.info('To JSON using : pretty_print=%s ' % (pretty_print))
    return result
|
Convert a string to a JSON object
``content`` String content to convert into JSON
``pretty_print`` If defined, will output JSON is pretty print format
|
train
|
https://github.com/bulkan/robotframework-requests/blob/11baa3277f1cb728712e26d996200703c15254a8/src/RequestsLibrary/RequestsKeywords.py#L460-L477
|
[
"def _json_pretty_print(self, content):\n \"\"\"\n Pretty print a JSON object\n\n ``content`` JSON object to pretty print\n \"\"\"\n temp = json.loads(content)\n return json.dumps(\n temp,\n sort_keys=True,\n indent=4,\n separators=(\n ',',\n ': '))\n"
] |
class RequestsKeywords(object):
"""``RequestsLibrary`` is a [http://code.google.com/p/robotframework/|Robot Framework] test library that uses the [https://github.com/kennethreitz/requests|Requests] HTTP client.
Here is an example testcase
| ***** Settings ***** | | | | |
| Library | Collections | | | |
| Library | RequestsLibrary | | | |
| ***** Test Cases ***** | | | | |
| Get Requests | | | | |
| | Create Session | github | http://api.github.com | |
| | Create Session | google | http://www.google.com | |
| | ${resp}= | Get Request | google | / |
| | Should Be Equal As Strings | ${resp.status_code} | 200 | |
| | ${resp}= | Get Request | github | /users/bulkan |
| | Should Be Equal As Strings | ${resp.status_code} | 200 | |
| | Dictionary Should Contain Value | ${resp.json()} | Bulkan Savun Evcimen | |
"""
ROBOT_LIBRARY_SCOPE = 'Global'
def __init__(self):
    """Initialize the keyword library with an empty session cache."""
    # Robot Framework connection cache mapping aliases -> requests.Session.
    self._cache = robot.utils.ConnectionCache('No sessions created')
    self.builtin = BuiltIn()
    # httplib verbosity level; raised per-session in _create_session.
    self.debug = 0
def _create_session(
        self,
        alias,
        url,
        headers,
        cookies,
        auth,
        timeout,
        max_retries,
        backoff_factor,
        proxies,
        verify,
        debug,
        disable_warnings):
    """ Create Session: create a HTTP session to a server

    ``url`` Base url of the server
    ``alias`` Robot Framework alias to identify the session
    ``headers`` Dictionary of default headers
    ``cookies`` Dictionary of cookies
    ``auth`` List of username & password for HTTP Basic Auth
    ``timeout`` Connection timeout
    ``max_retries`` The maximum number of retries each connection should attempt.
    ``backoff_factor`` The pause between for each retry
    ``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication
    ``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.
    ``debug`` Enable http verbosity option more information
    https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel
    ``disable_warnings`` Disable requests warning useful when you have large number of testcases
    """
    self.builtin.log('Creating session: %s' % alias, 'DEBUG')
    s = session = requests.Session()
    s.headers.update(headers)
    s.auth = auth if auth else s.auth
    s.proxies = proxies if proxies else s.proxies
    try:
        max_retries = int(max_retries)
    except ValueError as err:
        raise ValueError("Error converting max_retries parameter: %s" % err)
    if max_retries > 0:
        # Mount retry-aware adapters for both schemes.
        http = requests.adapters.HTTPAdapter(max_retries=Retry(total=max_retries, backoff_factor=backoff_factor))
        https = requests.adapters.HTTPAdapter(max_retries=Retry(total=max_retries, backoff_factor=backoff_factor))
        # Replace the session's original adapters
        s.mount('http://', http)
        s.mount('https://', https)
    # Disable requests warnings, useful when you have large number of testcase
    # you will observe drastical changes in Robot log.html and output.xml files size
    if disable_warnings:
        logging.basicConfig()  # you need to initialize logging, otherwise you will not see anything from requests
        logging.getLogger().setLevel(logging.ERROR)
        requests_log = logging.getLogger("requests")
        requests_log.setLevel(logging.ERROR)
        requests_log.propagate = True
        if not verify:
            requests.packages.urllib3.disable_warnings()
    # verify can be a Boolean or a String
    if isinstance(verify, bool):
        s.verify = verify
    elif isinstance(verify, str) or isinstance(verify, unicode):
        # NOTE(review): ``unicode`` is a Python-2 name; on Python 3 this
        # relies on the ``str`` check short-circuiting or on ``unicode``
        # being aliased elsewhere in the module — confirm.
        if verify.lower() == 'true' or verify.lower() == 'false':
            s.verify = self.builtin.convert_to_boolean(verify)
        else:
            # String for CA_BUNDLE, not a Boolean String
            s.verify = verify
    else:
        # not a Boolean nor a String
        s.verify = verify
    # cant pass these into the Session anymore
    self.timeout = float(timeout) if timeout is not None else None
    self.cookies = cookies
    # Only keep a per-request verify override when verify is falsy.
    self.verify = verify if self.builtin.convert_to_boolean(verify) != True else None
    s.url = url
    # Enable http verbosity
    if int(debug) >= 1:
        self.debug = int(debug)
        httplib.HTTPConnection.debuglevel = self.debug
    self._cache.register(session, alias=alias)
    return session
def create_session(self, alias, url, headers={}, cookies={},
                   auth=None, timeout=None, proxies=None,
                   verify=False, debug=0, max_retries=3, backoff_factor=0.10, disable_warnings=0):
    """ Create Session: create a HTTP session to a server

    ``url`` Base url of the server
    ``alias`` Robot Framework alias to identify the session
    ``headers`` Dictionary of default headers
    ``cookies`` Dictionary of cookies
    ``auth`` List of username & password for HTTP Basic Auth
    ``timeout`` Connection timeout
    ``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication
    ``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.
    Defaults to False.
    ``debug`` Enable http verbosity option more information
    https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel
    ``max_retries`` The maximum number of retries each connection should attempt.
    ``backoff_factor`` The pause between for each retry
    ``disable_warnings`` Disable requests warning useful when you have large number of testcases
    """
    # Wrap the (username, password) pair for HTTP Basic Auth.
    auth = requests.auth.HTTPBasicAuth(*auth) if auth else None
    logger.info('Creating Session using : alias=%s, url=%s, headers=%s, \
cookies=%s, auth=%s, timeout=%s, proxies=%s, verify=%s, \
debug=%s ' % (alias, url, headers, cookies, auth, timeout,
              proxies, verify, debug))
    return self._create_session(
        alias,
        url,
        headers,
        cookies,
        auth,
        timeout,
        max_retries,
        backoff_factor,
        proxies,
        verify,
        debug,
        disable_warnings)
def create_custom_session(
        self,
        alias,
        url,
        auth,
        headers={},
        cookies={},
        timeout=None,
        proxies=None,
        verify=False,
        debug=0,
        max_retries=3,
        backoff_factor=0.10,
        disable_warnings=0):
    """ Create Session: create a HTTP session to a server

    ``url`` Base url of the server
    ``alias`` Robot Framework alias to identify the session
    ``headers`` Dictionary of default headers
    ``cookies`` Dictionary of cookies
    ``auth`` A Custom Authentication object to be passed on to the reqests library.
    http://docs.python-requests.org/en/master/user/advanced/#custom-authentication
    ``timeout`` Connection timeout
    ``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication
    ``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.
    Defaults to False.
    ``debug`` Enable http verbosity option more information
    https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel
    ``max_retries`` The maximum number of retries each connection should attempt.
    ``backoff_factor`` The pause between for each retry
    ``disable_warnings`` Disable requests warning useful when you have large number of testcases
    """
    # Unlike Create Session, ``auth`` is passed through verbatim so any
    # requests-compatible auth object can be used.
    logger.info('Creating Custom Authenticated Session using : alias=%s, url=%s, headers=%s, \
cookies=%s, auth=%s, timeout=%s, proxies=%s, verify=%s, \
debug=%s ' % (alias, url, headers, cookies, auth, timeout,
              proxies, verify, debug))
    return self._create_session(
        alias,
        url,
        headers,
        cookies,
        auth,
        timeout,
        max_retries,
        backoff_factor,
        proxies,
        verify,
        debug,
        disable_warnings)
def create_ntlm_session(
        self,
        alias,
        url,
        auth,
        headers={},
        cookies={},
        timeout=None,
        proxies=None,
        verify=False,
        debug=0,
        max_retries=3,
        backoff_factor=0.10,
        disable_warnings=0):
    """ Create Session: create a HTTP session to a server

    ``url`` Base url of the server
    ``alias`` Robot Framework alias to identify the session
    ``headers`` Dictionary of default headers
    ``cookies`` Dictionary of cookies
    ``auth`` ['DOMAIN', 'username', 'password'] for NTLM Authentication
    ``timeout`` Connection timeout
    ``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication
    ``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.
    Defaults to False.
    ``debug`` Enable http verbosity option more information
    https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel
    ``max_retries`` The maximum number of retries each connection should attempt.
    ``backoff_factor`` The pause between for each retry
    ``disable_warnings`` Disable requests warning useful when you have large number of testcases
    """
    # HttpNtlmAuth is an optional dependency; presumably set to a falsy
    # value at import time when requests_ntlm is unavailable — confirm.
    if not HttpNtlmAuth:
        raise AssertionError('Requests NTLM module not loaded')
    elif len(auth) != 3:
        raise AssertionError('Incorrect number of authentication arguments'
                             ' - expected 3, got {}'.format(len(auth)))
    else:
        # NTLM wants the user formatted as DOMAIN\username.
        ntlm_auth = HttpNtlmAuth('{}\\{}'.format(auth[0], auth[1]),
                                 auth[2])
        logger.info('Creating NTLM Session using : alias=%s, url=%s, \
headers=%s, cookies=%s, ntlm_auth=%s, timeout=%s, \
proxies=%s, verify=%s, debug=%s '
                    % (alias, url, headers, cookies, ntlm_auth,
                       timeout, proxies, verify, debug))
        return self._create_session(
            alias,
            url,
            headers,
            cookies,
            ntlm_auth,
            timeout,
            max_retries,
            backoff_factor,
            proxies,
            verify,
            debug,
            disable_warnings)
def create_digest_session(self, alias, url, auth, headers={}, cookies={},
                          timeout=None, proxies=None, verify=False,
                          debug=0, max_retries=3, backoff_factor=0.10, disable_warnings=0):
    """ Create Session: create a HTTP session to a server

    ``url`` Base url of the server
    ``alias`` Robot Framework alias to identify the session
    ``headers`` Dictionary of default headers
    ``cookies`` Dictionary of cookies
    ``auth`` ['username', 'password'] for HTTP Digest Authentication
    ``timeout`` Connection timeout
    ``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication
    ``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.
    Defaults to False.
    ``debug`` Enable http verbosity option more information
    https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel
    ``max_retries`` The maximum number of retries each connection should attempt.
    ``backoff_factor`` The pause between for each retry
    ``disable_warnings`` Disable requests warning useful when you have large number of testcases
    """
    # Wrap the credentials for HTTP Digest Auth.
    digest_auth = requests.auth.HTTPDigestAuth(*auth) if auth else None
    return self._create_session(
        alias,
        url,
        headers,
        cookies,
        digest_auth,
        timeout,
        max_retries,
        backoff_factor,
        proxies,
        verify,
        debug,
        disable_warnings)
def create_client_cert_session(self, alias, url, headers={}, cookies={},
                               client_certs=None, timeout=None, proxies=None,
                               verify=False, debug=0, max_retries=3, backoff_factor=0.10, disable_warnings=0):
    """ Create Session: create a HTTP session to a server

    ``url`` Base url of the server
    ``alias`` Robot Framework alias to identify the session
    ``headers`` Dictionary of default headers
    ``cookies`` Dictionary of cookies
    ``client_certs`` ['client certificate', 'client key'] PEM files containing the client key and certificate
    ``timeout`` Connection timeout
    ``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication
    ``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.
    Defaults to False.
    ``debug`` Enable http verbosity option more information
    https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel
    ``max_retries`` The maximum number of retries each connection should attempt.
    ``backoff_factor`` The pause between for each retry
    ``disable_warnings`` Disable requests warning useful when you have large number of testcases
    """
    logger.info('Creating Session using : alias=%s, url=%s, headers=%s, \
cookies=%s, client_certs=%s, timeout=%s, proxies=%s, verify=%s, \
debug=%s ' % (alias, url, headers, cookies, client_certs, timeout,
              proxies, verify, debug))
    session = self._create_session(
        alias,
        url,
        headers,
        cookies,
        None,
        timeout,
        max_retries,
        backoff_factor,
        proxies,
        verify,
        debug,
        disable_warnings)
    # Attach the (cert, key) pair; requests expects a tuple here.
    session.cert = tuple(client_certs)
    return session
def delete_all_sessions(self):
    """ Removes all the session objects """
    logger.info('Delete All Sessions')
    # Drop every cached requests.Session and forget their aliases.
    self._cache.empty_cache()
def update_session(self, alias, headers=None, cookies=None):
    """Update Session Headers: update a HTTP Session Headers

    ``alias`` Robot Framework alias to identify the session
    ``headers`` Dictionary of headers merge into session
    ``cookies`` Dictionary of cookies merge into session
    """
    session = self._cache.switch(alias)
    # Merge new values into the existing session using requests' own
    # merge helpers (case-insensitive header merge, cookiejar merge).
    session.headers = merge_setting(headers, session.headers)
    session.cookies = merge_cookies(session.cookies, cookies)
def get_request(
        self,
        alias,
        uri,
        headers=None,
        json=None,
        params=None,
        allow_redirects=None,
        timeout=None):
    """ Send a GET request on the session object found using the
    given `alias`

    ``alias`` that will be used to identify the Session object in the cache
    ``uri`` to send the GET request to
    ``params`` url parameters to append to the uri
    ``headers`` a dictionary of headers to use with the request
    ``json`` json data to send in the body of the :class:`Request`.
    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
    ``timeout`` connection timeout
    """
    session = self._cache.switch(alias)
    # Redirect following defaults to True for GET.
    redir = True if allow_redirects is None else allow_redirects
    response = self._get_request(
        session, uri, params, headers, json, redir, timeout)
    logger.info(
        'Get Request using : alias=%s, uri=%s, headers=%s json=%s' %
        (alias, uri, headers, json))
    return response
def get(
        self,
        alias,
        uri,
        params=None,
        headers=None,
        allow_redirects=None,
        timeout=None):
    """ **Deprecated- See Get Request now**

    Send a GET request on the session object found using the
    given `alias`

    ``alias`` that will be used to identify the Session object in the cache
    ``uri`` to send the GET request to
    ``headers`` a dictionary of headers to use with the request
    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
    ``timeout`` connection timeout
    """
    logger.warn("Deprecation Warning: Use Get Request in the future")
    session = self._cache.switch(alias)
    redir = True if allow_redirects is None else allow_redirects
    # Bug fix: arguments were previously passed as
    # (..., headers, redir, timeout, json), sending ``redir`` as the JSON
    # body, ``timeout`` as allow_redirects and the ``json`` MODULE as the
    # timeout. Match _get_request's real signature:
    # (session, uri, params, headers, json, allow_redirects, timeout).
    response = self._get_request(
        session, uri, params, headers, None, redir, timeout)
    return response
def post_request(
        self,
        alias,
        uri,
        data=None,
        json=None,
        params=None,
        headers=None,
        files=None,
        allow_redirects=None,
        timeout=None):
    """ Send a POST request on the session object found using the
    given `alias`

    ``alias`` that will be used to identify the Session object in the cache
    ``uri`` to send the POST request to
    ``data`` a dictionary of key-value pairs that will be urlencoded
    and sent as POST data
    or binary data that is sent as the raw body content
    or passed as such for multipart form data if ``files`` is also
    defined
    ``json`` a value that will be json encoded
    and sent as POST data if files or data is not specified
    ``params`` url parameters to append to the uri
    ``headers`` a dictionary of headers to use with the request
    ``files`` a dictionary of file names containing file data to POST to the server
    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
    ``timeout`` connection timeout
    """
    session = self._cache.switch(alias)
    # For multipart uploads the raw data must be left untouched so
    # requests can build the multipart body itself.
    if not files:
        data = self._format_data_according_to_header(session, data, headers)
    redir = True if allow_redirects is None else allow_redirects
    response = self._body_request(
        "post",
        session,
        uri,
        data,
        json,
        params,
        files,
        headers,
        redir,
        timeout)
    # Render the body as loggable text (or a placeholder) for the log line.
    dataStr = self._format_data_to_log_string_according_to_header(data, headers)
    logger.info('Post Request using : alias=%s, uri=%s, data=%s, headers=%s, files=%s, allow_redirects=%s '
                % (alias, uri, dataStr, headers, files, redir))
    return response
def post(
        self,
        alias,
        uri,
        data={},
        headers=None,
        files=None,
        allow_redirects=None,
        timeout=None):
    """ **Deprecated- See Post Request now**

    Send a POST request on the session object found using the
    given `alias`

    ``alias`` that will be used to identify the Session object in the cache
    ``uri`` to send the GET request to
    ``data`` a dictionary of key-value pairs that will be urlencoded
    and sent as POST data
    or binary data that is sent as the raw body content
    ``headers`` a dictionary of headers to use with the request
    ``files`` a dictionary of file names containing file data to POST to the server
    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
    ``timeout`` connection timeout
    """
    logger.warn("Deprecation Warning: Use Post Request in the future")
    session = self._cache.switch(alias)
    # Legacy behavior: body is always urlencoded, ignoring Content-Type.
    data = self._utf8_urlencode(data)
    redir = True if allow_redirects is None else allow_redirects
    response = self._body_request(
        "post",
        session,
        uri,
        data,
        None,
        None,
        files,
        headers,
        redir,
        timeout)
    return response
def patch_request(
        self,
        alias,
        uri,
        data=None,
        json=None,
        params=None,
        headers=None,
        files=None,
        allow_redirects=None,
        timeout=None):
    """ Send a PATCH request on the session object found using the
    given `alias`

    ``alias`` that will be used to identify the Session object in the cache
    ``uri`` to send the PATCH request to
    ``data`` a dictionary of key-value pairs that will be urlencoded
    and sent as PATCH data
    or binary data that is sent as the raw body content
    ``json`` a value that will be json encoded
    and sent as PATCH data if data is not specified
    ``headers`` a dictionary of headers to use with the request
    ``files`` a dictionary of file names containing file data to PATCH to the server
    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
    ``params`` url parameters to append to the uri
    ``timeout`` connection timeout
    """
    session = self._cache.switch(alias)
    # Serialize/urlencode the body according to the merged Content-Type header.
    data = self._format_data_according_to_header(session, data, headers)
    redir = True if allow_redirects is None else allow_redirects
    response = self._body_request(
        "patch",
        session,
        uri,
        data,
        json,
        params,
        files,
        headers,
        redir,
        timeout)
    # Decode bytes only so the body can be logged as text below.
    if isinstance(data, bytes):
        data = data.decode('utf-8')
    logger.info('Patch Request using : alias=%s, uri=%s, data=%s, \
headers=%s, files=%s, allow_redirects=%s '
                % (alias, uri, data, headers, files, redir))
    return response
def patch(
        self,
        alias,
        uri,
        data={},
        headers=None,
        files={},
        allow_redirects=None,
        timeout=None):
    """ **Deprecated- See Patch Request now**

    Send a PATCH request on the session object found using the
    given `alias`

    ``alias`` that will be used to identify the Session object in the cache
    ``uri`` to send the PATCH request to
    ``data`` a dictionary of key-value pairs that will be urlencoded
    and sent as PATCH data
    or binary data that is sent as the raw body content
    ``headers`` a dictionary of headers to use with the request
    ``files`` a dictionary of file names containing file data to PATCH to the server
    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
    ``timeout`` connection timeout
    """
    logger.warn("Deprecation Warning: Use Patch Request in the future")
    session = self._cache.switch(alias)
    # Legacy behavior: body is always urlencoded, ignoring Content-Type.
    data = self._utf8_urlencode(data)
    redir = True if allow_redirects is None else allow_redirects
    response = self._body_request(
        "patch",
        session,
        uri,
        data,
        None,
        None,
        files,
        headers,
        redir,
        timeout)
    return response
def put_request(
        self,
        alias,
        uri,
        data=None,
        json=None,
        params=None,
        files=None,
        headers=None,
        allow_redirects=None,
        timeout=None):
    """ Send a PUT request on the session object found using the
    given `alias`

    ``alias`` that will be used to identify the Session object in the cache
    ``uri`` to send the PUT request to
    ``data`` a dictionary of key-value pairs that will be urlencoded
    and sent as PUT data
    or binary data that is sent as the raw body content
    ``json`` a value that will be json encoded
    and sent as PUT data if data is not specified
    ``headers`` a dictionary of headers to use with the request
    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
    ``params`` url parameters to append to the uri
    ``timeout`` connection timeout
    """
    session = self._cache.switch(alias)
    # Serialize/urlencode the body according to the merged Content-Type header.
    data = self._format_data_according_to_header(session, data, headers)
    redir = True if allow_redirects is None else allow_redirects
    response = self._body_request(
        "put",
        session,
        uri,
        data,
        json,
        params,
        files,
        headers,
        redir,
        timeout)
    # Decode bytes only so the body can be logged as text below.
    if isinstance(data, bytes):
        data = data.decode('utf-8')
    logger.info('Put Request using : alias=%s, uri=%s, data=%s, \
headers=%s, allow_redirects=%s ' % (alias, uri, data, headers, redir))
    return response
def put(
        self,
        alias,
        uri,
        data=None,
        headers=None,
        allow_redirects=None,
        timeout=None):
    """ **Deprecated- See Put Request now**

    Send a PUT request on the session object found using the
    given `alias`

    ``alias`` that will be used to identify the Session object in the cache
    ``uri`` to send the PUT request to
    ``headers`` a dictionary of headers to use with the request
    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
    ``timeout`` connection timeout
    """
    logger.warn("Deprecation Warning: Use Put Request in the future")
    session = self._cache.switch(alias)
    # Legacy behavior: body is always urlencoded, ignoring Content-Type.
    data = self._utf8_urlencode(data)
    redir = True if allow_redirects is None else allow_redirects
    response = self._body_request(
        "put",
        session,
        uri,
        data,
        None,
        None,
        None,
        headers,
        redir,
        timeout)
    return response
def delete_request(
        self,
        alias,
        uri,
        data=None,
        json=None,
        params=None,
        headers=None,
        allow_redirects=None,
        timeout=None):
    """ Send a DELETE request on the session object found using the
    given `alias`

    ``alias`` that will be used to identify the Session object in the cache
    ``uri`` to send the DELETE request to
    ``json`` a value that will be json encoded
    and sent as request data if data is not specified
    ``headers`` a dictionary of headers to use with the request
    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
    ``timeout`` connection timeout
    """
    session = self._cache.switch(alias)
    # Serialize/urlencode the body according to the merged Content-Type header.
    data = self._format_data_according_to_header(session, data, headers)
    # Redirect following defaults to True for DELETE.
    redir = True if allow_redirects is None else allow_redirects
    response = self._delete_request(
        session, uri, data, json, params, headers, redir, timeout)
    # Decode bytes only so the body can be logged as text below.
    if isinstance(data, bytes):
        data = data.decode('utf-8')
    logger.info('Delete Request using : alias=%s, uri=%s, data=%s, \
headers=%s, allow_redirects=%s ' % (alias, uri, data, headers, redir))
    return response
def delete(
self,
alias,
uri,
data=(),
headers=None,
allow_redirects=None,
timeout=None):
""" * * * Deprecated- See Delete Request now * * *
Send a DELETE request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the DELETE request to
``headers`` a dictionary of headers to use with the request
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``timeout`` connection timeout
"""
logger.warn("Deprecation Warning: Use Delete Request in the future")
session = self._cache.switch(alias)
data = self._utf8_urlencode(data)
redir = True if allow_redirects is None else allow_redirects
response = self._delete_request(
session, uri, data, json, None, headers, redir, timeout)
return response
def head_request(
self,
alias,
uri,
headers=None,
allow_redirects=None,
timeout=None):
""" Send a HEAD request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the HEAD request to
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``headers`` a dictionary of headers to use with the request
"""
session = self._cache.switch(alias)
redir = False if allow_redirects is None else allow_redirects
response = self._head_request(session, uri, headers, redir, timeout)
logger.info('Head Request using : alias=%s, uri=%s, headers=%s, \
allow_redirects=%s ' % (alias, uri, headers, redir))
return response
def head(
self,
alias,
uri,
headers=None,
allow_redirects=None,
timeout=None):
""" **Deprecated- See Head Request now**
Send a HEAD request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the HEAD request to
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``headers`` a dictionary of headers to use with the request
"""
logger.warn("Deprecation Warning: Use Head Request in the future")
session = self._cache.switch(alias)
redir = False if allow_redirects is None else allow_redirects
response = self._head_request(session, uri, headers, redir, timeout)
return response
def options_request(
self,
alias,
uri,
headers=None,
allow_redirects=None,
timeout=None):
""" Send an OPTIONS request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the OPTIONS request to
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``headers`` a dictionary of headers to use with the request
"""
session = self._cache.switch(alias)
redir = True if allow_redirects is None else allow_redirects
response = self._options_request(session, uri, headers, redir, timeout)
logger.info(
'Options Request using : alias=%s, uri=%s, headers=%s, allow_redirects=%s ' %
(alias, uri, headers, redir))
return response
def options(
self,
alias,
uri,
headers=None,
allow_redirects=None,
timeout=None):
""" **Deprecated- See Options Request now**
Send an OPTIONS request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the OPTIONS request to
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``headers`` a dictionary of headers to use with the request
"""
logger.warn("Deprecation Warning: Use Options Request in the future")
session = self._cache.switch(alias)
redir = True if allow_redirects is None else allow_redirects
response = self._options_request(session, uri, headers, redir, timeout)
return response
    def _get_request(
            self,
            session,
            uri,
            params,
            headers,
            json,
            allow_redirects,
            timeout):
        # Low-level GET. NOTE: callers pass these positionally -- the order is
        # (params, headers, json, allow_redirects, timeout).
        self._capture_output()

        resp = session.get(self._get_url(session, uri),
                           headers=headers,
                           json=json,
                           params=self._utf8_urlencode(params),
                           allow_redirects=allow_redirects,
                           timeout=self._get_timeout(timeout),
                           cookies=self.cookies,
                           verify=self.verify)

        # Flush any captured httplib debug output to the Robot log.
        self._print_debug()
        # Store the last session object
        session.last_resp = resp

        return resp
    def _body_request(
            self,
            method_name,
            session,
            uri,
            data,
            json,
            params,
            files,
            headers,
            allow_redirects,
            timeout):
        # Shared dispatcher for body-carrying verbs: ``method_name`` is one of
        # "post", "put" or "patch" and is looked up on the session object.
        self._capture_output()

        method = getattr(session, method_name)
        resp = method(self._get_url(session, uri),
                      data=data,
                      json=json,
                      params=self._utf8_urlencode(params),
                      files=files,
                      headers=headers,
                      allow_redirects=allow_redirects,
                      timeout=self._get_timeout(timeout),
                      cookies=self.cookies,
                      verify=self.verify)

        self._print_debug()
        # Store the last session object
        session.last_resp = resp

        # Body logged at DEBUG level only; can be large.
        self.builtin.log(method_name + ' response: ' + resp.text, 'DEBUG')

        return resp
    def _delete_request(
            self,
            session,
            uri,
            data,
            json,
            params,
            headers,
            allow_redirects,
            timeout):
        # Low-level DELETE. DELETE may carry a body (``data``/``json``), so it
        # gets its own helper instead of going through _body_request.
        self._capture_output()

        resp = session.delete(self._get_url(session, uri),
                              data=data,
                              json=json,
                              params=self._utf8_urlencode(params),
                              headers=headers,
                              allow_redirects=allow_redirects,
                              timeout=self._get_timeout(timeout),
                              cookies=self.cookies,
                              verify=self.verify)

        self._print_debug()
        # Store the last session object
        session.last_resp = resp

        return resp
def _head_request(self, session, uri, headers, allow_redirects, timeout):
self._capture_output()
resp = session.head(self._get_url(session, uri),
headers=headers,
allow_redirects=allow_redirects,
timeout=self._get_timeout(timeout),
cookies=self.cookies,
verify=self.verify)
self._print_debug()
# Store the last session object
session.last_resp = resp
return resp
def _options_request(
self,
session,
uri,
headers,
allow_redirects,
timeout):
self._capture_output()
resp = session.options(self._get_url(session, uri),
headers=headers,
cookies=self.cookies,
allow_redirects=allow_redirects,
timeout=self._get_timeout(timeout),
verify=self.verify)
self._print_debug()
# Store the last session object
session.last_resp = resp
return resp
def _get_url(self, session, uri):
"""
Helper method to get the full url
"""
url = session.url
if uri:
slash = '' if uri.startswith('/') else '/'
url = "%s%s%s" % (session.url, slash, uri)
return url
def _get_timeout(self, timeout):
return float(timeout) if timeout is not None else self.timeout
    def _capture_output(self):
        # When debugging is on, hijack sys.stdout so httplib's debug prints
        # are collected into self.http_log instead of the console.
        # _print_debug() restores stdout and forwards the captured text.
        if self.debug >= 1:
            self.http_log = WritableObject()
            sys.stdout = self.http_log
    def _print_debug(self):
        # Counterpart of _capture_output(): restore stdout, clean up the
        # captured httplib debug text and forward it to the Robot log.
        if self.debug >= 1:
            sys.stdout = sys.__stdout__  # Restore stdout
            if PY3:
                # On Python 3 the captured chunks are repr-like text; strip
                # literal "\r" sequences and quote characters.
                debug_info = ''.join(
                    self.http_log.content).replace(
                    '\\r',
                    '').replace(
                    '\'',
                    '')
            else:
                # Python 2 additionally needs 'string_escape' decoding to turn
                # escaped sequences back into real characters.
                debug_info = ''.join(
                    self.http_log.content).replace(
                    '\\r',
                    '').decode('string_escape').replace(
                    '\'',
                    '')

            # Remove empty lines
            debug_info = "\n".join(
                [ll.rstrip() for ll in debug_info.splitlines() if ll.strip()])
            self.builtin.log(debug_info, 'DEBUG')
def _json_pretty_print(self, content):
"""
Pretty print a JSON object
``content`` JSON object to pretty print
"""
temp = json.loads(content)
return json.dumps(
temp,
sort_keys=True,
indent=4,
separators=(
',',
': '))
    def _utf8_urlencode(self, data):
        # Text strings pass through UTF-8 encoded; anything that is not a dict
        # is returned untouched (already-encoded bodies, tuples, None, ...).
        if self._is_string_type(data):
            return data.encode('utf-8')

        if not isinstance(data, dict):
            return data

        # Dicts: UTF-8 encode the text values, then form-urlencode the whole
        # mapping (``urlencode`` comes from the module's py2/py3 compat import).
        utf8_data = {}
        for k, v in data.items():
            if self._is_string_type(v):
                v = v.encode('utf-8')
            utf8_data[k] = v
        return urlencode(utf8_data)
    def _format_data_according_to_header(self, session, data, headers):
        # Pick a body encoding based on the merged (request + session)
        # Content-Type. Data that already parses as JSON is left alone.
        headers = self._merge_headers(session, headers)
        if data is not None and headers is not None and 'Content-Type' in headers and not self._is_json(data):
            if headers['Content-Type'].find("application/json") != -1:
                # Generators cannot be json.dumps-ed; stream them as-is.
                if not isinstance(data, types.GeneratorType):
                    data = json.dumps(data)
            elif headers['Content-Type'].find("application/x-www-form-urlencoded") != -1:
                data = self._utf8_urlencode(data)
        else:
            # No usable Content-Type (or data already JSON): form-urlencode
            # by default; non-dict data passes through unchanged.
            data = self._utf8_urlencode(data)

        return data
def _format_data_to_log_string_according_to_header(self, data, headers):
dataStr = "<empty>"
if data is not None and headers is not None and 'Content-Type' in headers:
if (headers['Content-Type'].find("application/json") != -1) or \
(headers['Content-Type'].find("application/x-www-form-urlencoded") != -1):
if isinstance(data, bytes):
dataStr = data.decode('utf-8')
else:
dataStr = data
else:
dataStr = "<" + headers['Content-Type'] + ">"
return dataStr
@staticmethod
def _merge_headers(session, headers):
if headers is None:
headers = {}
else:
headers = headers.copy()
headers.update(session.headers)
return headers
@staticmethod
def _is_json(data):
try:
json.loads(data)
except (TypeError, ValueError):
return False
return True
    @staticmethod
    def _is_string_type(data):
        # True for text strings on either major Python version: ``str`` on
        # Python 3, ``unicode`` on Python 2. ``PY3`` and ``unicode`` are
        # module-level compatibility globals set at import time.
        if PY3 and isinstance(data, str):
            return True
        elif not PY3 and isinstance(data, unicode):
            return True
        return False
|
bulkan/robotframework-requests
|
src/RequestsLibrary/RequestsKeywords.py
|
RequestsKeywords.get_request
|
python
|
def get_request(
self,
alias,
uri,
headers=None,
json=None,
params=None,
allow_redirects=None,
timeout=None):
session = self._cache.switch(alias)
redir = True if allow_redirects is None else allow_redirects
response = self._get_request(
session, uri, params, headers, json, redir, timeout)
logger.info(
'Get Request using : alias=%s, uri=%s, headers=%s json=%s' %
(alias, uri, headers, json))
return response
|
Send a GET request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the GET request to
``params`` url parameters to append to the uri
``headers`` a dictionary of headers to use with the request
``json`` json data to send in the body of the :class:`Request`.
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``timeout`` connection timeout
|
train
|
https://github.com/bulkan/robotframework-requests/blob/11baa3277f1cb728712e26d996200703c15254a8/src/RequestsLibrary/RequestsKeywords.py#L479-L515
|
[
"def _get_request(\n self,\n session,\n uri,\n params,\n headers,\n json,\n allow_redirects,\n timeout):\n self._capture_output()\n\n resp = session.get(self._get_url(session, uri),\n headers=headers,\n json=json,\n params=self._utf8_urlencode(params),\n allow_redirects=allow_redirects,\n timeout=self._get_timeout(timeout),\n cookies=self.cookies,\n verify=self.verify)\n\n self._print_debug()\n # Store the last session object\n session.last_resp = resp\n\n return resp\n"
] |
class RequestsKeywords(object):
"""``RequestsLibrary`` is a [http://code.google.com/p/robotframework/|Robot Framework] test library that uses the [https://github.com/kennethreitz/requests|Requests] HTTP client.
Here is an example testcase
| ***** Settings ***** | | | | |
| Library | Collections | | | |
| Library | RequestsLibrary | | | |
| ***** Test Cases ***** | | | | |
| Get Requests | | | | |
| | Create Session | github | http://api.github.com | |
| | Create Session | google | http://www.google.com | |
| | ${resp}= | Get Request | google | / |
| | Should Be Equal As Strings | ${resp.status_code} | 200 | |
| | ${resp}= | Get Request | github | /users/bulkan |
| | Should Be Equal As Strings | ${resp.status_code} | 200 | |
| | Dictionary Should Contain Value | ${resp.json()} | Bulkan Savun Evcimen | |
"""
ROBOT_LIBRARY_SCOPE = 'Global'
    def __init__(self):
        # Robot Framework cache keyed by session alias; the message is raised
        # when a keyword asks for a session that was never created.
        self._cache = robot.utils.ConnectionCache('No sessions created')
        self.builtin = BuiltIn()
        # httplib wire-level verbosity; raised per session via the ``debug``
        # argument of the Create * Session keywords.
        self.debug = 0
    def _create_session(
            self,
            alias,
            url,
            headers,
            cookies,
            auth,
            timeout,
            max_retries,
            backoff_factor,
            proxies,
            verify,
            debug,
            disable_warnings):
        """ Create Session: create a HTTP session to a server

        ``url`` Base url of the server

        ``alias`` Robot Framework alias to identify the session

        ``headers`` Dictionary of default headers

        ``cookies`` Dictionary of cookies

        ``auth`` List of username & password for HTTP Basic Auth

        ``timeout`` Connection timeout

        ``max_retries`` The maximum number of retries each connection should attempt.

        ``backoff_factor`` The pause between for each retry

        ``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication

        ``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.

        ``debug`` Enable http verbosity option more information
                https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel

        ``disable_warnings`` Disable requests warning useful when you have large number of testcases
        """
        self.builtin.log('Creating session: %s' % alias, 'DEBUG')
        s = session = requests.Session()
        s.headers.update(headers)
        s.auth = auth if auth else s.auth
        s.proxies = proxies if proxies else s.proxies

        # max_retries may arrive as a Robot string argument.
        try:
            max_retries = int(max_retries)
        except ValueError as err:
            raise ValueError("Error converting max_retries parameter: %s" % err)

        if max_retries > 0:
            http = requests.adapters.HTTPAdapter(max_retries=Retry(total=max_retries, backoff_factor=backoff_factor))
            https = requests.adapters.HTTPAdapter(max_retries=Retry(total=max_retries, backoff_factor=backoff_factor))

            # Replace the session's original adapters
            s.mount('http://', http)
            s.mount('https://', https)

        # Disable requests warnings, useful when you have large number of testcase
        # you will observe drastical changes in Robot log.html and output.xml files size
        if disable_warnings:
            logging.basicConfig()  # you need to initialize logging, otherwise you will not see anything from requests
            logging.getLogger().setLevel(logging.ERROR)
            requests_log = logging.getLogger("requests")
            requests_log.setLevel(logging.ERROR)
            requests_log.propagate = True
            if not verify:
                requests.packages.urllib3.disable_warnings()

        # verify can be a Boolean or a String
        if isinstance(verify, bool):
            s.verify = verify
        elif isinstance(verify, str) or isinstance(verify, unicode):
            if verify.lower() == 'true' or verify.lower() == 'false':
                s.verify = self.builtin.convert_to_boolean(verify)
            else:
                # String for CA_BUNDLE, not a Boolean String
                s.verify = verify
        else:
            # not a Boolean nor a String
            s.verify = verify

        # cant pass these into the Session anymore
        self.timeout = float(timeout) if timeout is not None else None
        self.cookies = cookies
        # Per-request verify override is only kept when it is not simply True;
        # None makes requests fall back to the session-level setting.
        self.verify = verify if self.builtin.convert_to_boolean(verify) != True else None

        # Base url remembered on the session; _get_url() joins uris onto it.
        s.url = url

        # Enable http verbosity
        if int(debug) >= 1:
            self.debug = int(debug)
            httplib.HTTPConnection.debuglevel = self.debug

        self._cache.register(session, alias=alias)
        return session
    def create_session(self, alias, url, headers={}, cookies={},
                       auth=None, timeout=None, proxies=None,
                       verify=False, debug=0, max_retries=3, backoff_factor=0.10, disable_warnings=0):
        """ Create Session: create a HTTP session to a server

        ``url`` Base url of the server

        ``alias`` Robot Framework alias to identify the session

        ``headers`` Dictionary of default headers

        ``cookies`` Dictionary of cookies

        ``auth`` List of username & password for HTTP Basic Auth

        ``timeout`` Connection timeout

        ``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication

        ``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.
                 Defaults to False.

        ``debug`` Enable http verbosity option more information
                https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel

        ``max_retries`` The maximum number of retries each connection should attempt.

        ``backoff_factor`` The pause between for each retry

        ``disable_warnings`` Disable requests warning useful when you have large number of testcases
        """
        # Wrap the [user, password] list in a requests Basic Auth object.
        auth = requests.auth.HTTPBasicAuth(*auth) if auth else None

        logger.info('Creating Session using : alias=%s, url=%s, headers=%s, \
                    cookies=%s, auth=%s, timeout=%s, proxies=%s, verify=%s, \
                    debug=%s ' % (alias, url, headers, cookies, auth, timeout,
                                  proxies, verify, debug))

        return self._create_session(
            alias,
            url,
            headers,
            cookies,
            auth,
            timeout,
            max_retries,
            backoff_factor,
            proxies,
            verify,
            debug,
            disable_warnings)
    def create_custom_session(
            self,
            alias,
            url,
            auth,
            headers={},
            cookies={},
            timeout=None,
            proxies=None,
            verify=False,
            debug=0,
            max_retries=3,
            backoff_factor=0.10,
            disable_warnings=0):
        """ Create Session: create a HTTP session to a server

        ``url`` Base url of the server

        ``alias`` Robot Framework alias to identify the session

        ``headers`` Dictionary of default headers

        ``cookies`` Dictionary of cookies

        ``auth`` A Custom Authentication object to be passed on to the reqests library.
                http://docs.python-requests.org/en/master/user/advanced/#custom-authentication

        ``timeout`` Connection timeout

        ``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication

        ``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.
                 Defaults to False.

        ``debug`` Enable http verbosity option more information
                https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel

        ``max_retries`` The maximum number of retries each connection should attempt.

        ``backoff_factor`` The pause between for each retry

        ``disable_warnings`` Disable requests warning useful when you have large number of testcases
        """
        # Unlike Create Session, ``auth`` is passed through untouched so any
        # requests-compatible AuthBase object can be used.
        logger.info('Creating Custom Authenticated Session using : alias=%s, url=%s, headers=%s, \
                    cookies=%s, auth=%s, timeout=%s, proxies=%s, verify=%s, \
                    debug=%s ' % (alias, url, headers, cookies, auth, timeout,
                                  proxies, verify, debug))

        return self._create_session(
            alias,
            url,
            headers,
            cookies,
            auth,
            timeout,
            max_retries,
            backoff_factor,
            proxies,
            verify,
            debug,
            disable_warnings)
    def create_ntlm_session(
            self,
            alias,
            url,
            auth,
            headers={},
            cookies={},
            timeout=None,
            proxies=None,
            verify=False,
            debug=0,
            max_retries=3,
            backoff_factor=0.10,
            disable_warnings=0):
        """ Create Session: create a HTTP session to a server

        ``url`` Base url of the server

        ``alias`` Robot Framework alias to identify the session

        ``headers`` Dictionary of default headers

        ``cookies`` Dictionary of cookies

        ``auth`` ['DOMAIN', 'username', 'password'] for NTLM Authentication

        ``timeout`` Connection timeout

        ``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication

        ``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.
                 Defaults to False.

        ``debug`` Enable http verbosity option more information
                https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel

        ``max_retries`` The maximum number of retries each connection should attempt.

        ``backoff_factor`` The pause between for each retry

        ``disable_warnings`` Disable requests warning useful when you have large number of testcases
        """
        # HttpNtlmAuth is an optional dependency; it is None when the
        # requests_ntlm module could not be imported.
        if not HttpNtlmAuth:
            raise AssertionError('Requests NTLM module not loaded')
        elif len(auth) != 3:
            raise AssertionError('Incorrect number of authentication arguments'
                                 ' - expected 3, got {}'.format(len(auth)))
        else:
            # NTLM expects the user as "DOMAIN\\username".
            ntlm_auth = HttpNtlmAuth('{}\\{}'.format(auth[0], auth[1]),
                                     auth[2])
            logger.info('Creating NTLM Session using : alias=%s, url=%s, \
                        headers=%s, cookies=%s, ntlm_auth=%s, timeout=%s, \
                        proxies=%s, verify=%s, debug=%s '
                        % (alias, url, headers, cookies, ntlm_auth,
                           timeout, proxies, verify, debug))

            return self._create_session(
                alias,
                url,
                headers,
                cookies,
                ntlm_auth,
                timeout,
                max_retries,
                backoff_factor,
                proxies,
                verify,
                debug,
                disable_warnings)
    def create_digest_session(self, alias, url, auth, headers={}, cookies={},
                              timeout=None, proxies=None, verify=False,
                              debug=0, max_retries=3, backoff_factor=0.10, disable_warnings=0):
        """ Create Session: create a HTTP session to a server

        ``url`` Base url of the server

        ``alias`` Robot Framework alias to identify the session

        ``headers`` Dictionary of default headers

        ``cookies`` Dictionary of cookies

        ``auth`` ['username', 'password'] for HTTP Digest Authentication

        ``timeout`` Connection timeout

        ``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication

        ``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.
                 Defaults to False.

        ``debug`` Enable http verbosity option more information
                https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel

        ``max_retries`` The maximum number of retries each connection should attempt.

        ``backoff_factor`` The pause between for each retry

        ``disable_warnings`` Disable requests warning useful when you have large number of testcases
        """
        # Wrap the [user, password] list in a requests Digest Auth object.
        digest_auth = requests.auth.HTTPDigestAuth(*auth) if auth else None

        return self._create_session(
            alias,
            url,
            headers,
            cookies,
            digest_auth,
            timeout,
            max_retries,
            backoff_factor,
            proxies,
            verify,
            debug,
            disable_warnings)
    def create_client_cert_session(self, alias, url, headers={}, cookies={},
                                   client_certs=None, timeout=None, proxies=None,
                                   verify=False, debug=0, max_retries=3, backoff_factor=0.10, disable_warnings=0):
        """ Create Session: create a HTTP session to a server

        ``url`` Base url of the server

        ``alias`` Robot Framework alias to identify the session

        ``headers`` Dictionary of default headers

        ``cookies`` Dictionary of cookies

        ``client_certs`` ['client certificate', 'client key'] PEM files containing the client key and certificate

        ``timeout`` Connection timeout

        ``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication

        ``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.
                 Defaults to False.

        ``debug`` Enable http verbosity option more information
                https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel

        ``max_retries`` The maximum number of retries each connection should attempt.

        ``backoff_factor`` The pause between for each retry

        ``disable_warnings`` Disable requests warning useful when you have large number of testcases
        """
        logger.info('Creating Session using : alias=%s, url=%s, headers=%s, \
                    cookies=%s, client_certs=%s, timeout=%s, proxies=%s, verify=%s, \
                    debug=%s ' % (alias, url, headers, cookies, client_certs, timeout,
                                  proxies, verify, debug))

        session = self._create_session(
            alias,
            url,
            headers,
            cookies,
            None,
            timeout,
            max_retries,
            backoff_factor,
            proxies,
            verify,
            debug,
            disable_warnings)

        # requests expects (cert_file, key_file) as a tuple on session.cert.
        session.cert = tuple(client_certs)
        return session
    def delete_all_sessions(self):
        """ Removes all the session objects """
        logger.info('Delete All Sessions')
        # Drops every cached session; aliases become invalid afterwards.
        self._cache.empty_cache()
    def update_session(self, alias, headers=None, cookies=None):
        """Update Session Headers: update a HTTP Session Headers

        ``alias`` Robot Framework alias to identify the session

        ``headers`` Dictionary of headers merge into session
        """
        session = self._cache.switch(alias)
        # merge_setting / merge_cookies come from requests.sessions and merge
        # the new values into the session's existing ones.
        session.headers = merge_setting(headers, session.headers)
        session.cookies = merge_cookies(session.cookies, cookies)
def to_json(self, content, pretty_print=False):
""" Convert a string to a JSON object
``content`` String content to convert into JSON
``pretty_print`` If defined, will output JSON is pretty print format
"""
if PY3:
if isinstance(content, bytes):
content = content.decode(encoding='utf-8')
if pretty_print:
json_ = self._json_pretty_print(content)
else:
json_ = json.loads(content)
logger.info('To JSON using : content=%s ' % (content))
logger.info('To JSON using : pretty_print=%s ' % (pretty_print))
return json_
def get(
self,
alias,
uri,
params=None,
headers=None,
allow_redirects=None,
timeout=None):
""" **Deprecated- See Get Request now**
Send a GET request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the GET request to
``headers`` a dictionary of headers to use with the request
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``timeout`` connection timeout
"""
logger.warn("Deprecation Warning: Use Get Request in the future")
session = self._cache.switch(alias)
redir = True if allow_redirects is None else allow_redirects
response = self._get_request(
session, uri, params, headers, redir, timeout, json)
return response
    def post_request(
            self,
            alias,
            uri,
            data=None,
            json=None,
            params=None,
            headers=None,
            files=None,
            allow_redirects=None,
            timeout=None):
        """ Send a POST request on the session object found using the
        given `alias`

        ``alias`` that will be used to identify the Session object in the cache

        ``uri`` to send the POST request to

        ``data`` a dictionary of key-value pairs that will be urlencoded
               and sent as POST data
               or binary data that is sent as the raw body content
               or passed as such for multipart form data if ``files`` is also
                  defined

        ``json`` a value that will be json encoded
               and sent as POST data if files or data is not specified

        ``params`` url parameters to append to the uri

        ``headers`` a dictionary of headers to use with the request

        ``files`` a dictionary of file names containing file data to POST to the server

        ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.

        ``timeout`` connection timeout
        """
        session = self._cache.switch(alias)
        # Multipart uploads keep ``data`` raw so requests can build the
        # multipart form itself; otherwise encode per Content-Type.
        if not files:
            data = self._format_data_according_to_header(session, data, headers)
        redir = True if allow_redirects is None else allow_redirects

        response = self._body_request(
            "post",
            session,
            uri,
            data,
            json,
            params,
            files,
            headers,
            redir,
            timeout)
        # Summarize the body for logging (verbatim for text, "<type>" otherwise).
        dataStr = self._format_data_to_log_string_according_to_header(data, headers)
        logger.info('Post Request using : alias=%s, uri=%s, data=%s, headers=%s, files=%s, allow_redirects=%s '
                    % (alias, uri, dataStr, headers, files, redir))
        return response
def post(
self,
alias,
uri,
data={},
headers=None,
files=None,
allow_redirects=None,
timeout=None):
""" **Deprecated- See Post Request now**
Send a POST request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the GET request to
``data`` a dictionary of key-value pairs that will be urlencoded
and sent as POST data
or binary data that is sent as the raw body content
``headers`` a dictionary of headers to use with the request
``files`` a dictionary of file names containing file data to POST to the server
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``timeout`` connection timeout
"""
logger.warn("Deprecation Warning: Use Post Request in the future")
session = self._cache.switch(alias)
data = self._utf8_urlencode(data)
redir = True if allow_redirects is None else allow_redirects
response = self._body_request(
"post",
session,
uri,
data,
None,
None,
files,
headers,
redir,
timeout)
return response
    def patch_request(
            self,
            alias,
            uri,
            data=None,
            json=None,
            params=None,
            headers=None,
            files=None,
            allow_redirects=None,
            timeout=None):
        """ Send a PATCH request on the session object found using the
        given `alias`

        ``alias`` that will be used to identify the Session object in the cache

        ``uri`` to send the PATCH request to

        ``data`` a dictionary of key-value pairs that will be urlencoded
               and sent as PATCH data
               or binary data that is sent as the raw body content

        ``json`` a value that will be json encoded
               and sent as PATCH data if data is not specified

        ``headers`` a dictionary of headers to use with the request

        ``files`` a dictionary of file names containing file data to PATCH to the server

        ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.

        ``params`` url parameters to append to the uri

        ``timeout`` connection timeout
        """
        session = self._cache.switch(alias)
        # Encode the body according to the request/session Content-Type.
        data = self._format_data_according_to_header(session, data, headers)
        redir = True if allow_redirects is None else allow_redirects

        response = self._body_request(
            "patch",
            session,
            uri,
            data,
            json,
            params,
            files,
            headers,
            redir,
            timeout)

        # Decode for logging only -- the request has already been sent.
        if isinstance(data, bytes):
            data = data.decode('utf-8')
        logger.info('Patch Request using : alias=%s, uri=%s, data=%s, \
        headers=%s, files=%s, allow_redirects=%s '
                    % (alias, uri, data, headers, files, redir))

        return response
def patch(
self,
alias,
uri,
data={},
headers=None,
files={},
allow_redirects=None,
timeout=None):
""" **Deprecated- See Patch Request now**
Send a PATCH request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the PATCH request to
``data`` a dictionary of key-value pairs that will be urlencoded
and sent as PATCH data
or binary data that is sent as the raw body content
``headers`` a dictionary of headers to use with the request
``files`` a dictionary of file names containing file data to PATCH to the server
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``timeout`` connection timeout
"""
logger.warn("Deprecation Warning: Use Patch Request in the future")
session = self._cache.switch(alias)
data = self._utf8_urlencode(data)
redir = True if allow_redirects is None else allow_redirects
response = self._body_request(
"patch",
session,
uri,
data,
None,
None,
files,
headers,
redir,
timeout)
return response
    def put_request(
            self,
            alias,
            uri,
            data=None,
            json=None,
            params=None,
            files=None,
            headers=None,
            allow_redirects=None,
            timeout=None):
        """ Send a PUT request on the session object found using the
        given `alias`

        ``alias`` that will be used to identify the Session object in the cache

        ``uri`` to send the PUT request to

        ``data`` a dictionary of key-value pairs that will be urlencoded
               and sent as PUT data
               or binary data that is sent as the raw body content

        ``json`` a value that will be json encoded
               and sent as PUT data if data is not specified

        ``headers`` a dictionary of headers to use with the request

        ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.

        ``params`` url parameters to append to the uri

        ``timeout`` connection timeout
        """
        session = self._cache.switch(alias)
        # Encode the body according to the request/session Content-Type.
        data = self._format_data_according_to_header(session, data, headers)
        redir = True if allow_redirects is None else allow_redirects

        response = self._body_request(
            "put",
            session,
            uri,
            data,
            json,
            params,
            files,
            headers,
            redir,
            timeout)

        # Decode for logging only -- the request has already been sent.
        if isinstance(data, bytes):
            data = data.decode('utf-8')
        logger.info('Put Request using : alias=%s, uri=%s, data=%s, \
        headers=%s, allow_redirects=%s ' % (alias, uri, data, headers, redir))

        return response
def put(
self,
alias,
uri,
data=None,
headers=None,
allow_redirects=None,
timeout=None):
""" **Deprecated- See Put Request now**
Send a PUT request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the PUT request to
``headers`` a dictionary of headers to use with the request
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``timeout`` connection timeout
"""
logger.warn("Deprecation Warning: Use Put Request in the future")
session = self._cache.switch(alias)
data = self._utf8_urlencode(data)
redir = True if allow_redirects is None else allow_redirects
response = self._body_request(
"put",
session,
uri,
data,
None,
None,
None,
headers,
redir,
timeout)
return response
    def delete_request(
            self,
            alias,
            uri,
            data=None,
            json=None,
            params=None,
            headers=None,
            allow_redirects=None,
            timeout=None):
        """ Send a DELETE request on the session object found using the
        given `alias`

        ``alias`` that will be used to identify the Session object in the cache

        ``uri`` to send the DELETE request to

        ``json`` a value that will be json encoded
        and sent as request data if data is not specified

        ``headers`` a dictionary of headers to use with the request

        ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.

        ``timeout`` connection timeout
        """
        session = self._cache.switch(alias)
        # Encode the body according to the request/session Content-Type.
        data = self._format_data_according_to_header(session, data, headers)
        redir = True if allow_redirects is None else allow_redirects
        response = self._delete_request(
            session, uri, data, json, params, headers, redir, timeout)
        # Decode for logging only -- the request has already been sent.
        if isinstance(data, bytes):
            data = data.decode('utf-8')
        logger.info('Delete Request using : alias=%s, uri=%s, data=%s, \
        headers=%s, allow_redirects=%s ' % (alias, uri, data, headers, redir))
        return response
def delete(
self,
alias,
uri,
data=(),
headers=None,
allow_redirects=None,
timeout=None):
""" * * * Deprecated- See Delete Request now * * *
Send a DELETE request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the DELETE request to
``headers`` a dictionary of headers to use with the request
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``timeout`` connection timeout
"""
logger.warn("Deprecation Warning: Use Delete Request in the future")
session = self._cache.switch(alias)
data = self._utf8_urlencode(data)
redir = True if allow_redirects is None else allow_redirects
response = self._delete_request(
session, uri, data, json, None, headers, redir, timeout)
return response
    def head_request(
            self,
            alias,
            uri,
            headers=None,
            allow_redirects=None,
            timeout=None):
        """Send a HEAD request on the session registered under ``alias``.

        ``uri`` path appended to the session base url
        ``headers`` per-request headers
        ``allow_redirects`` follow redirects (defaults to False for HEAD)
        ``timeout`` per-request connection timeout
        """
        session = self._cache.switch(alias)
        # Unlike the other verbs, HEAD does NOT follow redirects by default.
        redir = False if allow_redirects is None else allow_redirects
        response = self._head_request(session, uri, headers, redir, timeout)
        logger.info('Head Request using : alias=%s, uri=%s, headers=%s, \
        allow_redirects=%s ' % (alias, uri, headers, redir))
        return response
def head(
self,
alias,
uri,
headers=None,
allow_redirects=None,
timeout=None):
""" **Deprecated- See Head Request now**
Send a HEAD request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the HEAD request to
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``headers`` a dictionary of headers to use with the request
"""
logger.warn("Deprecation Warning: Use Head Request in the future")
session = self._cache.switch(alias)
redir = False if allow_redirects is None else allow_redirects
response = self._head_request(session, uri, headers, redir, timeout)
return response
    def options_request(
            self,
            alias,
            uri,
            headers=None,
            allow_redirects=None,
            timeout=None):
        """Send an OPTIONS request on the session registered under ``alias``.

        ``uri`` path appended to the session base url
        ``headers`` per-request headers
        ``allow_redirects`` follow redirects (defaults to True)
        ``timeout`` per-request connection timeout
        """
        session = self._cache.switch(alias)
        redir = True if allow_redirects is None else allow_redirects
        response = self._options_request(session, uri, headers, redir, timeout)
        logger.info(
            'Options Request using : alias=%s, uri=%s, headers=%s, allow_redirects=%s ' %
            (alias, uri, headers, redir))
        return response
def options(
self,
alias,
uri,
headers=None,
allow_redirects=None,
timeout=None):
""" **Deprecated- See Options Request now**
Send an OPTIONS request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the OPTIONS request to
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``headers`` a dictionary of headers to use with the request
"""
logger.warn("Deprecation Warning: Use Options Request in the future")
session = self._cache.switch(alias)
redir = True if allow_redirects is None else allow_redirects
response = self._options_request(session, uri, headers, redir, timeout)
return response
    def _get_request(
            self,
            session,
            uri,
            params,
            headers,
            json,
            allow_redirects,
            timeout):
        """Issue the actual GET via *session* and return the raw response."""
        # Mirror http wire traffic into self.http_log while debug is on.
        self._capture_output()
        resp = session.get(self._get_url(session, uri),
                           headers=headers,
                           json=json,
                           params=self._utf8_urlencode(params),
                           allow_redirects=allow_redirects,
                           timeout=self._get_timeout(timeout),
                           cookies=self.cookies,
                           verify=self.verify)
        self._print_debug()
        # Store the last session object
        session.last_resp = resp
        return resp
    def _body_request(
            self,
            method_name,
            session,
            uri,
            data,
            json,
            params,
            files,
            headers,
            allow_redirects,
            timeout):
        """Issue a body-carrying request and return the raw response.

        ``method_name`` names the requests.Session method to invoke
        ("post", "put" or "patch"), so all three verbs share one code path.
        """
        self._capture_output()
        method = getattr(session, method_name)
        resp = method(self._get_url(session, uri),
                      data=data,
                      json=json,
                      params=self._utf8_urlencode(params),
                      files=files,
                      headers=headers,
                      allow_redirects=allow_redirects,
                      timeout=self._get_timeout(timeout),
                      cookies=self.cookies,
                      verify=self.verify)
        self._print_debug()
        # Store the last session object
        session.last_resp = resp
        self.builtin.log(method_name + ' response: ' + resp.text, 'DEBUG')
        return resp
    def _delete_request(
            self,
            session,
            uri,
            data,
            json,
            params,
            headers,
            allow_redirects,
            timeout):
        """Issue the actual DELETE via *session* and return the raw response."""
        self._capture_output()
        resp = session.delete(self._get_url(session, uri),
                              data=data,
                              json=json,
                              params=self._utf8_urlencode(params),
                              headers=headers,
                              allow_redirects=allow_redirects,
                              timeout=self._get_timeout(timeout),
                              cookies=self.cookies,
                              verify=self.verify)
        self._print_debug()
        # Store the last session object
        session.last_resp = resp
        return resp
    def _head_request(self, session, uri, headers, allow_redirects, timeout):
        """Issue the actual HEAD via *session* and return the raw response."""
        self._capture_output()
        resp = session.head(self._get_url(session, uri),
                            headers=headers,
                            allow_redirects=allow_redirects,
                            timeout=self._get_timeout(timeout),
                            cookies=self.cookies,
                            verify=self.verify)
        self._print_debug()
        # Store the last session object
        session.last_resp = resp
        return resp
    def _options_request(
            self,
            session,
            uri,
            headers,
            allow_redirects,
            timeout):
        """Issue the actual OPTIONS via *session* and return the raw response."""
        self._capture_output()
        resp = session.options(self._get_url(session, uri),
                               headers=headers,
                               cookies=self.cookies,
                               allow_redirects=allow_redirects,
                               timeout=self._get_timeout(timeout),
                               verify=self.verify)
        self._print_debug()
        # Store the last session object
        session.last_resp = resp
        return resp
def _get_url(self, session, uri):
"""
Helper method to get the full url
"""
url = session.url
if uri:
slash = '' if uri.startswith('/') else '/'
url = "%s%s%s" % (session.url, slash, uri)
return url
def _get_timeout(self, timeout):
return float(timeout) if timeout is not None else self.timeout
    def _capture_output(self):
        """Redirect stdout into an in-memory buffer while http debug is on.

        http.client/httplib writes its debug trace to stdout; capturing it
        here lets _print_debug() forward the trace to the Robot log.
        """
        if self.debug >= 1:
            self.http_log = WritableObject()
            sys.stdout = self.http_log
    def _print_debug(self):
        """Restore stdout and emit the captured http trace to the Robot log."""
        if self.debug >= 1:
            sys.stdout = sys.__stdout__  # Restore stdout
            if PY3:
                # Strip literal "\r" sequences and quote characters left over
                # from the repr-style capture.
                debug_info = ''.join(
                    self.http_log.content).replace(
                    '\\r',
                    '').replace(
                    '\'',
                    '')
            else:
                # Python 2 additionally needs string-escape decoding.
                debug_info = ''.join(
                    self.http_log.content).replace(
                    '\\r',
                    '').decode('string_escape').replace(
                    '\'',
                    '')
            # Remove empty lines
            debug_info = "\n".join(
                [ll.rstrip() for ll in debug_info.splitlines() if ll.strip()])
            self.builtin.log(debug_info, 'DEBUG')
def _json_pretty_print(self, content):
"""
Pretty print a JSON object
``content`` JSON object to pretty print
"""
temp = json.loads(content)
return json.dumps(
temp,
sort_keys=True,
indent=4,
separators=(
',',
': '))
def _utf8_urlencode(self, data):
if self._is_string_type(data):
return data.encode('utf-8')
if not isinstance(data, dict):
return data
utf8_data = {}
for k, v in data.items():
if self._is_string_type(v):
v = v.encode('utf-8')
utf8_data[k] = v
return urlencode(utf8_data)
    def _format_data_according_to_header(self, session, data, headers):
        """Serialise a request body according to the effective Content-Type.

        Bodies that are already valid JSON strings pass through unchanged;
        otherwise the body is json-encoded for ``application/json`` and
        url-encoded for form posts and other declared content types.
        """
        # Session-level headers take part in the Content-Type decision too.
        headers = self._merge_headers(session, headers)
        if data is not None and headers is not None and 'Content-Type' in headers and not self._is_json(data):
            if headers['Content-Type'].find("application/json") != -1:
                # Generators are left untouched so streamed uploads still work.
                if not isinstance(data, types.GeneratorType):
                    data = json.dumps(data)
            elif headers['Content-Type'].find("application/x-www-form-urlencoded") != -1:
                data = self._utf8_urlencode(data)
            else:
                data = self._utf8_urlencode(data)
        return data
def _format_data_to_log_string_according_to_header(self, data, headers):
dataStr = "<empty>"
if data is not None and headers is not None and 'Content-Type' in headers:
if (headers['Content-Type'].find("application/json") != -1) or \
(headers['Content-Type'].find("application/x-www-form-urlencoded") != -1):
if isinstance(data, bytes):
dataStr = data.decode('utf-8')
else:
dataStr = data
else:
dataStr = "<" + headers['Content-Type'] + ">"
return dataStr
    @staticmethod
    def _merge_headers(session, headers):
        """Return a new dict combining per-request *headers* with the
        session's default headers; the caller's dict is never mutated."""
        if headers is None:
            headers = {}
        else:
            # Copy so the caller's dict is left untouched.
            headers = headers.copy()
        # NOTE(review): session headers are applied last and therefore win on
        # key collisions here, the opposite of requests' own send-time
        # precedence -- confirm this is intended.
        headers.update(session.headers)
        return headers
@staticmethod
def _is_json(data):
try:
json.loads(data)
except (TypeError, ValueError):
return False
return True
@staticmethod
def _is_string_type(data):
if PY3 and isinstance(data, str):
return True
elif not PY3 and isinstance(data, unicode):
return True
return False
|
bulkan/robotframework-requests
|
src/RequestsLibrary/RequestsKeywords.py
|
RequestsKeywords.post_request
|
python
|
def post_request(
self,
alias,
uri,
data=None,
json=None,
params=None,
headers=None,
files=None,
allow_redirects=None,
timeout=None):
session = self._cache.switch(alias)
if not files:
data = self._format_data_according_to_header(session, data, headers)
redir = True if allow_redirects is None else allow_redirects
response = self._body_request(
"post",
session,
uri,
data,
json,
params,
files,
headers,
redir,
timeout)
dataStr = self._format_data_to_log_string_according_to_header(data, headers)
logger.info('Post Request using : alias=%s, uri=%s, data=%s, headers=%s, files=%s, allow_redirects=%s '
% (alias, uri, dataStr, headers, files, redir))
return response
|
Send a POST request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the POST request to
``data`` a dictionary of key-value pairs that will be urlencoded
and sent as POST data
or binary data that is sent as the raw body content
or passed as such for multipart form data if ``files`` is also
defined
``json`` a value that will be json encoded
and sent as POST data if files or data is not specified
``params`` url parameters to append to the uri
``headers`` a dictionary of headers to use with the request
``files`` a dictionary of file names containing file data to POST to the server
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``timeout`` connection timeout
|
train
|
https://github.com/bulkan/robotframework-requests/blob/11baa3277f1cb728712e26d996200703c15254a8/src/RequestsLibrary/RequestsKeywords.py#L550-L607
|
[
"def _body_request(\n self,\n method_name,\n session,\n uri,\n data,\n json,\n params,\n files,\n headers,\n allow_redirects,\n timeout):\n self._capture_output()\n\n method = getattr(session, method_name)\n resp = method(self._get_url(session, uri),\n data=data,\n json=json,\n params=self._utf8_urlencode(params),\n files=files,\n headers=headers,\n allow_redirects=allow_redirects,\n timeout=self._get_timeout(timeout),\n cookies=self.cookies,\n verify=self.verify)\n\n self._print_debug()\n\n # Store the last session object\n session.last_resp = resp\n\n self.builtin.log(method_name + ' response: ' + resp.text, 'DEBUG')\n\n return resp\n",
"def _format_data_according_to_header(self, session, data, headers):\n headers = self._merge_headers(session, headers)\n\n if data is not None and headers is not None and 'Content-Type' in headers and not self._is_json(data):\n if headers['Content-Type'].find(\"application/json\") != -1:\n if not isinstance(data, types.GeneratorType):\n data = json.dumps(data)\n elif headers['Content-Type'].find(\"application/x-www-form-urlencoded\") != -1:\n data = self._utf8_urlencode(data)\n else:\n data = self._utf8_urlencode(data)\n\n return data\n",
"def _format_data_to_log_string_according_to_header(self, data, headers):\n dataStr = \"<empty>\"\n if data is not None and headers is not None and 'Content-Type' in headers:\n if (headers['Content-Type'].find(\"application/json\") != -1) or \\\n (headers['Content-Type'].find(\"application/x-www-form-urlencoded\") != -1):\n if isinstance(data, bytes):\n dataStr = data.decode('utf-8')\n else:\n dataStr = data\n else:\n dataStr = \"<\" + headers['Content-Type'] + \">\"\n\n return dataStr\n"
] |
class RequestsKeywords(object):
    """``RequestsLibrary`` is a [http://code.google.com/p/robotframework/|Robot Framework] test library that uses the [https://github.com/kennethreitz/requests|Requests] HTTP client.
    Here is an example testcase
    | ***** Settings ***** | | | | |
    | Library | Collections | | | |
    | Library | RequestsLibrary | | | |
    | ***** Test Cases ***** | | | | |
    | Get Requests | | | | |
    | | Create Session | github | http://api.github.com | |
    | | Create Session | google | http://www.google.com | |
    | | ${resp}= | Get Request | google | / |
    | | Should Be Equal As Strings | ${resp.status_code} | 200 | |
    | | ${resp}= | Get Request | github | /users/bulkan |
    | | Should Be Equal As Strings | ${resp.status_code} | 200 | |
    | | Dictionary Should Contain Value | ${resp.json()} | Bulkan Savun Evcimen | |
    """
    # One shared library instance (and session cache) for the whole run.
    ROBOT_LIBRARY_SCOPE = 'Global'
    def __init__(self):
        """Initialise the keyword library with an empty session cache."""
        # Named cache of requests.Session objects, keyed by alias.
        self._cache = robot.utils.ConnectionCache('No sessions created')
        self.builtin = BuiltIn()
        # HTTP wire verbosity; raised per-session via the `debug` argument.
        self.debug = 0
    def _create_session(
            self,
            alias,
            url,
            headers,
            cookies,
            auth,
            timeout,
            max_retries,
            backoff_factor,
            proxies,
            verify,
            debug,
            disable_warnings):
        """Build, configure and cache a requests.Session.

        ``url`` base url of the server
        ``alias`` Robot Framework alias used to look the session up later
        ``headers`` dictionary of default headers
        ``cookies`` dictionary of cookies
        ``auth`` auth object / (user, password) pair for the session
        ``timeout`` default connection timeout
        ``max_retries`` maximum retries per connection (0 disables retrying)
        ``backoff_factor`` pause factor between retries
        ``proxies`` proxy urls for HTTP and HTTPS communication
        ``verify`` SSL verification flag or CA_BUNDLE path
        ``debug`` http verbosity level (see httplib set_debuglevel)
        ``disable_warnings`` silence requests/urllib3 warnings
        """
        self.builtin.log('Creating session: %s' % alias, 'DEBUG')
        s = session = requests.Session()
        s.headers.update(headers)
        s.auth = auth if auth else s.auth
        s.proxies = proxies if proxies else s.proxies
        try:
            max_retries = int(max_retries)
        except ValueError as err:
            raise ValueError("Error converting max_retries parameter: %s" % err)
        if max_retries > 0:
            http = requests.adapters.HTTPAdapter(max_retries=Retry(total=max_retries, backoff_factor=backoff_factor))
            https = requests.adapters.HTTPAdapter(max_retries=Retry(total=max_retries, backoff_factor=backoff_factor))
            # Replace the session's original adapters
            s.mount('http://', http)
            s.mount('https://', https)
        # Disable requests warnings, useful when you have large number of testcase
        # you will observe drastical changes in Robot log.html and output.xml files size
        if disable_warnings:
            logging.basicConfig()  # you need to initialize logging, otherwise you will not see anything from requests
            logging.getLogger().setLevel(logging.ERROR)
            requests_log = logging.getLogger("requests")
            requests_log.setLevel(logging.ERROR)
            requests_log.propagate = True
            if not verify:
                requests.packages.urllib3.disable_warnings()
        # verify can be a Boolean or a String
        if isinstance(verify, bool):
            s.verify = verify
        elif isinstance(verify, str) or isinstance(verify, unicode):
            if verify.lower() == 'true' or verify.lower() == 'false':
                s.verify = self.builtin.convert_to_boolean(verify)
            else:
                # String for CA_BUNDLE, not a Boolean String
                s.verify = verify
        else:
            # not a Boolean nor a String
            s.verify = verify
        # cant pass these into the Session anymore
        self.timeout = float(timeout) if timeout is not None else None
        self.cookies = cookies
        # NOTE(review): a truthy `verify` collapses self.verify to None while
        # s.verify keeps the real value -- per-request calls pass self.verify,
        # so requests falls back to the session's s.verify in that case.
        self.verify = verify if self.builtin.convert_to_boolean(verify) != True else None
        s.url = url
        # Enable http verbosity
        if int(debug) >= 1:
            self.debug = int(debug)
            httplib.HTTPConnection.debuglevel = self.debug
        self._cache.register(session, alias=alias)
        return session
    def create_session(self, alias, url, headers={}, cookies={},
                       auth=None, timeout=None, proxies=None,
                       verify=False, debug=0, max_retries=3, backoff_factor=0.10, disable_warnings=0):
        """Create an HTTP session to a server, using HTTP Basic Auth when
        ``auth`` is given.

        ``url`` base url of the server
        ``alias`` Robot Framework alias to identify the session
        ``headers`` dictionary of default headers
        ``cookies`` dictionary of cookies
        ``auth`` list of username & password for HTTP Basic Auth
        ``timeout`` connection timeout
        ``proxies`` dictionary of proxy urls for HTTP and HTTPS communication
        ``verify`` SSL verification flag or CA_BUNDLE path (default False)
        ``debug`` http verbosity level (see httplib set_debuglevel)
        ``max_retries`` maximum retries per connection
        ``backoff_factor`` pause factor between retries
        ``disable_warnings`` silence requests/urllib3 warnings
        """
        # Wrap the (user, password) pair for requests' basic-auth handler.
        auth = requests.auth.HTTPBasicAuth(*auth) if auth else None
        logger.info('Creating Session using : alias=%s, url=%s, headers=%s, \
                    cookies=%s, auth=%s, timeout=%s, proxies=%s, verify=%s, \
                    debug=%s ' % (alias, url, headers, cookies, auth, timeout,
                                  proxies, verify, debug))
        return self._create_session(
            alias,
            url,
            headers,
            cookies,
            auth,
            timeout,
            max_retries,
            backoff_factor,
            proxies,
            verify,
            debug,
            disable_warnings)
    def create_custom_session(
            self,
            alias,
            url,
            auth,
            headers={},
            cookies={},
            timeout=None,
            proxies=None,
            verify=False,
            debug=0,
            max_retries=3,
            backoff_factor=0.10,
            disable_warnings=0):
        """Create an HTTP session using a custom requests auth object.

        ``auth`` a custom authentication object passed straight through to
        requests, see
        http://docs.python-requests.org/en/master/user/advanced/#custom-authentication
        All remaining parameters behave exactly as in `Create Session`.
        """
        logger.info('Creating Custom Authenticated Session using : alias=%s, url=%s, headers=%s, \
                    cookies=%s, auth=%s, timeout=%s, proxies=%s, verify=%s, \
                    debug=%s ' % (alias, url, headers, cookies, auth, timeout,
                                  proxies, verify, debug))
        return self._create_session(
            alias,
            url,
            headers,
            cookies,
            auth,
            timeout,
            max_retries,
            backoff_factor,
            proxies,
            verify,
            debug,
            disable_warnings)
    def create_ntlm_session(
            self,
            alias,
            url,
            auth,
            headers={},
            cookies={},
            timeout=None,
            proxies=None,
            verify=False,
            debug=0,
            max_retries=3,
            backoff_factor=0.10,
            disable_warnings=0):
        """Create an HTTP session using NTLM authentication.

        ``auth`` ['DOMAIN', 'username', 'password'] for NTLM Authentication.
        All remaining parameters behave exactly as in `Create Session`.
        Fails if the optional requests-ntlm dependency is not installed.
        """
        # HttpNtlmAuth is None when the optional requests-ntlm import failed.
        if not HttpNtlmAuth:
            raise AssertionError('Requests NTLM module not loaded')
        elif len(auth) != 3:
            raise AssertionError('Incorrect number of authentication arguments'
                                 ' - expected 3, got {}'.format(len(auth)))
        else:
            # NTLM wants the user as DOMAIN\username.
            ntlm_auth = HttpNtlmAuth('{}\\{}'.format(auth[0], auth[1]),
                                     auth[2])
            logger.info('Creating NTLM Session using : alias=%s, url=%s, \
                        headers=%s, cookies=%s, ntlm_auth=%s, timeout=%s, \
                        proxies=%s, verify=%s, debug=%s '
                        % (alias, url, headers, cookies, ntlm_auth,
                           timeout, proxies, verify, debug))
            return self._create_session(
                alias,
                url,
                headers,
                cookies,
                ntlm_auth,
                timeout,
                max_retries,
                backoff_factor,
                proxies,
                verify,
                debug,
                disable_warnings)
    def create_digest_session(self, alias, url, auth, headers={}, cookies={},
                              timeout=None, proxies=None, verify=False,
                              debug=0, max_retries=3,backoff_factor=0.10, disable_warnings=0):
        """Create an HTTP session using HTTP Digest authentication.

        ``auth`` ['username', 'password'] for HTTP Digest Authentication.
        All remaining parameters behave exactly as in `Create Session`.
        """
        digest_auth = requests.auth.HTTPDigestAuth(*auth) if auth else None
        return self._create_session(
            alias,
            url,
            headers,
            cookies,
            digest_auth,
            timeout,
            max_retries,
            backoff_factor,
            proxies,
            verify,
            debug,
            disable_warnings)
def create_client_cert_session(self, alias, url, headers={}, cookies={},
client_certs=None, timeout=None, proxies=None,
verify=False, debug=0, max_retries=3, backoff_factor=0.10, disable_warnings=0):
""" Create Session: create a HTTP session to a server
``url`` Base url of the server
``alias`` Robot Framework alias to identify the session
``headers`` Dictionary of default headers
``cookies`` Dictionary of cookies
``client_certs`` ['client certificate', 'client key'] PEM files containing the client key and certificate
``timeout`` Connection timeout
``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication
``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.
Defaults to False.
``debug`` Enable http verbosity option more information
https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel
``max_retries`` The maximum number of retries each connection should attempt.
``backoff_factor`` The pause between for each retry
``disable_warnings`` Disable requests warning useful when you have large number of testcases
"""
logger.info('Creating Session using : alias=%s, url=%s, headers=%s, \
cookies=%s, client_certs=%s, timeout=%s, proxies=%s, verify=%s, \
debug=%s ' % (alias, url, headers, cookies, client_certs, timeout,
proxies, verify, debug))
session = self._create_session(
alias,
url,
headers,
cookies,
None,
timeout,
max_retries,
backoff_factor,
proxies,
verify,
debug,
disable_warnings)
session.cert = tuple(client_certs)
return session
    def delete_all_sessions(self):
        """ Removes all the session objects """
        logger.info('Delete All Sessions')
        # NOTE(review): this only drops the cached aliases; the underlying
        # requests.Session objects are presumably left for GC to close.
        self._cache.empty_cache()
    def update_session(self, alias, headers=None, cookies=None):
        """Update an existing session's headers and cookies in place.

        ``alias`` Robot Framework alias to identify the session
        ``headers`` dictionary of headers merged into the session headers
        ``cookies`` dictionary of cookies merged into the session cookie jar
        """
        session = self._cache.switch(alias)
        session.headers = merge_setting(headers, session.headers)
        session.cookies = merge_cookies(session.cookies, cookies)
    def to_json(self, content, pretty_print=False):
        """Convert a string to a JSON object.

        ``content`` string content to convert into JSON
        ``pretty_print`` if truthy, return a pretty-printed JSON *string*
        (note: not a parsed object) instead of the parsed value.
        """
        if PY3:
            if isinstance(content, bytes):
                content = content.decode(encoding='utf-8')
        if pretty_print:
            # _json_pretty_print returns a formatted string, not a dict.
            json_ = self._json_pretty_print(content)
        else:
            json_ = json.loads(content)
        logger.info('To JSON using : content=%s ' % (content))
        logger.info('To JSON using : pretty_print=%s ' % (pretty_print))
        return json_
    def get_request(
            self,
            alias,
            uri,
            headers=None,
            json=None,
            params=None,
            allow_redirects=None,
            timeout=None):
        """Send a GET request on the session registered under ``alias``.

        ``uri`` path appended to the session base url
        ``params`` url query parameters
        ``headers`` per-request headers
        ``json`` json data to send in the request body
        ``allow_redirects`` follow redirects (defaults to True)
        ``timeout`` per-request connection timeout
        """
        session = self._cache.switch(alias)
        redir = True if allow_redirects is None else allow_redirects
        response = self._get_request(
            session, uri, params, headers, json, redir, timeout)
        logger.info(
            'Get Request using : alias=%s, uri=%s, headers=%s json=%s' %
            (alias, uri, headers, json))
        return response
def get(
self,
alias,
uri,
params=None,
headers=None,
allow_redirects=None,
timeout=None):
""" **Deprecated- See Get Request now**
Send a GET request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the GET request to
``headers`` a dictionary of headers to use with the request
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``timeout`` connection timeout
"""
logger.warn("Deprecation Warning: Use Get Request in the future")
session = self._cache.switch(alias)
redir = True if allow_redirects is None else allow_redirects
response = self._get_request(
session, uri, params, headers, redir, timeout, json)
return response
    def post(
            self,
            alias,
            uri,
            data={},
            headers=None,
            files=None,
            allow_redirects=None,
            timeout=None):
        """**Deprecated - See Post Request now**

        Send a POST request on the session registered under ``alias``.

        ``data`` key-value pairs urlencoded and sent as the body,
        or binary data sent as the raw body content
        ``headers`` per-request headers
        ``files`` dictionary of file names and file data to POST
        ``allow_redirects`` follow redirects (defaults to True)
        ``timeout`` per-request connection timeout
        """
        logger.warn("Deprecation Warning: Use Post Request in the future")
        session = self._cache.switch(alias)
        # Unlike Post Request, the body is always urlencoded here.
        data = self._utf8_urlencode(data)
        redir = True if allow_redirects is None else allow_redirects
        response = self._body_request(
            "post",
            session,
            uri,
            data,
            None,
            None,
            files,
            headers,
            redir,
            timeout)
        return response
    def patch_request(
            self,
            alias,
            uri,
            data=None,
            json=None,
            params=None,
            headers=None,
            files=None,
            allow_redirects=None,
            timeout=None):
        """Send a PATCH request on the session registered under ``alias``.

        ``data`` request body (dict, string or binary)
        ``json`` value json-encoded and sent as the body if ``data`` is unset
        ``params`` url query parameters
        ``headers`` per-request headers
        ``files`` dictionary of file names and file data to send
        ``allow_redirects`` follow redirects (defaults to True)
        ``timeout`` per-request connection timeout
        """
        session = self._cache.switch(alias)
        # Encode/serialise the body according to the effective Content-Type.
        data = self._format_data_according_to_header(session, data, headers)
        redir = True if allow_redirects is None else allow_redirects
        response = self._body_request(
            "patch",
            session,
            uri,
            data,
            json,
            params,
            files,
            headers,
            redir,
            timeout)
        if isinstance(data, bytes):
            # Decoded copy is only for the human-readable log line below.
            data = data.decode('utf-8')
        logger.info('Patch Request using : alias=%s, uri=%s, data=%s, \
                    headers=%s, files=%s, allow_redirects=%s '
                    % (alias, uri, data, headers, files, redir))
        return response
    def patch(
            self,
            alias,
            uri,
            data={},
            headers=None,
            files={},
            allow_redirects=None,
            timeout=None):
        """**Deprecated - See Patch Request now**

        Send a PATCH request on the session registered under ``alias``.

        ``data`` key-value pairs urlencoded and sent as the body,
        or binary data sent as the raw body content
        ``headers`` per-request headers
        ``files`` dictionary of file names and file data to send
        ``allow_redirects`` follow redirects (defaults to True)
        ``timeout`` per-request connection timeout
        """
        logger.warn("Deprecation Warning: Use Patch Request in the future")
        session = self._cache.switch(alias)
        data = self._utf8_urlencode(data)
        redir = True if allow_redirects is None else allow_redirects
        response = self._body_request(
            "patch",
            session,
            uri,
            data,
            None,
            None,
            files,
            headers,
            redir,
            timeout)
        return response
    def put_request(
            self,
            alias,
            uri,
            data=None,
            json=None,
            params=None,
            files=None,
            headers=None,
            allow_redirects=None,
            timeout=None):
        """Send a PUT request on the session registered under ``alias``.

        ``data`` request body (dict, string or binary)
        ``json`` value json-encoded and sent as the body if ``data`` is unset
        ``params`` url query parameters
        ``files`` dictionary of file names and file data to send
        ``headers`` per-request headers
        ``allow_redirects`` follow redirects (defaults to True)
        ``timeout`` per-request connection timeout
        """
        session = self._cache.switch(alias)
        # Encode/serialise the body according to the effective Content-Type.
        data = self._format_data_according_to_header(session, data, headers)
        redir = True if allow_redirects is None else allow_redirects
        response = self._body_request(
            "put",
            session,
            uri,
            data,
            json,
            params,
            files,
            headers,
            redir,
            timeout)
        if isinstance(data, bytes):
            # Decoded copy is only for the human-readable log line below.
            data = data.decode('utf-8')
        logger.info('Put Request using : alias=%s, uri=%s, data=%s, \
        headers=%s, allow_redirects=%s ' % (alias, uri, data, headers, redir))
        return response
    def put(
            self,
            alias,
            uri,
            data=None,
            headers=None,
            allow_redirects=None,
            timeout=None):
        """**Deprecated - See Put Request now**

        Send a PUT request on the session registered under ``alias``.

        ``data`` key-value pairs urlencoded and sent as the body
        ``headers`` per-request headers
        ``allow_redirects`` follow redirects (defaults to True)
        ``timeout`` per-request connection timeout
        """
        logger.warn("Deprecation Warning: Use Put Request in the future")
        session = self._cache.switch(alias)
        data = self._utf8_urlencode(data)
        redir = True if allow_redirects is None else allow_redirects
        response = self._body_request(
            "put",
            session,
            uri,
            data,
            None,
            None,
            None,
            headers,
            redir,
            timeout)
        return response
    def delete_request(
            self,
            alias,
            uri,
            data=None,
            json=None,
            params=None,
            headers=None,
            allow_redirects=None,
            timeout=None):
        """Send a DELETE request on the session registered under ``alias``.

        ``data`` request body (dict, string or binary)
        ``json`` value json-encoded and sent as the body if ``data`` is unset
        ``params`` url query parameters
        ``headers`` per-request headers
        ``allow_redirects`` follow redirects (defaults to True)
        ``timeout`` per-request connection timeout
        """
        session = self._cache.switch(alias)
        # Encode/serialise the body according to the effective Content-Type.
        data = self._format_data_according_to_header(session, data, headers)
        redir = True if allow_redirects is None else allow_redirects
        response = self._delete_request(
            session, uri, data, json, params, headers, redir, timeout)
        if isinstance(data, bytes):
            # Decoded copy is only for the human-readable log line below.
            data = data.decode('utf-8')
        logger.info('Delete Request using : alias=%s, uri=%s, data=%s, \
        headers=%s, allow_redirects=%s ' % (alias, uri, data, headers, redir))
        return response
def delete(
self,
alias,
uri,
data=(),
headers=None,
allow_redirects=None,
timeout=None):
""" * * * Deprecated- See Delete Request now * * *
Send a DELETE request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the DELETE request to
``headers`` a dictionary of headers to use with the request
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``timeout`` connection timeout
"""
logger.warn("Deprecation Warning: Use Delete Request in the future")
session = self._cache.switch(alias)
data = self._utf8_urlencode(data)
redir = True if allow_redirects is None else allow_redirects
response = self._delete_request(
session, uri, data, json, None, headers, redir, timeout)
return response
    def head_request(
            self,
            alias,
            uri,
            headers=None,
            allow_redirects=None,
            timeout=None):
        """ Send a HEAD request on the session object found using the
        given `alias`
        ``alias`` that will be used to identify the Session object in the cache
        ``uri`` to send the HEAD request to
        ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
        ``headers`` a dictionary of headers to use with the request
        """
        session = self._cache.switch(alias)
        # Unlike the other verbs in this library, HEAD defaults to NOT
        # following redirects.
        redir = False if allow_redirects is None else allow_redirects
        response = self._head_request(session, uri, headers, redir, timeout)
        logger.info('Head Request using : alias=%s, uri=%s, headers=%s, \
                    allow_redirects=%s ' % (alias, uri, headers, redir))
        return response
    def head(
            self,
            alias,
            uri,
            headers=None,
            allow_redirects=None,
            timeout=None):
        """ **Deprecated- See Head Request now**
        Send a HEAD request on the session object found using the
        given `alias`
        ``alias`` that will be used to identify the Session object in the cache
        ``uri`` to send the HEAD request to
        ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
        ``headers`` a dictionary of headers to use with the request
        """
        logger.warn("Deprecation Warning: Use Head Request in the future")
        session = self._cache.switch(alias)
        # HEAD defaults to NOT following redirects, unlike the body verbs.
        redir = False if allow_redirects is None else allow_redirects
        response = self._head_request(session, uri, headers, redir, timeout)
        return response
    def options_request(
            self,
            alias,
            uri,
            headers=None,
            allow_redirects=None,
            timeout=None):
        """ Send an OPTIONS request on the session object found using the
        given `alias`
        ``alias`` that will be used to identify the Session object in the cache
        ``uri`` to send the OPTIONS request to
        ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
        ``headers`` a dictionary of headers to use with the request
        """
        session = self._cache.switch(alias)
        # OPTIONS follows redirects by default.
        redir = True if allow_redirects is None else allow_redirects
        response = self._options_request(session, uri, headers, redir, timeout)
        logger.info(
            'Options Request using : alias=%s, uri=%s, headers=%s, allow_redirects=%s ' %
            (alias, uri, headers, redir))
        return response
    def options(
            self,
            alias,
            uri,
            headers=None,
            allow_redirects=None,
            timeout=None):
        """ **Deprecated- See Options Request now**
        Send an OPTIONS request on the session object found using the
        given `alias`
        ``alias`` that will be used to identify the Session object in the cache
        ``uri`` to send the OPTIONS request to
        ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
        ``headers`` a dictionary of headers to use with the request
        """
        logger.warn("Deprecation Warning: Use Options Request in the future")
        session = self._cache.switch(alias)
        # OPTIONS follows redirects by default.
        redir = True if allow_redirects is None else allow_redirects
        response = self._options_request(session, uri, headers, redir, timeout)
        return response
    def _get_request(
            self,
            session,
            uri,
            params,
            headers,
            json,
            allow_redirects,
            timeout):
        """Low-level GET worker shared by the public GET keywords.

        Routes url building, timeout resolution, cookie/verify defaults
        and optional http-debug capture through this library's helpers,
        then remembers the response on the session.
        """
        self._capture_output()
        resp = session.get(self._get_url(session, uri),
                           headers=headers,
                           json=json,
                           params=self._utf8_urlencode(params),
                           allow_redirects=allow_redirects,
                           timeout=self._get_timeout(timeout),
                           cookies=self.cookies,
                           verify=self.verify)
        self._print_debug()
        # Store the last response on the session for later inspection.
        session.last_resp = resp
        return resp
    def _body_request(
            self,
            method_name,
            session,
            uri,
            data,
            json,
            params,
            files,
            headers,
            allow_redirects,
            timeout):
        """Low-level worker for verbs that carry a body (POST/PUT/PATCH).

        ``method_name`` selects the bound session method by name, so one
        implementation serves all body-carrying verbs.
        """
        self._capture_output()
        # Dispatch on the verb name rather than duplicating this call site.
        method = getattr(session, method_name)
        resp = method(self._get_url(session, uri),
                      data=data,
                      json=json,
                      params=self._utf8_urlencode(params),
                      files=files,
                      headers=headers,
                      allow_redirects=allow_redirects,
                      timeout=self._get_timeout(timeout),
                      cookies=self.cookies,
                      verify=self.verify)
        self._print_debug()
        # Store the last response on the session for later inspection.
        session.last_resp = resp
        self.builtin.log(method_name + ' response: ' + resp.text, 'DEBUG')
        return resp
    def _delete_request(
            self,
            session,
            uri,
            data,
            json,
            params,
            headers,
            allow_redirects,
            timeout):
        """Low-level DELETE worker; forwards an optional body
        (``data``/``json``) since some APIs expect one on DELETE."""
        self._capture_output()
        resp = session.delete(self._get_url(session, uri),
                              data=data,
                              json=json,
                              params=self._utf8_urlencode(params),
                              headers=headers,
                              allow_redirects=allow_redirects,
                              timeout=self._get_timeout(timeout),
                              cookies=self.cookies,
                              verify=self.verify)
        self._print_debug()
        # Store the last response on the session for later inspection.
        session.last_resp = resp
        return resp
    def _head_request(self, session, uri, headers, allow_redirects, timeout):
        """Low-level HEAD worker: issue the request and remember the response."""
        self._capture_output()
        resp = session.head(self._get_url(session, uri),
                            headers=headers,
                            allow_redirects=allow_redirects,
                            timeout=self._get_timeout(timeout),
                            cookies=self.cookies,
                            verify=self.verify)
        self._print_debug()
        # Store the last response on the session for later inspection.
        session.last_resp = resp
        return resp
    def _options_request(
            self,
            session,
            uri,
            headers,
            allow_redirects,
            timeout):
        """Low-level OPTIONS worker: issue the request and remember the response."""
        self._capture_output()
        resp = session.options(self._get_url(session, uri),
                               headers=headers,
                               cookies=self.cookies,
                               allow_redirects=allow_redirects,
                               timeout=self._get_timeout(timeout),
                               verify=self.verify)
        self._print_debug()
        # Store the last response on the session for later inspection.
        session.last_resp = resp
        return resp
def _get_url(self, session, uri):
"""
Helper method to get the full url
"""
url = session.url
if uri:
slash = '' if uri.startswith('/') else '/'
url = "%s%s%s" % (session.url, slash, uri)
return url
    def _get_timeout(self, timeout):
        # A per-request timeout (anything convertible to float) overrides
        # the session-level default stored in self.timeout.
        return float(timeout) if timeout is not None else self.timeout
    def _capture_output(self):
        """Redirect stdout into an in-memory buffer while http debug is on.

        httplib prints its wire-level trace to stdout (debuglevel is set
        in _create_session); _print_debug later restores stdout and
        forwards the captured text to Robot's log.
        """
        if self.debug >= 1:
            self.http_log = WritableObject()
            sys.stdout = self.http_log
    def _print_debug(self):
        """Flush the captured httplib debug trace (if any) to Robot's log.

        Counterpart of _capture_output: restores the real stdout, strips
        literal backslash-r sequences and quote characters from the
        captured text, drops blank lines, and logs the rest at DEBUG.
        """
        if self.debug >= 1:
            sys.stdout = sys.__stdout__  # Restore stdout
            if PY3:
                debug_info = ''.join(
                    self.http_log.content).replace(
                    '\\r',
                    '').replace(
                    '\'',
                    '')
            else:
                # PY2 additionally needs string_escape decoding of the trace.
                debug_info = ''.join(
                    self.http_log.content).replace(
                    '\\r',
                    '').decode('string_escape').replace(
                    '\'',
                    '')
            # Remove empty lines
            debug_info = "\n".join(
                [ll.rstrip() for ll in debug_info.splitlines() if ll.strip()])
            self.builtin.log(debug_info, 'DEBUG')
def _json_pretty_print(self, content):
"""
Pretty print a JSON object
``content`` JSON object to pretty print
"""
temp = json.loads(content)
return json.dumps(
temp,
sort_keys=True,
indent=4,
separators=(
',',
': '))
    def _utf8_urlencode(self, data):
        """Prepare *data* for use as a form-encoded body or query string.

        - text input: returned utf-8 encoded (bytes), NOT urlencoded;
        - non-dict input (including None): returned unchanged;
        - dict input: each text value utf-8 encoded, then the whole
          mapping urlencoded into a query string.
        """
        if self._is_string_type(data):
            return data.encode('utf-8')

        if not isinstance(data, dict):
            return data

        utf8_data = {}
        for k, v in data.items():
            if self._is_string_type(v):
                v = v.encode('utf-8')
            utf8_data[k] = v
        return urlencode(utf8_data)
    def _format_data_according_to_header(self, session, data, headers):
        """Encode a request body to match the effective Content-Type.

        The session headers are merged over the per-request ones before
        inspection (see _merge_headers). Outcomes:
        - application/json and body not already valid JSON text:
          json-dumped (generators excepted, requests streams those);
        - application/x-www-form-urlencoded: urlencoded;
        - any other explicit Content-Type: body passed through unchanged;
        - no Content-Type header, body is None, or body already parses
          as JSON: fall back to _utf8_urlencode (which utf-8 encodes
          plain strings and leaves non-dicts alone).
        """
        headers = self._merge_headers(session, headers)
        if data is not None and headers is not None and 'Content-Type' in headers and not self._is_json(data):
            if headers['Content-Type'].find("application/json") != -1:
                # Generators cannot be json-dumped; leave them for requests.
                if not isinstance(data, types.GeneratorType):
                    data = json.dumps(data)
            elif headers['Content-Type'].find("application/x-www-form-urlencoded") != -1:
                data = self._utf8_urlencode(data)
        else:
            data = self._utf8_urlencode(data)
        return data
def _format_data_to_log_string_according_to_header(self, data, headers):
dataStr = "<empty>"
if data is not None and headers is not None and 'Content-Type' in headers:
if (headers['Content-Type'].find("application/json") != -1) or \
(headers['Content-Type'].find("application/x-www-form-urlencoded") != -1):
if isinstance(data, bytes):
dataStr = data.decode('utf-8')
else:
dataStr = data
else:
dataStr = "<" + headers['Content-Type'] + ">"
return dataStr
@staticmethod
def _merge_headers(session, headers):
if headers is None:
headers = {}
else:
headers = headers.copy()
headers.update(session.headers)
return headers
@staticmethod
def _is_json(data):
try:
json.loads(data)
except (TypeError, ValueError):
return False
return True
    @staticmethod
    def _is_string_type(data):
        """Return True when *data* is a text string on this interpreter.

        Python 3: ``str``. Python 2: only ``unicode`` counts (byte
        strings are treated as already-encoded payloads). The ``unicode``
        name is only evaluated when PY3 is false, so this is safe to
        import on Python 3.
        """
        if PY3 and isinstance(data, str):
            return True
        elif not PY3 and isinstance(data, unicode):
            return True
        return False
|
bulkan/robotframework-requests
|
src/RequestsLibrary/RequestsKeywords.py
|
RequestsKeywords.put
|
python
|
def put(
self,
alias,
uri,
data=None,
headers=None,
allow_redirects=None,
timeout=None):
logger.warn("Deprecation Warning: Use Put Request in the future")
session = self._cache.switch(alias)
data = self._utf8_urlencode(data)
redir = True if allow_redirects is None else allow_redirects
response = self._body_request(
"put",
session,
uri,
data,
None,
None,
None,
headers,
redir,
timeout)
return response
|
**Deprecated- See Put Request now**
Send a PUT request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the PUT request to
``headers`` a dictionary of headers to use with the request
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``timeout`` connection timeout
|
train
|
https://github.com/bulkan/robotframework-requests/blob/11baa3277f1cb728712e26d996200703c15254a8/src/RequestsLibrary/RequestsKeywords.py#L822-L862
|
[
"def _body_request(\n self,\n method_name,\n session,\n uri,\n data,\n json,\n params,\n files,\n headers,\n allow_redirects,\n timeout):\n self._capture_output()\n\n method = getattr(session, method_name)\n resp = method(self._get_url(session, uri),\n data=data,\n json=json,\n params=self._utf8_urlencode(params),\n files=files,\n headers=headers,\n allow_redirects=allow_redirects,\n timeout=self._get_timeout(timeout),\n cookies=self.cookies,\n verify=self.verify)\n\n self._print_debug()\n\n # Store the last session object\n session.last_resp = resp\n\n self.builtin.log(method_name + ' response: ' + resp.text, 'DEBUG')\n\n return resp\n",
"def _utf8_urlencode(self, data):\n\n if self._is_string_type(data):\n return data.encode('utf-8')\n\n if not isinstance(data, dict):\n return data\n\n utf8_data = {}\n for k, v in data.items():\n if self._is_string_type(v):\n v = v.encode('utf-8')\n utf8_data[k] = v\n return urlencode(utf8_data)\n"
] |
class RequestsKeywords(object):
"""``RequestsLibrary`` is a [http://code.google.com/p/robotframework/|Robot Framework] test library that uses the [https://github.com/kennethreitz/requests|Requests] HTTP client.
Here is an example testcase
| ***** Settings ***** | | | | |
| Library | Collections | | | |
| Library | RequestsLibrary | | | |
| ***** Test Cases ***** | | | | |
| Get Requests | | | | |
| | Create Session | github | http://api.github.com | |
| | Create Session | google | http://www.google.com | |
| | ${resp}= | Get Request | google | / |
| | Should Be Equal As Strings | ${resp.status_code} | 200 | |
| | ${resp}= | Get Request | github | /users/bulkan |
| | Should Be Equal As Strings | ${resp.status_code} | 200 | |
| | Dictionary Should Contain Value | ${resp.json()} | Bulkan Savun Evcimen | |
"""
ROBOT_LIBRARY_SCOPE = 'Global'
    def __init__(self):
        # Cache of named requests.Session objects, keyed by Robot alias.
        self._cache = robot.utils.ConnectionCache('No sessions created')
        # Handle to Robot Framework's BuiltIn library (logging, conversions).
        self.builtin = BuiltIn()
        # httplib debug verbosity; raised per-session by _create_session.
        self.debug = 0
def _create_session(
self,
alias,
url,
headers,
cookies,
auth,
timeout,
max_retries,
backoff_factor,
proxies,
verify,
debug,
disable_warnings):
""" Create Session: create a HTTP session to a server
``url`` Base url of the server
``alias`` Robot Framework alias to identify the session
``headers`` Dictionary of default headers
``cookies`` Dictionary of cookies
``auth`` List of username & password for HTTP Basic Auth
``timeout`` Connection timeout
``max_retries`` The maximum number of retries each connection should attempt.
``backoff_factor`` The pause between for each retry
``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication
``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.
``debug`` Enable http verbosity option more information
https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel
``disable_warnings`` Disable requests warning useful when you have large number of testcases
"""
self.builtin.log('Creating session: %s' % alias, 'DEBUG')
s = session = requests.Session()
s.headers.update(headers)
s.auth = auth if auth else s.auth
s.proxies = proxies if proxies else s.proxies
try:
max_retries = int(max_retries)
except ValueError as err:
raise ValueError("Error converting max_retries parameter: %s" % err)
if max_retries > 0:
http = requests.adapters.HTTPAdapter(max_retries=Retry(total=max_retries, backoff_factor=backoff_factor))
https = requests.adapters.HTTPAdapter(max_retries=Retry(total=max_retries, backoff_factor=backoff_factor))
# Replace the session's original adapters
s.mount('http://', http)
s.mount('https://', https)
# Disable requests warnings, useful when you have large number of testcase
# you will observe drastical changes in Robot log.html and output.xml files size
if disable_warnings:
logging.basicConfig() # you need to initialize logging, otherwise you will not see anything from requests
logging.getLogger().setLevel(logging.ERROR)
requests_log = logging.getLogger("requests")
requests_log.setLevel(logging.ERROR)
requests_log.propagate = True
if not verify:
requests.packages.urllib3.disable_warnings()
# verify can be a Boolean or a String
if isinstance(verify, bool):
s.verify = verify
elif isinstance(verify, str) or isinstance(verify, unicode):
if verify.lower() == 'true' or verify.lower() == 'false':
s.verify = self.builtin.convert_to_boolean(verify)
else:
# String for CA_BUNDLE, not a Boolean String
s.verify = verify
else:
# not a Boolean nor a String
s.verify = verify
# cant pass these into the Session anymore
self.timeout = float(timeout) if timeout is not None else None
self.cookies = cookies
self.verify = verify if self.builtin.convert_to_boolean(verify) != True else None
s.url = url
# Enable http verbosity
if int(debug) >= 1:
self.debug = int(debug)
httplib.HTTPConnection.debuglevel = self.debug
self._cache.register(session, alias=alias)
return session
def create_session(self, alias, url, headers={}, cookies={},
auth=None, timeout=None, proxies=None,
verify=False, debug=0, max_retries=3, backoff_factor=0.10, disable_warnings=0):
""" Create Session: create a HTTP session to a server
``url`` Base url of the server
``alias`` Robot Framework alias to identify the session
``headers`` Dictionary of default headers
``cookies`` Dictionary of cookies
``auth`` List of username & password for HTTP Basic Auth
``timeout`` Connection timeout
``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication
``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.
Defaults to False.
``debug`` Enable http verbosity option more information
https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel
``max_retries`` The maximum number of retries each connection should attempt.
``backoff_factor`` The pause between for each retry
``disable_warnings`` Disable requests warning useful when you have large number of testcases
"""
auth = requests.auth.HTTPBasicAuth(*auth) if auth else None
logger.info('Creating Session using : alias=%s, url=%s, headers=%s, \
cookies=%s, auth=%s, timeout=%s, proxies=%s, verify=%s, \
debug=%s ' % (alias, url, headers, cookies, auth, timeout,
proxies, verify, debug))
return self._create_session(
alias,
url,
headers,
cookies,
auth,
timeout,
max_retries,
backoff_factor,
proxies,
verify,
debug,
disable_warnings)
def create_custom_session(
self,
alias,
url,
auth,
headers={},
cookies={},
timeout=None,
proxies=None,
verify=False,
debug=0,
max_retries=3,
backoff_factor=0.10,
disable_warnings=0):
""" Create Session: create a HTTP session to a server
``url`` Base url of the server
``alias`` Robot Framework alias to identify the session
``headers`` Dictionary of default headers
``cookies`` Dictionary of cookies
``auth`` A Custom Authentication object to be passed on to the reqests library.
http://docs.python-requests.org/en/master/user/advanced/#custom-authentication
``timeout`` Connection timeout
``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication
``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.
Defaults to False.
``debug`` Enable http verbosity option more information
https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel
``max_retries`` The maximum number of retries each connection should attempt.
``backoff_factor`` The pause between for each retry
``disable_warnings`` Disable requests warning useful when you have large number of testcases
"""
logger.info('Creating Custom Authenticated Session using : alias=%s, url=%s, headers=%s, \
cookies=%s, auth=%s, timeout=%s, proxies=%s, verify=%s, \
debug=%s ' % (alias, url, headers, cookies, auth, timeout,
proxies, verify, debug))
return self._create_session(
alias,
url,
headers,
cookies,
auth,
timeout,
max_retries,
backoff_factor,
proxies,
verify,
debug,
disable_warnings)
def create_ntlm_session(
self,
alias,
url,
auth,
headers={},
cookies={},
timeout=None,
proxies=None,
verify=False,
debug=0,
max_retries=3,
backoff_factor=0.10,
disable_warnings=0):
""" Create Session: create a HTTP session to a server
``url`` Base url of the server
``alias`` Robot Framework alias to identify the session
``headers`` Dictionary of default headers
``cookies`` Dictionary of cookies
``auth`` ['DOMAIN', 'username', 'password'] for NTLM Authentication
``timeout`` Connection timeout
``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication
``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.
Defaults to False.
``debug`` Enable http verbosity option more information
https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel
``max_retries`` The maximum number of retries each connection should attempt.
``backoff_factor`` The pause between for each retry
``disable_warnings`` Disable requests warning useful when you have large number of testcases
"""
if not HttpNtlmAuth:
raise AssertionError('Requests NTLM module not loaded')
elif len(auth) != 3:
raise AssertionError('Incorrect number of authentication arguments'
' - expected 3, got {}'.format(len(auth)))
else:
ntlm_auth = HttpNtlmAuth('{}\\{}'.format(auth[0], auth[1]),
auth[2])
logger.info('Creating NTLM Session using : alias=%s, url=%s, \
headers=%s, cookies=%s, ntlm_auth=%s, timeout=%s, \
proxies=%s, verify=%s, debug=%s '
% (alias, url, headers, cookies, ntlm_auth,
timeout, proxies, verify, debug))
return self._create_session(
alias,
url,
headers,
cookies,
ntlm_auth,
timeout,
max_retries,
backoff_factor,
proxies,
verify,
debug,
disable_warnings)
def create_digest_session(self, alias, url, auth, headers={}, cookies={},
timeout=None, proxies=None, verify=False,
debug=0, max_retries=3,backoff_factor=0.10, disable_warnings=0):
""" Create Session: create a HTTP session to a server
``url`` Base url of the server
``alias`` Robot Framework alias to identify the session
``headers`` Dictionary of default headers
``cookies`` Dictionary of cookies
``auth`` ['DOMAIN', 'username', 'password'] for NTLM Authentication
``timeout`` Connection timeout
``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication
``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.
Defaults to False.
``debug`` Enable http verbosity option more information
https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel
``max_retries`` The maximum number of retries each connection should attempt.
``backoff_factor`` The pause between for each retry
``disable_warnings`` Disable requests warning useful when you have large number of testcases
"""
digest_auth = requests.auth.HTTPDigestAuth(*auth) if auth else None
return self._create_session(
alias,
url,
headers,
cookies,
digest_auth,
timeout,
max_retries,
backoff_factor,
proxies,
verify,
debug,
disable_warnings)
def create_client_cert_session(self, alias, url, headers={}, cookies={},
client_certs=None, timeout=None, proxies=None,
verify=False, debug=0, max_retries=3, backoff_factor=0.10, disable_warnings=0):
""" Create Session: create a HTTP session to a server
``url`` Base url of the server
``alias`` Robot Framework alias to identify the session
``headers`` Dictionary of default headers
``cookies`` Dictionary of cookies
``client_certs`` ['client certificate', 'client key'] PEM files containing the client key and certificate
``timeout`` Connection timeout
``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication
``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.
Defaults to False.
``debug`` Enable http verbosity option more information
https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel
``max_retries`` The maximum number of retries each connection should attempt.
``backoff_factor`` The pause between for each retry
``disable_warnings`` Disable requests warning useful when you have large number of testcases
"""
logger.info('Creating Session using : alias=%s, url=%s, headers=%s, \
cookies=%s, client_certs=%s, timeout=%s, proxies=%s, verify=%s, \
debug=%s ' % (alias, url, headers, cookies, client_certs, timeout,
proxies, verify, debug))
session = self._create_session(
alias,
url,
headers,
cookies,
None,
timeout,
max_retries,
backoff_factor,
proxies,
verify,
debug,
disable_warnings)
session.cert = tuple(client_certs)
return session
    def delete_all_sessions(self):
        """Remove every Session object currently held in the cache."""
        logger.info('Delete All Sessions')
        self._cache.empty_cache()
    def update_session(self, alias, headers=None, cookies=None):
        """Merge extra headers and cookies into an existing session.

        ``alias`` Robot Framework alias identifying the session
        ``headers`` dictionary of headers to merge into the session
        ``cookies`` dictionary of cookies to merge into the session
        """
        session = self._cache.switch(alias)
        # requests' merge helpers overlay the new values on the existing ones.
        session.headers = merge_setting(headers, session.headers)
        session.cookies = merge_cookies(session.cookies, cookies)
    def to_json(self, content, pretty_print=False):
        """ Convert a string to a JSON object
        ``content`` String content to convert into JSON
        ``pretty_print`` If defined, will output JSON is pretty print format
        """
        if PY3:
            # The HTTP layer may hand us bytes on Python 3; decode first.
            if isinstance(content, bytes):
                content = content.decode(encoding='utf-8')
        if pretty_print:
            # NOTE: this branch returns a formatted *string*, not a parsed
            # object (see _json_pretty_print).
            json_ = self._json_pretty_print(content)
        else:
            json_ = json.loads(content)
        logger.info('To JSON using : content=%s ' % (content))
        logger.info('To JSON using : pretty_print=%s ' % (pretty_print))
        return json_
def get_request(
self,
alias,
uri,
headers=None,
json=None,
params=None,
allow_redirects=None,
timeout=None):
""" Send a GET request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the GET request to
``params`` url parameters to append to the uri
``headers`` a dictionary of headers to use with the request
``json`` json data to send in the body of the :class:`Request`.
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``timeout`` connection timeout
"""
session = self._cache.switch(alias)
redir = True if allow_redirects is None else allow_redirects
response = self._get_request(
session, uri, params, headers, json, redir, timeout)
logger.info(
'Get Request using : alias=%s, uri=%s, headers=%s json=%s' %
(alias, uri, headers, json))
return response
def get(
self,
alias,
uri,
params=None,
headers=None,
allow_redirects=None,
timeout=None):
""" **Deprecated- See Get Request now**
Send a GET request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the GET request to
``headers`` a dictionary of headers to use with the request
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``timeout`` connection timeout
"""
logger.warn("Deprecation Warning: Use Get Request in the future")
session = self._cache.switch(alias)
redir = True if allow_redirects is None else allow_redirects
response = self._get_request(
session, uri, params, headers, redir, timeout, json)
return response
def post_request(
self,
alias,
uri,
data=None,
json=None,
params=None,
headers=None,
files=None,
allow_redirects=None,
timeout=None):
""" Send a POST request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the POST request to
``data`` a dictionary of key-value pairs that will be urlencoded
and sent as POST data
or binary data that is sent as the raw body content
or passed as such for multipart form data if ``files`` is also
defined
``json`` a value that will be json encoded
and sent as POST data if files or data is not specified
``params`` url parameters to append to the uri
``headers`` a dictionary of headers to use with the request
``files`` a dictionary of file names containing file data to POST to the server
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``timeout`` connection timeout
"""
session = self._cache.switch(alias)
if not files:
data = self._format_data_according_to_header(session, data, headers)
redir = True if allow_redirects is None else allow_redirects
response = self._body_request(
"post",
session,
uri,
data,
json,
params,
files,
headers,
redir,
timeout)
dataStr = self._format_data_to_log_string_according_to_header(data, headers)
logger.info('Post Request using : alias=%s, uri=%s, data=%s, headers=%s, files=%s, allow_redirects=%s '
% (alias, uri, dataStr, headers, files, redir))
return response
def post(
self,
alias,
uri,
data={},
headers=None,
files=None,
allow_redirects=None,
timeout=None):
""" **Deprecated- See Post Request now**
Send a POST request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the GET request to
``data`` a dictionary of key-value pairs that will be urlencoded
and sent as POST data
or binary data that is sent as the raw body content
``headers`` a dictionary of headers to use with the request
``files`` a dictionary of file names containing file data to POST to the server
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``timeout`` connection timeout
"""
logger.warn("Deprecation Warning: Use Post Request in the future")
session = self._cache.switch(alias)
data = self._utf8_urlencode(data)
redir = True if allow_redirects is None else allow_redirects
response = self._body_request(
"post",
session,
uri,
data,
None,
None,
files,
headers,
redir,
timeout)
return response
def patch_request(
self,
alias,
uri,
data=None,
json=None,
params=None,
headers=None,
files=None,
allow_redirects=None,
timeout=None):
""" Send a PATCH request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the PATCH request to
``data`` a dictionary of key-value pairs that will be urlencoded
and sent as PATCH data
or binary data that is sent as the raw body content
``json`` a value that will be json encoded
and sent as PATCH data if data is not specified
``headers`` a dictionary of headers to use with the request
``files`` a dictionary of file names containing file data to PATCH to the server
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``params`` url parameters to append to the uri
``timeout`` connection timeout
"""
session = self._cache.switch(alias)
data = self._format_data_according_to_header(session, data, headers)
redir = True if allow_redirects is None else allow_redirects
response = self._body_request(
"patch",
session,
uri,
data,
json,
params,
files,
headers,
redir,
timeout)
if isinstance(data, bytes):
data = data.decode('utf-8')
logger.info('Patch Request using : alias=%s, uri=%s, data=%s, \
headers=%s, files=%s, allow_redirects=%s '
% (alias, uri, data, headers, files, redir))
return response
def patch(
self,
alias,
uri,
data={},
headers=None,
files={},
allow_redirects=None,
timeout=None):
""" **Deprecated- See Patch Request now**
Send a PATCH request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the PATCH request to
``data`` a dictionary of key-value pairs that will be urlencoded
and sent as PATCH data
or binary data that is sent as the raw body content
``headers`` a dictionary of headers to use with the request
``files`` a dictionary of file names containing file data to PATCH to the server
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``timeout`` connection timeout
"""
logger.warn("Deprecation Warning: Use Patch Request in the future")
session = self._cache.switch(alias)
data = self._utf8_urlencode(data)
redir = True if allow_redirects is None else allow_redirects
response = self._body_request(
"patch",
session,
uri,
data,
None,
None,
files,
headers,
redir,
timeout)
return response
def put_request(
self,
alias,
uri,
data=None,
json=None,
params=None,
files=None,
headers=None,
allow_redirects=None,
timeout=None):
""" Send a PUT request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the PUT request to
``data`` a dictionary of key-value pairs that will be urlencoded
and sent as PUT data
or binary data that is sent as the raw body content
``json`` a value that will be json encoded
and sent as PUT data if data is not specified
``headers`` a dictionary of headers to use with the request
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``params`` url parameters to append to the uri
``timeout`` connection timeout
"""
session = self._cache.switch(alias)
data = self._format_data_according_to_header(session, data, headers)
redir = True if allow_redirects is None else allow_redirects
response = self._body_request(
"put",
session,
uri,
data,
json,
params,
files,
headers,
redir,
timeout)
if isinstance(data, bytes):
data = data.decode('utf-8')
logger.info('Put Request using : alias=%s, uri=%s, data=%s, \
headers=%s, allow_redirects=%s ' % (alias, uri, data, headers, redir))
return response
def delete_request(
self,
alias,
uri,
data=None,
json=None,
params=None,
headers=None,
allow_redirects=None,
timeout=None):
""" Send a DELETE request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the DELETE request to
``json`` a value that will be json encoded
and sent as request data if data is not specified
``headers`` a dictionary of headers to use with the request
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``timeout`` connection timeout
"""
session = self._cache.switch(alias)
data = self._format_data_according_to_header(session, data, headers)
redir = True if allow_redirects is None else allow_redirects
response = self._delete_request(
session, uri, data, json, params, headers, redir, timeout)
if isinstance(data, bytes):
data = data.decode('utf-8')
logger.info('Delete Request using : alias=%s, uri=%s, data=%s, \
headers=%s, allow_redirects=%s ' % (alias, uri, data, headers, redir))
return response
def delete(
self,
alias,
uri,
data=(),
headers=None,
allow_redirects=None,
timeout=None):
""" * * * Deprecated- See Delete Request now * * *
Send a DELETE request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the DELETE request to
``headers`` a dictionary of headers to use with the request
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``timeout`` connection timeout
"""
logger.warn("Deprecation Warning: Use Delete Request in the future")
session = self._cache.switch(alias)
data = self._utf8_urlencode(data)
redir = True if allow_redirects is None else allow_redirects
response = self._delete_request(
session, uri, data, json, None, headers, redir, timeout)
return response
def head_request(
self,
alias,
uri,
headers=None,
allow_redirects=None,
timeout=None):
""" Send a HEAD request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the HEAD request to
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``headers`` a dictionary of headers to use with the request
"""
session = self._cache.switch(alias)
redir = False if allow_redirects is None else allow_redirects
response = self._head_request(session, uri, headers, redir, timeout)
logger.info('Head Request using : alias=%s, uri=%s, headers=%s, \
allow_redirects=%s ' % (alias, uri, headers, redir))
return response
def head(
self,
alias,
uri,
headers=None,
allow_redirects=None,
timeout=None):
""" **Deprecated- See Head Request now**
Send a HEAD request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the HEAD request to
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``headers`` a dictionary of headers to use with the request
"""
logger.warn("Deprecation Warning: Use Head Request in the future")
session = self._cache.switch(alias)
redir = False if allow_redirects is None else allow_redirects
response = self._head_request(session, uri, headers, redir, timeout)
return response
def options_request(
self,
alias,
uri,
headers=None,
allow_redirects=None,
timeout=None):
""" Send an OPTIONS request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the OPTIONS request to
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``headers`` a dictionary of headers to use with the request
"""
session = self._cache.switch(alias)
redir = True if allow_redirects is None else allow_redirects
response = self._options_request(session, uri, headers, redir, timeout)
logger.info(
'Options Request using : alias=%s, uri=%s, headers=%s, allow_redirects=%s ' %
(alias, uri, headers, redir))
return response
def options(
self,
alias,
uri,
headers=None,
allow_redirects=None,
timeout=None):
""" **Deprecated- See Options Request now**
Send an OPTIONS request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the OPTIONS request to
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``headers`` a dictionary of headers to use with the request
"""
logger.warn("Deprecation Warning: Use Options Request in the future")
session = self._cache.switch(alias)
redir = True if allow_redirects is None else allow_redirects
response = self._options_request(session, uri, headers, redir, timeout)
return response
    def _get_request(
            self,
            session,
            uri,
            params,
            headers,
            json,
            allow_redirects,
            timeout):
        """Perform the raw GET on *session* and return the requests Response.

        Side effect: the response is also stored on ``session.last_resp``.
        """
        # Start redirecting stdout to capture httplib's debug chatter
        # (no-op unless self.debug >= 1).
        self._capture_output()
        resp = session.get(self._get_url(session, uri),
                           headers=headers,
                           json=json,
                           params=self._utf8_urlencode(params),
                           allow_redirects=allow_redirects,
                           timeout=self._get_timeout(timeout),
                           cookies=self.cookies,
                           verify=self.verify)
        # Restore stdout and forward any captured traffic to the Robot log.
        self._print_debug()
        # Store the last session object
        session.last_resp = resp
        return resp
    def _body_request(
            self,
            method_name,
            session,
            uri,
            data,
            json,
            params,
            files,
            headers,
            allow_redirects,
            timeout):
        """Dispatch a body-carrying request (post/put/patch) on *session*.

        ``method_name`` names the requests.Session method to call.
        Side effect: the response is stored on ``session.last_resp`` and its
        text is logged at DEBUG level.
        """
        self._capture_output()
        # Resolve session.post / session.put / session.patch dynamically.
        method = getattr(session, method_name)
        resp = method(self._get_url(session, uri),
                      data=data,
                      json=json,
                      params=self._utf8_urlencode(params),
                      files=files,
                      headers=headers,
                      allow_redirects=allow_redirects,
                      timeout=self._get_timeout(timeout),
                      cookies=self.cookies,
                      verify=self.verify)
        self._print_debug()
        # Store the last session object
        session.last_resp = resp
        self.builtin.log(method_name + ' response: ' + resp.text, 'DEBUG')
        return resp
    def _delete_request(
            self,
            session,
            uri,
            data,
            json,
            params,
            headers,
            allow_redirects,
            timeout):
        """Perform the raw DELETE on *session* and return the requests Response.

        Side effect: the response is stored on ``session.last_resp``.
        """
        self._capture_output()
        resp = session.delete(self._get_url(session, uri),
                              data=data,
                              json=json,
                              params=self._utf8_urlencode(params),
                              headers=headers,
                              allow_redirects=allow_redirects,
                              timeout=self._get_timeout(timeout),
                              cookies=self.cookies,
                              verify=self.verify)
        self._print_debug()
        # Store the last session object
        session.last_resp = resp
        return resp
    def _head_request(self, session, uri, headers, allow_redirects, timeout):
        """Perform the raw HEAD on *session* and return the requests Response.

        Side effect: the response is stored on ``session.last_resp``.
        """
        self._capture_output()
        resp = session.head(self._get_url(session, uri),
                            headers=headers,
                            allow_redirects=allow_redirects,
                            timeout=self._get_timeout(timeout),
                            cookies=self.cookies,
                            verify=self.verify)
        self._print_debug()
        # Store the last session object
        session.last_resp = resp
        return resp
    def _options_request(
            self,
            session,
            uri,
            headers,
            allow_redirects,
            timeout):
        """Perform the raw OPTIONS on *session* and return the requests Response.

        Side effect: the response is stored on ``session.last_resp``.
        """
        self._capture_output()
        # NOTE: keyword order (cookies before allow_redirects) differs from the
        # sibling helpers; behavior is identical since all args are keywords.
        resp = session.options(self._get_url(session, uri),
                               headers=headers,
                               cookies=self.cookies,
                               allow_redirects=allow_redirects,
                               timeout=self._get_timeout(timeout),
                               verify=self.verify)
        self._print_debug()
        # Store the last session object
        session.last_resp = resp
        return resp
def _get_url(self, session, uri):
"""
Helper method to get the full url
"""
url = session.url
if uri:
slash = '' if uri.startswith('/') else '/'
url = "%s%s%s" % (session.url, slash, uri)
return url
def _get_timeout(self, timeout):
return float(timeout) if timeout is not None else self.timeout
    def _capture_output(self):
        # When http verbosity is enabled, redirect stdout into an in-memory
        # buffer so httplib's debug output can be re-logged by _print_debug().
        if self.debug >= 1:
            self.http_log = WritableObject()
            sys.stdout = self.http_log
    def _print_debug(self):
        """Restore stdout and emit captured httplib traffic to the DEBUG log.

        Counterpart of ``_capture_output``; no-op unless verbosity is enabled.
        """
        if self.debug >= 1:
            sys.stdout = sys.__stdout__  # Restore stdout
            # The captured chunks are repr()-style text: strip literal "\r"
            # sequences and quote characters (Py2 additionally needs
            # string-escape decoding).
            if PY3:
                debug_info = ''.join(
                    self.http_log.content).replace(
                    '\\r',
                    '').replace(
                    '\'',
                    '')
            else:
                debug_info = ''.join(
                    self.http_log.content).replace(
                    '\\r',
                    '').decode('string_escape').replace(
                    '\'',
                    '')
            # Remove empty lines
            debug_info = "\n".join(
                [ll.rstrip() for ll in debug_info.splitlines() if ll.strip()])
            self.builtin.log(debug_info, 'DEBUG')
def _json_pretty_print(self, content):
"""
Pretty print a JSON object
``content`` JSON object to pretty print
"""
temp = json.loads(content)
return json.dumps(
temp,
sort_keys=True,
indent=4,
separators=(
',',
': '))
def _utf8_urlencode(self, data):
if self._is_string_type(data):
return data.encode('utf-8')
if not isinstance(data, dict):
return data
utf8_data = {}
for k, v in data.items():
if self._is_string_type(v):
v = v.encode('utf-8')
utf8_data[k] = v
return urlencode(utf8_data)
    def _format_data_according_to_header(self, session, data, headers):
        """Serialize *data* to match the request's effective Content-Type.

        With a JSON Content-Type, non-generator data is json.dumps'ed;
        with form-urlencoded it is utf-8/url-encoded; other Content-Types
        leave *data* untouched. When there is no usable Content-Type (or
        *data* is already a valid JSON string) the fallback is
        utf-8/url-encoding via ``_utf8_urlencode``.
        """
        # Per-request headers merged with the session's defaults.
        headers = self._merge_headers(session, headers)
        if data is not None and headers is not None and 'Content-Type' in headers and not self._is_json(data):
            if headers['Content-Type'].find("application/json") != -1:
                # Generators are streamed as-is; requests chunks them itself.
                if not isinstance(data, types.GeneratorType):
                    data = json.dumps(data)
            elif headers['Content-Type'].find("application/x-www-form-urlencoded") != -1:
                data = self._utf8_urlencode(data)
        else:
            # No applicable Content-Type (or body already JSON text).
            data = self._utf8_urlencode(data)
        return data
def _format_data_to_log_string_according_to_header(self, data, headers):
dataStr = "<empty>"
if data is not None and headers is not None and 'Content-Type' in headers:
if (headers['Content-Type'].find("application/json") != -1) or \
(headers['Content-Type'].find("application/x-www-form-urlencoded") != -1):
if isinstance(data, bytes):
dataStr = data.decode('utf-8')
else:
dataStr = data
else:
dataStr = "<" + headers['Content-Type'] + ">"
return dataStr
    @staticmethod
    def _merge_headers(session, headers):
        # Merge per-request headers with the session's default headers.
        # NOTE(review): ``session.headers`` is applied last, so session-level
        # headers override per-request ones here — the opposite of requests'
        # own merge precedence. Confirm this is intentional before changing.
        if headers is None:
            headers = {}
        else:
            headers = headers.copy()  # avoid mutating the caller's dict
        headers.update(session.headers)
        return headers
@staticmethod
def _is_json(data):
try:
json.loads(data)
except (TypeError, ValueError):
return False
return True
@staticmethod
def _is_string_type(data):
if PY3 and isinstance(data, str):
return True
elif not PY3 and isinstance(data, unicode):
return True
return False
|
bulkan/robotframework-requests
|
src/RequestsLibrary/RequestsKeywords.py
|
RequestsKeywords.delete_request
|
python
|
def delete_request(
self,
alias,
uri,
data=None,
json=None,
params=None,
headers=None,
allow_redirects=None,
timeout=None):
session = self._cache.switch(alias)
data = self._format_data_according_to_header(session, data, headers)
redir = True if allow_redirects is None else allow_redirects
response = self._delete_request(
session, uri, data, json, params, headers, redir, timeout)
if isinstance(data, bytes):
data = data.decode('utf-8')
logger.info('Delete Request using : alias=%s, uri=%s, data=%s, \
headers=%s, allow_redirects=%s ' % (alias, uri, data, headers, redir))
return response
|
Send a DELETE request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the DELETE request to
``json`` a value that will be json encoded
and sent as request data if data is not specified
``headers`` a dictionary of headers to use with the request
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``timeout`` connection timeout
|
train
|
https://github.com/bulkan/robotframework-requests/blob/11baa3277f1cb728712e26d996200703c15254a8/src/RequestsLibrary/RequestsKeywords.py#L864-L902
|
[
"def _delete_request(\n self,\n session,\n uri,\n data,\n json,\n params,\n headers,\n allow_redirects,\n timeout):\n self._capture_output()\n\n resp = session.delete(self._get_url(session, uri),\n data=data,\n json=json,\n params=self._utf8_urlencode(params),\n headers=headers,\n allow_redirects=allow_redirects,\n timeout=self._get_timeout(timeout),\n cookies=self.cookies,\n verify=self.verify)\n\n self._print_debug()\n\n # Store the last session object\n session.last_resp = resp\n\n return resp\n",
"def _format_data_according_to_header(self, session, data, headers):\n headers = self._merge_headers(session, headers)\n\n if data is not None and headers is not None and 'Content-Type' in headers and not self._is_json(data):\n if headers['Content-Type'].find(\"application/json\") != -1:\n if not isinstance(data, types.GeneratorType):\n data = json.dumps(data)\n elif headers['Content-Type'].find(\"application/x-www-form-urlencoded\") != -1:\n data = self._utf8_urlencode(data)\n else:\n data = self._utf8_urlencode(data)\n\n return data\n"
] |
class RequestsKeywords(object):
"""``RequestsLibrary`` is a [http://code.google.com/p/robotframework/|Robot Framework] test library that uses the [https://github.com/kennethreitz/requests|Requests] HTTP client.
Here is an example testcase
| ***** Settings ***** | | | | |
| Library | Collections | | | |
| Library | RequestsLibrary | | | |
| ***** Test Cases ***** | | | | |
| Get Requests | | | | |
| | Create Session | github | http://api.github.com | |
| | Create Session | google | http://www.google.com | |
| | ${resp}= | Get Request | google | / |
| | Should Be Equal As Strings | ${resp.status_code} | 200 | |
| | ${resp}= | Get Request | github | /users/bulkan |
| | Should Be Equal As Strings | ${resp.status_code} | 200 | |
| | Dictionary Should Contain Value | ${resp.json()} | Bulkan Savun Evcimen | |
"""
ROBOT_LIBRARY_SCOPE = 'Global'
    def __init__(self):
        # Named cache of requests.Session objects, keyed by user-chosen alias.
        self._cache = robot.utils.ConnectionCache('No sessions created')
        # Robot Framework BuiltIn library, used for logging and conversions.
        self.builtin = BuiltIn()
        # httplib debug verbosity level; raised per-session via ``debug=``.
        self.debug = 0
    def _create_session(
            self,
            alias,
            url,
            headers,
            cookies,
            auth,
            timeout,
            max_retries,
            backoff_factor,
            proxies,
            verify,
            debug,
            disable_warnings):
        """ Create Session: create a HTTP session to a server

        ``url`` Base url of the server

        ``alias`` Robot Framework alias to identify the session

        ``headers`` Dictionary of default headers

        ``cookies`` Dictionary of cookies

        ``auth`` List of username & password for HTTP Basic Auth

        ``timeout`` Connection timeout

        ``max_retries`` The maximum number of retries each connection should attempt.

        ``backoff_factor`` The pause between for each retry

        ``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication

        ``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.

        ``debug`` Enable http verbosity option more information
                https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel

        ``disable_warnings`` Disable requests warning useful when you have large number of testcases
        """
        self.builtin.log('Creating session: %s' % alias, 'DEBUG')
        s = session = requests.Session()
        s.headers.update(headers)
        s.auth = auth if auth else s.auth
        s.proxies = proxies if proxies else s.proxies
        try:
            max_retries = int(max_retries)
        except ValueError as err:
            raise ValueError("Error converting max_retries parameter: %s" % err)
        # Mount retry-enabled adapters only when retries are requested.
        if max_retries > 0:
            http = requests.adapters.HTTPAdapter(max_retries=Retry(total=max_retries, backoff_factor=backoff_factor))
            https = requests.adapters.HTTPAdapter(max_retries=Retry(total=max_retries, backoff_factor=backoff_factor))
            # Replace the session's original adapters
            s.mount('http://', http)
            s.mount('https://', https)
        # Disable requests warnings, useful when you have large number of testcase
        # you will observe drastical changes in Robot log.html and output.xml files size
        if disable_warnings:
            logging.basicConfig()  # you need to initialize logging, otherwise you will not see anything from requests
            logging.getLogger().setLevel(logging.ERROR)
            requests_log = logging.getLogger("requests")
            requests_log.setLevel(logging.ERROR)
            requests_log.propagate = True
            if not verify:
                requests.packages.urllib3.disable_warnings()
        # verify can be a Boolean or a String
        if isinstance(verify, bool):
            s.verify = verify
        # NOTE(review): under Python 3 the ``unicode`` operand below raises
        # NameError for non-bool/non-str ``verify`` values unless the module
        # defines a ``unicode`` alias — confirm against the file's imports.
        elif isinstance(verify, str) or isinstance(verify, unicode):
            if verify.lower() == 'true' or verify.lower() == 'false':
                s.verify = self.builtin.convert_to_boolean(verify)
            else:
                # String for CA_BUNDLE, not a Boolean String
                s.verify = verify
        else:
            # not a Boolean nor a String
            s.verify = verify
        # cant pass these into the Session anymore
        self.timeout = float(timeout) if timeout is not None else None
        self.cookies = cookies
        # NOTE(review): self.verify is cleared to None whenever ``verify`` is
        # truthy, so per-request calls then defer to the session's own verify.
        self.verify = verify if self.builtin.convert_to_boolean(verify) != True else None
        s.url = url
        # Enable http verbosity
        if int(debug) >= 1:
            self.debug = int(debug)
            httplib.HTTPConnection.debuglevel = self.debug
        self._cache.register(session, alias=alias)
        return session
    def create_session(self, alias, url, headers={}, cookies={},
                       auth=None, timeout=None, proxies=None,
                       verify=False, debug=0, max_retries=3, backoff_factor=0.10, disable_warnings=0):
        """ Create Session: create a HTTP session to a server

        ``url`` Base url of the server

        ``alias`` Robot Framework alias to identify the session

        ``headers`` Dictionary of default headers

        ``cookies`` Dictionary of cookies

        ``auth`` List of username & password for HTTP Basic Auth

        ``timeout`` Connection timeout

        ``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication

        ``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.
                 Defaults to False.

        ``debug`` Enable http verbosity option more information
                https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel

        ``max_retries`` The maximum number of retries each connection should attempt.

        ``backoff_factor`` The pause between for each retry

        ``disable_warnings`` Disable requests warning useful when you have large number of testcases
        """
        # Wrap the (user, password) pair in HTTPBasicAuth; None disables auth.
        auth = requests.auth.HTTPBasicAuth(*auth) if auth else None
        logger.info('Creating Session using : alias=%s, url=%s, headers=%s, \
                    cookies=%s, auth=%s, timeout=%s, proxies=%s, verify=%s, \
                    debug=%s ' % (alias, url, headers, cookies, auth, timeout,
                                  proxies, verify, debug))
        return self._create_session(
            alias,
            url,
            headers,
            cookies,
            auth,
            timeout,
            max_retries,
            backoff_factor,
            proxies,
            verify,
            debug,
            disable_warnings)
    def create_custom_session(
            self,
            alias,
            url,
            auth,
            headers={},
            cookies={},
            timeout=None,
            proxies=None,
            verify=False,
            debug=0,
            max_retries=3,
            backoff_factor=0.10,
            disable_warnings=0):
        """ Create Session: create a HTTP session to a server

        ``url`` Base url of the server

        ``alias`` Robot Framework alias to identify the session

        ``headers`` Dictionary of default headers

        ``cookies`` Dictionary of cookies

        ``auth`` A Custom Authentication object to be passed on to the reqests library.
                http://docs.python-requests.org/en/master/user/advanced/#custom-authentication

        ``timeout`` Connection timeout

        ``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication

        ``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.
                 Defaults to False.

        ``debug`` Enable http verbosity option more information
                https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel

        ``max_retries`` The maximum number of retries each connection should attempt.

        ``backoff_factor`` The pause between for each retry

        ``disable_warnings`` Disable requests warning useful when you have large number of testcases
        """
        # Unlike create_session, ``auth`` is passed through unchanged so any
        # requests-compatible AuthBase object can be used.
        logger.info('Creating Custom Authenticated Session using : alias=%s, url=%s, headers=%s, \
                    cookies=%s, auth=%s, timeout=%s, proxies=%s, verify=%s, \
                    debug=%s ' % (alias, url, headers, cookies, auth, timeout,
                                  proxies, verify, debug))
        return self._create_session(
            alias,
            url,
            headers,
            cookies,
            auth,
            timeout,
            max_retries,
            backoff_factor,
            proxies,
            verify,
            debug,
            disable_warnings)
    def create_ntlm_session(
            self,
            alias,
            url,
            auth,
            headers={},
            cookies={},
            timeout=None,
            proxies=None,
            verify=False,
            debug=0,
            max_retries=3,
            backoff_factor=0.10,
            disable_warnings=0):
        """ Create Session: create a HTTP session to a server authenticated via NTLM

        ``url`` Base url of the server

        ``alias`` Robot Framework alias to identify the session

        ``headers`` Dictionary of default headers

        ``cookies`` Dictionary of cookies

        ``auth`` ['DOMAIN', 'username', 'password'] for NTLM Authentication

        ``timeout`` Connection timeout

        ``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication

        ``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.
                 Defaults to False.

        ``debug`` Enable http verbosity option more information
                https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel

        ``max_retries`` The maximum number of retries each connection should attempt.

        ``backoff_factor`` The pause between for each retry

        ``disable_warnings`` Disable requests warning useful when you have large number of testcases

        Raises AssertionError when requests_ntlm is not installed or ``auth``
        does not contain exactly three elements.
        """
        # requests_ntlm is an optional dependency; HttpNtlmAuth is falsy when
        # its import failed at module load time.
        if not HttpNtlmAuth:
            raise AssertionError('Requests NTLM module not loaded')
        elif len(auth) != 3:
            raise AssertionError('Incorrect number of authentication arguments'
                                 ' - expected 3, got {}'.format(len(auth)))
        else:
            # NTLM identities are expressed as DOMAIN\username.
            ntlm_auth = HttpNtlmAuth('{}\\{}'.format(auth[0], auth[1]),
                                     auth[2])
            logger.info('Creating NTLM Session using : alias=%s, url=%s, \
                        headers=%s, cookies=%s, ntlm_auth=%s, timeout=%s, \
                        proxies=%s, verify=%s, debug=%s '
                        % (alias, url, headers, cookies, ntlm_auth,
                           timeout, proxies, verify, debug))
            return self._create_session(
                alias,
                url,
                headers,
                cookies,
                ntlm_auth,
                timeout,
                max_retries,
                backoff_factor,
                proxies,
                verify,
                debug,
                disable_warnings)
    def create_digest_session(self, alias, url, auth, headers={}, cookies={},
                              timeout=None, proxies=None, verify=False,
                              debug=0, max_retries=3,backoff_factor=0.10, disable_warnings=0):
        """ Create Session: create a HTTP session to a server using Digest authentication

        ``url`` Base url of the server

        ``alias`` Robot Framework alias to identify the session

        ``headers`` Dictionary of default headers

        ``cookies`` Dictionary of cookies

        ``auth`` ['username', 'password'] for HTTP Digest Authentication

        ``timeout`` Connection timeout

        ``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication

        ``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.
                 Defaults to False.

        ``debug`` Enable http verbosity option more information
                https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel

        ``max_retries`` The maximum number of retries each connection should attempt.

        ``backoff_factor`` The pause between for each retry

        ``disable_warnings`` Disable requests warning useful when you have large number of testcases
        """
        # Wrap credentials in requests' digest-auth handler; None disables auth.
        digest_auth = requests.auth.HTTPDigestAuth(*auth) if auth else None
        return self._create_session(
            alias,
            url,
            headers,
            cookies,
            digest_auth,
            timeout,
            max_retries,
            backoff_factor,
            proxies,
            verify,
            debug,
            disable_warnings)
    def create_client_cert_session(self, alias, url, headers={}, cookies={},
                                   client_certs=None, timeout=None, proxies=None,
                                   verify=False, debug=0, max_retries=3, backoff_factor=0.10, disable_warnings=0):
        """ Create Session: create a HTTP session to a server using a client certificate

        ``url`` Base url of the server

        ``alias`` Robot Framework alias to identify the session

        ``headers`` Dictionary of default headers

        ``cookies`` Dictionary of cookies

        ``client_certs`` ['client certificate', 'client key'] PEM files containing the client key and certificate

        ``timeout`` Connection timeout

        ``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication

        ``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.
                 Defaults to False.

        ``debug`` Enable http verbosity option more information
                https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel

        ``max_retries`` The maximum number of retries each connection should attempt.

        ``backoff_factor`` The pause between for each retry

        ``disable_warnings`` Disable requests warning useful when you have large number of testcases
        """
        logger.info('Creating Session using : alias=%s, url=%s, headers=%s, \
                    cookies=%s, client_certs=%s, timeout=%s, proxies=%s, verify=%s, \
                    debug=%s ' % (alias, url, headers, cookies, client_certs, timeout,
                                  proxies, verify, debug))
        session = self._create_session(
            alias,
            url,
            headers,
            cookies,
            None,
            timeout,
            max_retries,
            backoff_factor,
            proxies,
            verify,
            debug,
            disable_warnings)
        # requests expects the client cert as a (cert_file, key_file) tuple.
        session.cert = tuple(client_certs)
        return session
    def delete_all_sessions(self):
        """ Removes all the session objects """
        # All previously created aliases become invalid after this call.
        logger.info('Delete All Sessions')
        self._cache.empty_cache()
    def update_session(self, alias, headers=None, cookies=None):
        """Update Session Headers: update a HTTP Session Headers

        ``alias`` Robot Framework alias to identify the session

        ``headers`` Dictionary of headers merge into session

        ``cookies`` Dictionary of cookies merged into the session's cookie jar
        """
        session = self._cache.switch(alias)
        # requests' merge helpers: new values are layered over existing ones.
        session.headers = merge_setting(headers, session.headers)
        session.cookies = merge_cookies(session.cookies, cookies)
def to_json(self, content, pretty_print=False):
""" Convert a string to a JSON object
``content`` String content to convert into JSON
``pretty_print`` If defined, will output JSON is pretty print format
"""
if PY3:
if isinstance(content, bytes):
content = content.decode(encoding='utf-8')
if pretty_print:
json_ = self._json_pretty_print(content)
else:
json_ = json.loads(content)
logger.info('To JSON using : content=%s ' % (content))
logger.info('To JSON using : pretty_print=%s ' % (pretty_print))
return json_
def get_request(
self,
alias,
uri,
headers=None,
json=None,
params=None,
allow_redirects=None,
timeout=None):
""" Send a GET request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the GET request to
``params`` url parameters to append to the uri
``headers`` a dictionary of headers to use with the request
``json`` json data to send in the body of the :class:`Request`.
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``timeout`` connection timeout
"""
session = self._cache.switch(alias)
redir = True if allow_redirects is None else allow_redirects
response = self._get_request(
session, uri, params, headers, json, redir, timeout)
logger.info(
'Get Request using : alias=%s, uri=%s, headers=%s json=%s' %
(alias, uri, headers, json))
return response
def get(
self,
alias,
uri,
params=None,
headers=None,
allow_redirects=None,
timeout=None):
""" **Deprecated- See Get Request now**
Send a GET request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the GET request to
``headers`` a dictionary of headers to use with the request
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``timeout`` connection timeout
"""
logger.warn("Deprecation Warning: Use Get Request in the future")
session = self._cache.switch(alias)
redir = True if allow_redirects is None else allow_redirects
response = self._get_request(
session, uri, params, headers, redir, timeout, json)
return response
def post_request(
self,
alias,
uri,
data=None,
json=None,
params=None,
headers=None,
files=None,
allow_redirects=None,
timeout=None):
""" Send a POST request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the POST request to
``data`` a dictionary of key-value pairs that will be urlencoded
and sent as POST data
or binary data that is sent as the raw body content
or passed as such for multipart form data if ``files`` is also
defined
``json`` a value that will be json encoded
and sent as POST data if files or data is not specified
``params`` url parameters to append to the uri
``headers`` a dictionary of headers to use with the request
``files`` a dictionary of file names containing file data to POST to the server
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``timeout`` connection timeout
"""
session = self._cache.switch(alias)
if not files:
data = self._format_data_according_to_header(session, data, headers)
redir = True if allow_redirects is None else allow_redirects
response = self._body_request(
"post",
session,
uri,
data,
json,
params,
files,
headers,
redir,
timeout)
dataStr = self._format_data_to_log_string_according_to_header(data, headers)
logger.info('Post Request using : alias=%s, uri=%s, data=%s, headers=%s, files=%s, allow_redirects=%s '
% (alias, uri, dataStr, headers, files, redir))
return response
def post(
self,
alias,
uri,
data={},
headers=None,
files=None,
allow_redirects=None,
timeout=None):
""" **Deprecated- See Post Request now**
Send a POST request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the GET request to
``data`` a dictionary of key-value pairs that will be urlencoded
and sent as POST data
or binary data that is sent as the raw body content
``headers`` a dictionary of headers to use with the request
``files`` a dictionary of file names containing file data to POST to the server
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``timeout`` connection timeout
"""
logger.warn("Deprecation Warning: Use Post Request in the future")
session = self._cache.switch(alias)
data = self._utf8_urlencode(data)
redir = True if allow_redirects is None else allow_redirects
response = self._body_request(
"post",
session,
uri,
data,
None,
None,
files,
headers,
redir,
timeout)
return response
def patch_request(
self,
alias,
uri,
data=None,
json=None,
params=None,
headers=None,
files=None,
allow_redirects=None,
timeout=None):
""" Send a PATCH request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the PATCH request to
``data`` a dictionary of key-value pairs that will be urlencoded
and sent as PATCH data
or binary data that is sent as the raw body content
``json`` a value that will be json encoded
and sent as PATCH data if data is not specified
``headers`` a dictionary of headers to use with the request
``files`` a dictionary of file names containing file data to PATCH to the server
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``params`` url parameters to append to the uri
``timeout`` connection timeout
"""
session = self._cache.switch(alias)
data = self._format_data_according_to_header(session, data, headers)
redir = True if allow_redirects is None else allow_redirects
response = self._body_request(
"patch",
session,
uri,
data,
json,
params,
files,
headers,
redir,
timeout)
if isinstance(data, bytes):
data = data.decode('utf-8')
logger.info('Patch Request using : alias=%s, uri=%s, data=%s, \
headers=%s, files=%s, allow_redirects=%s '
% (alias, uri, data, headers, files, redir))
return response
def patch(
self,
alias,
uri,
data={},
headers=None,
files={},
allow_redirects=None,
timeout=None):
""" **Deprecated- See Patch Request now**
Send a PATCH request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the PATCH request to
``data`` a dictionary of key-value pairs that will be urlencoded
and sent as PATCH data
or binary data that is sent as the raw body content
``headers`` a dictionary of headers to use with the request
``files`` a dictionary of file names containing file data to PATCH to the server
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``timeout`` connection timeout
"""
logger.warn("Deprecation Warning: Use Patch Request in the future")
session = self._cache.switch(alias)
data = self._utf8_urlencode(data)
redir = True if allow_redirects is None else allow_redirects
response = self._body_request(
"patch",
session,
uri,
data,
None,
None,
files,
headers,
redir,
timeout)
return response
def put_request(
self,
alias,
uri,
data=None,
json=None,
params=None,
files=None,
headers=None,
allow_redirects=None,
timeout=None):
""" Send a PUT request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the PUT request to
``data`` a dictionary of key-value pairs that will be urlencoded
and sent as PUT data
or binary data that is sent as the raw body content
``json`` a value that will be json encoded
and sent as PUT data if data is not specified
``headers`` a dictionary of headers to use with the request
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``params`` url parameters to append to the uri
``timeout`` connection timeout
"""
session = self._cache.switch(alias)
data = self._format_data_according_to_header(session, data, headers)
redir = True if allow_redirects is None else allow_redirects
response = self._body_request(
"put",
session,
uri,
data,
json,
params,
files,
headers,
redir,
timeout)
if isinstance(data, bytes):
data = data.decode('utf-8')
logger.info('Put Request using : alias=%s, uri=%s, data=%s, \
headers=%s, allow_redirects=%s ' % (alias, uri, data, headers, redir))
return response
def put(
self,
alias,
uri,
data=None,
headers=None,
allow_redirects=None,
timeout=None):
""" **Deprecated- See Put Request now**
Send a PUT request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the PUT request to
``headers`` a dictionary of headers to use with the request
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``timeout`` connection timeout
"""
logger.warn("Deprecation Warning: Use Put Request in the future")
session = self._cache.switch(alias)
data = self._utf8_urlencode(data)
redir = True if allow_redirects is None else allow_redirects
response = self._body_request(
"put",
session,
uri,
data,
None,
None,
None,
headers,
redir,
timeout)
return response
def delete(
        self,
        alias,
        uri,
        data=(),
        headers=None,
        allow_redirects=None,
        timeout=None):
    """ * * * Deprecated- See Delete Request now * * *
    Send a DELETE request on the session object found using the
    given `alias`
    ``alias`` that will be used to identify the Session object in the cache
    ``uri`` to send the DELETE request to
    ``headers`` a dictionary of headers to use with the request
    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
    ``timeout`` connection timeout
    """
    logger.warn("Deprecation Warning: Use Delete Request in the future")
    session = self._cache.switch(alias)
    # Legacy behaviour: the body is always form-urlencoded.
    data = self._utf8_urlencode(data)
    redir = True if allow_redirects is None else allow_redirects
    # Bug fix: the original passed the module-level ``json`` object as the
    # request's JSON body; this deprecated keyword has no json parameter,
    # so no JSON body must be sent.
    response = self._delete_request(
        session, uri, data, None, None, headers, redir, timeout)
    return response
def head_request(
        self,
        alias,
        uri,
        headers=None,
        allow_redirects=None,
        timeout=None):
    """ Send a HEAD request on the session object found using the
    given `alias`
    ``alias`` that will be used to identify the Session object in the cache
    ``uri`` to send the HEAD request to
    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
    ``headers`` a dictionary of headers to use with the request
    """
    session = self._cache.switch(alias)
    # Unlike the other verbs, HEAD does NOT follow redirects by default.
    redir = False if allow_redirects is None else allow_redirects
    response = self._head_request(session, uri, headers, redir, timeout)
    logger.info('Head Request using : alias=%s, uri=%s, headers=%s, \
    allow_redirects=%s ' % (alias, uri, headers, redir))
    return response
def head(
        self,
        alias,
        uri,
        headers=None,
        allow_redirects=None,
        timeout=None):
    """ **Deprecated- See Head Request now**
    Send a HEAD request on the session object found using the
    given `alias`
    ``alias`` that will be used to identify the Session object in the cache
    ``uri`` to send the HEAD request to
    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
    ``headers`` a dictionary of headers to use with the request
    """
    logger.warn("Deprecation Warning: Use Head Request in the future")
    session = self._cache.switch(alias)
    # HEAD does not follow redirects unless explicitly requested.
    follow = allow_redirects if allow_redirects is not None else False
    return self._head_request(session, uri, headers, follow, timeout)
def options_request(
        self,
        alias,
        uri,
        headers=None,
        allow_redirects=None,
        timeout=None):
    """ Send an OPTIONS request on the session object found using the
    given `alias`
    ``alias`` that will be used to identify the Session object in the cache
    ``uri`` to send the OPTIONS request to
    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
    ``headers`` a dictionary of headers to use with the request
    """
    session = self._cache.switch(alias)
    # OPTIONS follows redirects by default.
    redir = True if allow_redirects is None else allow_redirects
    response = self._options_request(session, uri, headers, redir, timeout)
    logger.info(
        'Options Request using : alias=%s, uri=%s, headers=%s, allow_redirects=%s ' %
        (alias, uri, headers, redir))
    return response
def options(
        self,
        alias,
        uri,
        headers=None,
        allow_redirects=None,
        timeout=None):
    """ **Deprecated- See Options Request now**
    Send an OPTIONS request on the session object found using the
    given `alias`
    ``alias`` that will be used to identify the Session object in the cache
    ``uri`` to send the OPTIONS request to
    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
    ``headers`` a dictionary of headers to use with the request
    """
    logger.warn("Deprecation Warning: Use Options Request in the future")
    session = self._cache.switch(alias)
    # Redirects are followed by default for OPTIONS.
    follow = allow_redirects if allow_redirects is not None else True
    return self._options_request(session, uri, headers, follow, timeout)
def _get_request(
        self,
        session,
        uri,
        params,
        headers,
        json,
        allow_redirects,
        timeout):
    """Issue a GET on *session* and return the requests.Response.

    Side effect: stores the response on ``session.last_resp`` and, when
    http debugging is enabled, captures and re-logs httplib output.
    """
    self._capture_output()
    resp = session.get(self._get_url(session, uri),
                       headers=headers,
                       json=json,
                       params=self._utf8_urlencode(params),
                       allow_redirects=allow_redirects,
                       timeout=self._get_timeout(timeout),
                       cookies=self.cookies,
                       verify=self.verify)
    self._print_debug()
    # Store the last session object
    session.last_resp = resp
    return resp
def _body_request(
        self,
        method_name,
        session,
        uri,
        data,
        json,
        params,
        files,
        headers,
        allow_redirects,
        timeout):
    """Issue a body-carrying request (``post``/``put``/``patch``) named
    by *method_name* on *session* and return the requests.Response.

    Side effect: stores the response on ``session.last_resp`` and logs
    the response text at DEBUG level.
    """
    self._capture_output()
    # Resolve the bound session method (session.post, session.put, ...).
    method = getattr(session, method_name)
    resp = method(self._get_url(session, uri),
                  data=data,
                  json=json,
                  params=self._utf8_urlencode(params),
                  files=files,
                  headers=headers,
                  allow_redirects=allow_redirects,
                  timeout=self._get_timeout(timeout),
                  cookies=self.cookies,
                  verify=self.verify)
    self._print_debug()
    # Store the last session object
    session.last_resp = resp
    self.builtin.log(method_name + ' response: ' + resp.text, 'DEBUG')
    return resp
def _delete_request(
        self,
        session,
        uri,
        data,
        json,
        params,
        headers,
        allow_redirects,
        timeout):
    """Issue a DELETE on *session* and return the requests.Response.

    Side effect: stores the response on ``session.last_resp``.
    """
    self._capture_output()
    resp = session.delete(self._get_url(session, uri),
                          data=data,
                          json=json,
                          params=self._utf8_urlencode(params),
                          headers=headers,
                          allow_redirects=allow_redirects,
                          timeout=self._get_timeout(timeout),
                          cookies=self.cookies,
                          verify=self.verify)
    self._print_debug()
    # Store the last session object
    session.last_resp = resp
    return resp
def _head_request(self, session, uri, headers, allow_redirects, timeout):
    """Issue a HEAD on *session* and return the requests.Response.

    Side effect: stores the response on ``session.last_resp``.
    """
    self._capture_output()
    resp = session.head(self._get_url(session, uri),
                        headers=headers,
                        allow_redirects=allow_redirects,
                        timeout=self._get_timeout(timeout),
                        cookies=self.cookies,
                        verify=self.verify)
    self._print_debug()
    # Store the last session object
    session.last_resp = resp
    return resp
def _options_request(
        self,
        session,
        uri,
        headers,
        allow_redirects,
        timeout):
    """Issue an OPTIONS on *session* and return the requests.Response.

    Side effect: stores the response on ``session.last_resp``.
    """
    self._capture_output()
    resp = session.options(self._get_url(session, uri),
                           headers=headers,
                           cookies=self.cookies,
                           allow_redirects=allow_redirects,
                           timeout=self._get_timeout(timeout),
                           verify=self.verify)
    self._print_debug()
    # Store the last session object
    session.last_resp = resp
    return resp
def _get_url(self, session, uri):
"""
Helper method to get the full url
"""
url = session.url
if uri:
slash = '' if uri.startswith('/') else '/'
url = "%s%s%s" % (session.url, slash, uri)
return url
def _get_timeout(self, timeout):
return float(timeout) if timeout is not None else self.timeout
def _capture_output(self):
    # When http debugging is enabled, redirect stdout into a buffer so
    # that httplib's debug chatter can be collected and re-emitted by
    # _print_debug() through Robot's logger instead of the console.
    if self.debug >= 1:
        self.http_log = WritableObject()
        sys.stdout = self.http_log
def _print_debug(self):
    # Counterpart of _capture_output(): restore stdout and re-log the
    # captured httplib output at DEBUG level, stripped of escape noise.
    if self.debug >= 1:
        sys.stdout = sys.__stdout__  # Restore stdout
        if PY3:
            debug_info = ''.join(
                self.http_log.content).replace(
                '\\r',
                '').replace(
                '\'',
                '')
        else:
            # Python 2: captured text carries literal escapes; decode them.
            debug_info = ''.join(
                self.http_log.content).replace(
                '\\r',
                '').decode('string_escape').replace(
                '\'',
                '')
        # Remove empty lines
        debug_info = "\n".join(
            [ll.rstrip() for ll in debug_info.splitlines() if ll.strip()])
        self.builtin.log(debug_info, 'DEBUG')
def _json_pretty_print(self, content):
"""
Pretty print a JSON object
``content`` JSON object to pretty print
"""
temp = json.loads(content)
return json.dumps(
temp,
sort_keys=True,
indent=4,
separators=(
',',
': '))
def _utf8_urlencode(self, data):
    # Strings pass through as UTF-8 bytes; non-dict values are returned
    # untouched; dicts have their string values UTF-8 encoded and are
    # urlencoded into a form/query-string style body.
    if self._is_string_type(data):
        return data.encode('utf-8')
    if not isinstance(data, dict):
        return data
    utf8_data = {}
    for k, v in data.items():
        if self._is_string_type(v):
            v = v.encode('utf-8')
        utf8_data[k] = v
    return urlencode(utf8_data)
def _format_data_according_to_header(self, session, data, headers):
    """Encode *data* to match the request's Content-Type.

    JSON content types get ``json.dumps`` (generators and already-valid
    JSON strings are left alone); urlencoded and untyped requests fall
    back to form urlencoding.
    """
    headers = self._merge_headers(session, headers)
    if data is not None and headers is not None and 'Content-Type' in headers and not self._is_json(data):
        if headers['Content-Type'].find("application/json") != -1:
            if not isinstance(data, types.GeneratorType):
                data = json.dumps(data)
        elif headers['Content-Type'].find("application/x-www-form-urlencoded") != -1:
            data = self._utf8_urlencode(data)
    else:
        data = self._utf8_urlencode(data)
    return data
def _format_data_to_log_string_according_to_header(self, data, headers):
dataStr = "<empty>"
if data is not None and headers is not None and 'Content-Type' in headers:
if (headers['Content-Type'].find("application/json") != -1) or \
(headers['Content-Type'].find("application/x-www-form-urlencoded") != -1):
if isinstance(data, bytes):
dataStr = data.decode('utf-8')
else:
dataStr = data
else:
dataStr = "<" + headers['Content-Type'] + ">"
return dataStr
@staticmethod
def _merge_headers(session, headers):
if headers is None:
headers = {}
else:
headers = headers.copy()
headers.update(session.headers)
return headers
@staticmethod
def _is_json(data):
try:
json.loads(data)
except (TypeError, ValueError):
return False
return True
@staticmethod
def _is_string_type(data):
    # Text check that works on both Python 3 (str) and Python 2
    # (unicode); byte strings are deliberately not treated as text.
    if PY3 and isinstance(data, str):
        return True
    elif not PY3 and isinstance(data, unicode):
        return True
    return False
|
bulkan/robotframework-requests
|
src/RequestsLibrary/RequestsKeywords.py
|
RequestsKeywords.delete
|
python
|
def delete(
self,
alias,
uri,
data=(),
headers=None,
allow_redirects=None,
timeout=None):
logger.warn("Deprecation Warning: Use Delete Request in the future")
session = self._cache.switch(alias)
data = self._utf8_urlencode(data)
redir = True if allow_redirects is None else allow_redirects
response = self._delete_request(
session, uri, data, json, None, headers, redir, timeout)
return response
|
* * * Deprecated- See Delete Request now * * *
Send a DELETE request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the DELETE request to
``headers`` a dictionary of headers to use with the request
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``timeout`` connection timeout
|
train
|
https://github.com/bulkan/robotframework-requests/blob/11baa3277f1cb728712e26d996200703c15254a8/src/RequestsLibrary/RequestsKeywords.py#L904-L935
|
[
"def _delete_request(\n self,\n session,\n uri,\n data,\n json,\n params,\n headers,\n allow_redirects,\n timeout):\n self._capture_output()\n\n resp = session.delete(self._get_url(session, uri),\n data=data,\n json=json,\n params=self._utf8_urlencode(params),\n headers=headers,\n allow_redirects=allow_redirects,\n timeout=self._get_timeout(timeout),\n cookies=self.cookies,\n verify=self.verify)\n\n self._print_debug()\n\n # Store the last session object\n session.last_resp = resp\n\n return resp\n",
"def _utf8_urlencode(self, data):\n\n if self._is_string_type(data):\n return data.encode('utf-8')\n\n if not isinstance(data, dict):\n return data\n\n utf8_data = {}\n for k, v in data.items():\n if self._is_string_type(v):\n v = v.encode('utf-8')\n utf8_data[k] = v\n return urlencode(utf8_data)\n"
] |
class RequestsKeywords(object):
"""``RequestsLibrary`` is a [http://code.google.com/p/robotframework/|Robot Framework] test library that uses the [https://github.com/kennethreitz/requests|Requests] HTTP client.
Here is an example testcase
| ***** Settings ***** | | | | |
| Library | Collections | | | |
| Library | RequestsLibrary | | | |
| ***** Test Cases ***** | | | | |
| Get Requests | | | | |
| | Create Session | github | http://api.github.com | |
| | Create Session | google | http://www.google.com | |
| | ${resp}= | Get Request | google | / |
| | Should Be Equal As Strings | ${resp.status_code} | 200 | |
| | ${resp}= | Get Request | github | /users/bulkan |
| | Should Be Equal As Strings | ${resp.status_code} | 200 | |
| | Dictionary Should Contain Value | ${resp.json()} | Bulkan Savun Evcimen | |
"""
ROBOT_LIBRARY_SCOPE = 'Global'
def __init__(self):
    # Named cache of requests.Session objects, keyed by the alias given
    # to the various ``Create ... Session`` keywords.
    self._cache = robot.utils.ConnectionCache('No sessions created')
    self.builtin = BuiltIn()
    # httplib debug verbosity; raised by _create_session when debug >= 1.
    self.debug = 0
def _create_session(
        self,
        alias,
        url,
        headers,
        cookies,
        auth,
        timeout,
        max_retries,
        backoff_factor,
        proxies,
        verify,
        debug,
        disable_warnings):
    """ Create Session: create a HTTP session to a server
    ``url`` Base url of the server
    ``alias`` Robot Framework alias to identify the session
    ``headers`` Dictionary of default headers
    ``cookies`` Dictionary of cookies
    ``auth`` List of username & password for HTTP Basic Auth
    ``timeout`` Connection timeout
    ``max_retries`` The maximum number of retries each connection should attempt.
    ``backoff_factor`` The pause between for each retry
    ``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication
    ``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.
    ``debug`` Enable http verbosity option more information
    https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel
    ``disable_warnings`` Disable requests warning useful when you have large number of testcases
    """
    self.builtin.log('Creating session: %s' % alias, 'DEBUG')
    s = session = requests.Session()
    s.headers.update(headers)
    s.auth = auth if auth else s.auth
    s.proxies = proxies if proxies else s.proxies
    try:
        max_retries = int(max_retries)
    except ValueError as err:
        raise ValueError("Error converting max_retries parameter: %s" % err)
    if max_retries > 0:
        http = requests.adapters.HTTPAdapter(max_retries=Retry(total=max_retries, backoff_factor=backoff_factor))
        https = requests.adapters.HTTPAdapter(max_retries=Retry(total=max_retries, backoff_factor=backoff_factor))
        # Replace the session's original adapters
        s.mount('http://', http)
        s.mount('https://', https)
    # Disable requests warnings; useful when you have a large number of
    # test cases, otherwise Robot's log.html and output.xml grow drastically.
    if disable_warnings:
        logging.basicConfig()  # logging must be initialized or nothing from requests is visible
        logging.getLogger().setLevel(logging.ERROR)
        requests_log = logging.getLogger("requests")
        requests_log.setLevel(logging.ERROR)
        requests_log.propagate = True
        if not verify:
            requests.packages.urllib3.disable_warnings()
    # verify can be a Boolean or a String
    if isinstance(verify, bool):
        s.verify = verify
    elif isinstance(verify, str) or isinstance(verify, unicode):
        if verify.lower() == 'true' or verify.lower() == 'false':
            s.verify = self.builtin.convert_to_boolean(verify)
        else:
            # String for CA_BUNDLE, not a Boolean String
            s.verify = verify
    else:
        # not a Boolean nor a String
        s.verify = verify
    # cant pass these into the Session anymore; keep them on the keyword
    # library instance and apply them per-request instead.
    self.timeout = float(timeout) if timeout is not None else None
    self.cookies = cookies
    self.verify = verify if self.builtin.convert_to_boolean(verify) != True else None
    s.url = url
    # Enable http verbosity
    if int(debug) >= 1:
        self.debug = int(debug)
        httplib.HTTPConnection.debuglevel = self.debug
    self._cache.register(session, alias=alias)
    return session
def create_session(self, alias, url, headers={}, cookies={},
                   auth=None, timeout=None, proxies=None,
                   verify=False, debug=0, max_retries=3, backoff_factor=0.10, disable_warnings=0):
    """ Create Session: create a HTTP session to a server
    ``url`` Base url of the server
    ``alias`` Robot Framework alias to identify the session
    ``headers`` Dictionary of default headers
    ``cookies`` Dictionary of cookies
    ``auth`` List of username & password for HTTP Basic Auth
    ``timeout`` Connection timeout
    ``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication
    ``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.
    Defaults to False.
    ``debug`` Enable http verbosity option more information
    https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel
    ``max_retries`` The maximum number of retries each connection should attempt.
    ``backoff_factor`` The pause between for each retry
    ``disable_warnings`` Disable requests warning useful when you have large number of testcases
    """
    # Wrap the credential pair in requests' Basic-Auth helper before
    # delegating to the shared session builder.
    auth = requests.auth.HTTPBasicAuth(*auth) if auth else None
    logger.info('Creating Session using : alias=%s, url=%s, headers=%s, \
    cookies=%s, auth=%s, timeout=%s, proxies=%s, verify=%s, \
    debug=%s ' % (alias, url, headers, cookies, auth, timeout,
                  proxies, verify, debug))
    return self._create_session(
        alias,
        url,
        headers,
        cookies,
        auth,
        timeout,
        max_retries,
        backoff_factor,
        proxies,
        verify,
        debug,
        disable_warnings)
def create_custom_session(
        self,
        alias,
        url,
        auth,
        headers={},
        cookies={},
        timeout=None,
        proxies=None,
        verify=False,
        debug=0,
        max_retries=3,
        backoff_factor=0.10,
        disable_warnings=0):
    """ Create Session: create a HTTP session to a server
    ``url`` Base url of the server
    ``alias`` Robot Framework alias to identify the session
    ``headers`` Dictionary of default headers
    ``cookies`` Dictionary of cookies
    ``auth`` A Custom Authentication object to be passed on to the reqests library.
    http://docs.python-requests.org/en/master/user/advanced/#custom-authentication
    ``timeout`` Connection timeout
    ``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication
    ``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.
    Defaults to False.
    ``debug`` Enable http verbosity option more information
    https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel
    ``max_retries`` The maximum number of retries each connection should attempt.
    ``backoff_factor`` The pause between for each retry
    ``disable_warnings`` Disable requests warning useful when you have large number of testcases
    """
    # Unlike create_session, ``auth`` is passed through as-is so any
    # requests-compatible auth object can be used.
    logger.info('Creating Custom Authenticated Session using : alias=%s, url=%s, headers=%s, \
    cookies=%s, auth=%s, timeout=%s, proxies=%s, verify=%s, \
    debug=%s ' % (alias, url, headers, cookies, auth, timeout,
                  proxies, verify, debug))
    return self._create_session(
        alias,
        url,
        headers,
        cookies,
        auth,
        timeout,
        max_retries,
        backoff_factor,
        proxies,
        verify,
        debug,
        disable_warnings)
def create_ntlm_session(
        self,
        alias,
        url,
        auth,
        headers={},
        cookies={},
        timeout=None,
        proxies=None,
        verify=False,
        debug=0,
        max_retries=3,
        backoff_factor=0.10,
        disable_warnings=0):
    """ Create Session: create a HTTP session to a server
    ``url`` Base url of the server
    ``alias`` Robot Framework alias to identify the session
    ``headers`` Dictionary of default headers
    ``cookies`` Dictionary of cookies
    ``auth`` ['DOMAIN', 'username', 'password'] for NTLM Authentication
    ``timeout`` Connection timeout
    ``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication
    ``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.
    Defaults to False.
    ``debug`` Enable http verbosity option more information
    https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel
    ``max_retries`` The maximum number of retries each connection should attempt.
    ``backoff_factor`` The pause between for each retry
    ``disable_warnings`` Disable requests warning useful when you have large number of testcases
    """
    # requests_ntlm is an optional dependency; HttpNtlmAuth is None when
    # it failed to import, so fail fast with a clear message.
    if not HttpNtlmAuth:
        raise AssertionError('Requests NTLM module not loaded')
    elif len(auth) != 3:
        raise AssertionError('Incorrect number of authentication arguments'
                             ' - expected 3, got {}'.format(len(auth)))
    else:
        # NTLM expects "DOMAIN\\username" as the user identifier.
        ntlm_auth = HttpNtlmAuth('{}\\{}'.format(auth[0], auth[1]),
                                 auth[2])
        logger.info('Creating NTLM Session using : alias=%s, url=%s, \
        headers=%s, cookies=%s, ntlm_auth=%s, timeout=%s, \
        proxies=%s, verify=%s, debug=%s '
                    % (alias, url, headers, cookies, ntlm_auth,
                       timeout, proxies, verify, debug))
        return self._create_session(
            alias,
            url,
            headers,
            cookies,
            ntlm_auth,
            timeout,
            max_retries,
            backoff_factor,
            proxies,
            verify,
            debug,
            disable_warnings)
def create_digest_session(self, alias, url, auth, headers={}, cookies={},
                          timeout=None, proxies=None, verify=False,
                          debug=0, max_retries=3, backoff_factor=0.10, disable_warnings=0):
    """ Create Session: create a HTTP session to a server
    ``url`` Base url of the server
    ``alias`` Robot Framework alias to identify the session
    ``headers`` Dictionary of default headers
    ``cookies`` Dictionary of cookies
    ``auth`` ['username', 'password'] for HTTP Digest Authentication
    ``timeout`` Connection timeout
    ``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication
    ``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.
    Defaults to False.
    ``debug`` Enable http verbosity option more information
    https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel
    ``max_retries`` The maximum number of retries each connection should attempt.
    ``backoff_factor`` The pause between for each retry
    ``disable_warnings`` Disable requests warning useful when you have large number of testcases
    """
    # Wrap the credential pair in requests' Digest-Auth helper.
    digest_auth = requests.auth.HTTPDigestAuth(*auth) if auth else None
    return self._create_session(
        alias,
        url,
        headers,
        cookies,
        digest_auth,
        timeout,
        max_retries,
        backoff_factor,
        proxies,
        verify,
        debug,
        disable_warnings)
def create_client_cert_session(self, alias, url, headers={}, cookies={},
                               client_certs=None, timeout=None, proxies=None,
                               verify=False, debug=0, max_retries=3, backoff_factor=0.10, disable_warnings=0):
    """ Create Session: create a HTTP session to a server
    ``url`` Base url of the server
    ``alias`` Robot Framework alias to identify the session
    ``headers`` Dictionary of default headers
    ``cookies`` Dictionary of cookies
    ``client_certs`` ['client certificate', 'client key'] PEM files containing the client key and certificate
    ``timeout`` Connection timeout
    ``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication
    ``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.
    Defaults to False.
    ``debug`` Enable http verbosity option more information
    https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel
    ``max_retries`` The maximum number of retries each connection should attempt.
    ``backoff_factor`` The pause between for each retry
    ``disable_warnings`` Disable requests warning useful when you have large number of testcases
    """
    logger.info('Creating Session using : alias=%s, url=%s, headers=%s, \
    cookies=%s, client_certs=%s, timeout=%s, proxies=%s, verify=%s, \
    debug=%s ' % (alias, url, headers, cookies, client_certs, timeout,
                  proxies, verify, debug))
    session = self._create_session(
        alias,
        url,
        headers,
        cookies,
        None,
        timeout,
        max_retries,
        backoff_factor,
        proxies,
        verify,
        debug,
        disable_warnings)
    # requests expects the client cert as a (cert_file, key_file) tuple.
    session.cert = tuple(client_certs)
    return session
def delete_all_sessions(self):
    """ Removes all the session objects """
    # Drops every requests.Session registered via the Create ... Session
    # keywords; their aliases become invalid after this call.
    logger.info('Delete All Sessions')
    self._cache.empty_cache()
def update_session(self, alias, headers=None, cookies=None):
    """Update Session Headers: update a HTTP Session Headers
    ``alias`` Robot Framework alias to identify the session
    ``headers`` Dictionary of headers merge into session
    ``cookies`` Dictionary of cookies merged into the session's cookie jar
    """
    session = self._cache.switch(alias)
    # merge_setting / merge_cookies are the requests.sessions merge
    # helpers; existing session values are combined with the new ones.
    session.headers = merge_setting(headers, session.headers)
    session.cookies = merge_cookies(session.cookies, cookies)
def to_json(self, content, pretty_print=False):
    """ Convert a string to a JSON object
    ``content`` String content to convert into JSON
    ``pretty_print`` If defined, will output JSON in pretty print format
    """
    # On Python 3 a bytes payload must be decoded before parsing.
    if PY3 and isinstance(content, bytes):
        content = content.decode(encoding='utf-8')
    if pretty_print:
        json_ = self._json_pretty_print(content)
    else:
        json_ = json.loads(content)
    logger.info('To JSON using : content=%s ' % (content))
    logger.info('To JSON using : pretty_print=%s ' % (pretty_print))
    return json_
def get_request(
        self,
        alias,
        uri,
        headers=None,
        json=None,
        params=None,
        allow_redirects=None,
        timeout=None):
    """ Send a GET request on the session object found using the
    given `alias`
    ``alias`` that will be used to identify the Session object in the cache
    ``uri`` to send the GET request to
    ``params`` url parameters to append to the uri
    ``headers`` a dictionary of headers to use with the request
    ``json`` json data to send in the body of the :class:`Request`.
    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
    ``timeout`` connection timeout
    """
    session = self._cache.switch(alias)
    # GET follows redirects by default.
    redir = True if allow_redirects is None else allow_redirects
    response = self._get_request(
        session, uri, params, headers, json, redir, timeout)
    logger.info(
        'Get Request using : alias=%s, uri=%s, headers=%s json=%s' %
        (alias, uri, headers, json))
    return response
def get(
        self,
        alias,
        uri,
        params=None,
        headers=None,
        allow_redirects=None,
        timeout=None):
    """ **Deprecated- See Get Request now**
    Send a GET request on the session object found using the
    given `alias`
    ``alias`` that will be used to identify the Session object in the cache
    ``uri`` to send the GET request to
    ``headers`` a dictionary of headers to use with the request
    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
    ``timeout`` connection timeout
    """
    logger.warn("Deprecation Warning: Use Get Request in the future")
    session = self._cache.switch(alias)
    redir = True if allow_redirects is None else allow_redirects
    # Bug fix: arguments must follow _get_request's signature
    # (session, uri, params, headers, json, allow_redirects, timeout).
    # The original passed redir as json, timeout as allow_redirects and
    # the json *module* as the timeout. This keyword has no json body,
    # so None is passed for it.
    response = self._get_request(
        session, uri, params, headers, None, redir, timeout)
    return response
def post_request(
        self,
        alias,
        uri,
        data=None,
        json=None,
        params=None,
        headers=None,
        files=None,
        allow_redirects=None,
        timeout=None):
    """ Send a POST request on the session object found using the
    given `alias`
    ``alias`` that will be used to identify the Session object in the cache
    ``uri`` to send the POST request to
    ``data`` a dictionary of key-value pairs that will be urlencoded
    and sent as POST data
    or binary data that is sent as the raw body content
    or passed as such for multipart form data if ``files`` is also
    defined
    ``json`` a value that will be json encoded
    and sent as POST data if files or data is not specified
    ``params`` url parameters to append to the uri
    ``headers`` a dictionary of headers to use with the request
    ``files`` a dictionary of file names containing file data to POST to the server
    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
    ``timeout`` connection timeout
    """
    session = self._cache.switch(alias)
    # For multipart uploads ``data`` must be passed through untouched so
    # requests can build the multipart body itself.
    if not files:
        data = self._format_data_according_to_header(session, data, headers)
    redir = True if allow_redirects is None else allow_redirects
    response = self._body_request(
        "post",
        session,
        uri,
        data,
        json,
        params,
        files,
        headers,
        redir,
        timeout)
    dataStr = self._format_data_to_log_string_according_to_header(data, headers)
    logger.info('Post Request using : alias=%s, uri=%s, data=%s, headers=%s, files=%s, allow_redirects=%s '
                % (alias, uri, dataStr, headers, files, redir))
    return response
def post(
        self,
        alias,
        uri,
        data={},
        headers=None,
        files=None,
        allow_redirects=None,
        timeout=None):
    """ **Deprecated- See Post Request now**
    Send a POST request on the session object found using the
    given `alias`
    ``alias`` that will be used to identify the Session object in the cache
    ``uri`` to send the GET request to
    ``data`` a dictionary of key-value pairs that will be urlencoded
    and sent as POST data
    or binary data that is sent as the raw body content
    ``headers`` a dictionary of headers to use with the request
    ``files`` a dictionary of file names containing file data to POST to the server
    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
    ``timeout`` connection timeout
    """
    logger.warn("Deprecation Warning: Use Post Request in the future")
    session = self._cache.switch(alias)
    # Legacy behaviour: body is always form-urlencoded (no JSON support).
    data = self._utf8_urlencode(data)
    redir = True if allow_redirects is None else allow_redirects
    response = self._body_request(
        "post",
        session,
        uri,
        data,
        None,
        None,
        files,
        headers,
        redir,
        timeout)
    return response
def patch_request(
        self,
        alias,
        uri,
        data=None,
        json=None,
        params=None,
        headers=None,
        files=None,
        allow_redirects=None,
        timeout=None):
    """ Send a PATCH request on the session object found using the
    given `alias`
    ``alias`` that will be used to identify the Session object in the cache
    ``uri`` to send the PATCH request to
    ``data`` a dictionary of key-value pairs that will be urlencoded
    and sent as PATCH data
    or binary data that is sent as the raw body content
    ``json`` a value that will be json encoded
    and sent as PATCH data if data is not specified
    ``headers`` a dictionary of headers to use with the request
    ``files`` a dictionary of file names containing file data to PATCH to the server
    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
    ``params`` url parameters to append to the uri
    ``timeout`` connection timeout
    """
    session = self._cache.switch(alias)
    data = self._format_data_according_to_header(session, data, headers)
    redir = True if allow_redirects is None else allow_redirects
    response = self._body_request(
        "patch",
        session,
        uri,
        data,
        json,
        params,
        files,
        headers,
        redir,
        timeout)
    # Decode for logging only; the request body was already sent.
    if isinstance(data, bytes):
        data = data.decode('utf-8')
    logger.info('Patch Request using : alias=%s, uri=%s, data=%s, \
    headers=%s, files=%s, allow_redirects=%s '
                % (alias, uri, data, headers, files, redir))
    return response
def patch(
        self,
        alias,
        uri,
        data={},
        headers=None,
        files={},
        allow_redirects=None,
        timeout=None):
    """ **Deprecated- See Patch Request now**
    Send a PATCH request on the session object found using the
    given `alias`
    ``alias`` that will be used to identify the Session object in the cache
    ``uri`` to send the PATCH request to
    ``data`` a dictionary of key-value pairs that will be urlencoded
    and sent as PATCH data
    or binary data that is sent as the raw body content
    ``headers`` a dictionary of headers to use with the request
    ``files`` a dictionary of file names containing file data to PATCH to the server
    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
    ``timeout`` connection timeout
    """
    logger.warn("Deprecation Warning: Use Patch Request in the future")
    session = self._cache.switch(alias)
    # Legacy behaviour: body is always form-urlencoded (no JSON support).
    data = self._utf8_urlencode(data)
    redir = True if allow_redirects is None else allow_redirects
    response = self._body_request(
        "patch",
        session,
        uri,
        data,
        None,
        None,
        files,
        headers,
        redir,
        timeout)
    return response
def put_request(
        self,
        alias,
        uri,
        data=None,
        json=None,
        params=None,
        files=None,
        headers=None,
        allow_redirects=None,
        timeout=None):
    """ Send a PUT request on the session object found using the
    given `alias`
    ``alias`` that will be used to identify the Session object in the cache
    ``uri`` to send the PUT request to
    ``data`` a dictionary of key-value pairs that will be urlencoded
    and sent as PUT data
    or binary data that is sent as the raw body content
    ``json`` a value that will be json encoded
    and sent as PUT data if data is not specified
    ``headers`` a dictionary of headers to use with the request
    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
    ``params`` url parameters to append to the uri
    ``timeout`` connection timeout
    """
    session = self._cache.switch(alias)
    data = self._format_data_according_to_header(session, data, headers)
    redir = True if allow_redirects is None else allow_redirects
    response = self._body_request(
        "put",
        session,
        uri,
        data,
        json,
        params,
        files,
        headers,
        redir,
        timeout)
    # Decode for logging only; the request body was already sent.
    if isinstance(data, bytes):
        data = data.decode('utf-8')
    logger.info('Put Request using : alias=%s, uri=%s, data=%s, \
    headers=%s, allow_redirects=%s ' % (alias, uri, data, headers, redir))
    return response
def put(
        self,
        alias,
        uri,
        data=None,
        headers=None,
        allow_redirects=None,
        timeout=None):
    """ **Deprecated- See Put Request now**
    Send a PUT request on the session object found using the
    given `alias`
    ``alias`` that will be used to identify the Session object in the cache
    ``uri`` to send the PUT request to
    ``headers`` a dictionary of headers to use with the request
    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
    ``timeout`` connection timeout
    """
    logger.warn("Deprecation Warning: Use Put Request in the future")
    session = self._cache.switch(alias)
    # Legacy behaviour: body is always form-urlencoded (no JSON support).
    data = self._utf8_urlencode(data)
    redir = True if allow_redirects is None else allow_redirects
    response = self._body_request(
        "put",
        session,
        uri,
        data,
        None,
        None,
        None,
        headers,
        redir,
        timeout)
    return response
def delete_request(
        self,
        alias,
        uri,
        data=None,
        json=None,
        params=None,
        headers=None,
        allow_redirects=None,
        timeout=None):
    """ Send a DELETE request on the session object found using the
    given `alias`
    ``alias`` that will be used to identify the Session object in the cache
    ``uri`` to send the DELETE request to
    ``json`` a value that will be json encoded
    and sent as request data if data is not specified
    ``headers`` a dictionary of headers to use with the request
    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
    ``timeout`` connection timeout
    """
    session = self._cache.switch(alias)
    data = self._format_data_according_to_header(session, data, headers)
    redir = True if allow_redirects is None else allow_redirects
    response = self._delete_request(
        session, uri, data, json, params, headers, redir, timeout)
    # Decode for logging only; the request body was already sent.
    if isinstance(data, bytes):
        data = data.decode('utf-8')
    logger.info('Delete Request using : alias=%s, uri=%s, data=%s, \
    headers=%s, allow_redirects=%s ' % (alias, uri, data, headers, redir))
    return response
def head_request(
        self,
        alias,
        uri,
        headers=None,
        allow_redirects=None,
        timeout=None):
    """Send a HEAD request on the session identified by ``alias``.

    ``alias`` identifies the Session object in the cache
    ``uri`` target of the HEAD request
    ``allow_redirects`` Boolean; True allows redirect following
    ``headers`` dictionary of headers to send with the request
    """
    session = self._cache.switch(alias)
    # Unlike the other verbs, HEAD does NOT follow redirects by default.
    follow = allow_redirects if allow_redirects is not None else False
    resp = self._head_request(session, uri, headers, follow, timeout)
    logger.info('Head Request using : alias=%s, uri=%s, headers=%s, \
        allow_redirects=%s ' % (alias, uri, headers, follow))
    return resp
def head(
        self,
        alias,
        uri,
        headers=None,
        allow_redirects=None,
        timeout=None):
    """**Deprecated — use `Head Request` instead.**

    Send a HEAD request on the session identified by ``alias``.

    ``alias`` identifies the Session object in the cache
    ``uri`` target of the HEAD request
    ``allow_redirects`` Boolean; True allows redirect following
    ``headers`` dictionary of headers to send with the request
    """
    logger.warn("Deprecation Warning: Use Head Request in the future")
    session = self._cache.switch(alias)
    # HEAD does not follow redirects by default.
    follow = allow_redirects if allow_redirects is not None else False
    return self._head_request(session, uri, headers, follow, timeout)
def options_request(
        self,
        alias,
        uri,
        headers=None,
        allow_redirects=None,
        timeout=None):
    """ Send an OPTIONS request on the session object found using the
    given `alias`

    ``alias`` that will be used to identify the Session object in the cache

    ``uri`` to send the OPTIONS request to

    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.

    ``headers`` a dictionary of headers to use with the request
    """
    session = self._cache.switch(alias)
    # OPTIONS follows redirects by default (HEAD is the only exception).
    redir = True if allow_redirects is None else allow_redirects
    response = self._options_request(session, uri, headers, redir, timeout)
    logger.info(
        'Options Request using : alias=%s, uri=%s, headers=%s, allow_redirects=%s ' %
        (alias, uri, headers, redir))
    return response
def options(
        self,
        alias,
        uri,
        headers=None,
        allow_redirects=None,
        timeout=None):
    """**Deprecated — use `Options Request` instead.**

    Send an OPTIONS request on the session identified by ``alias``.

    ``alias`` identifies the Session object in the cache
    ``uri`` target of the OPTIONS request
    ``allow_redirects`` Boolean; True allows redirect following
    ``headers`` dictionary of headers to send with the request
    """
    logger.warn("Deprecation Warning: Use Options Request in the future")
    session = self._cache.switch(alias)
    # OPTIONS follows redirects by default.
    follow = allow_redirects if allow_redirects is not None else True
    return self._options_request(session, uri, headers, follow, timeout)
def _get_request(
        self,
        session,
        uri,
        params,
        headers,
        json,
        allow_redirects,
        timeout):
    """Low-level GET driver: issue the request, relog the http trace,
    and remember the response as ``session.last_resp``."""
    # Redirect stdout into a buffer while httplib traces (debug mode only).
    self._capture_output()

    resp = session.get(self._get_url(session, uri),
                       headers=headers,
                       json=json,
                       params=self._utf8_urlencode(params),
                       allow_redirects=allow_redirects,
                       timeout=self._get_timeout(timeout),
                       cookies=self.cookies,
                       verify=self.verify)

    # Restore stdout and emit the captured trace at DEBUG level.
    self._print_debug()

    # Store the last session object
    session.last_resp = resp

    return resp
def _body_request(
        self,
        method_name,
        session,
        uri,
        data,
        json,
        params,
        files,
        headers,
        allow_redirects,
        timeout):
    """Shared driver for body-carrying verbs (post/put/patch).

    ``method_name`` names the requests.Session method to invoke; the
    remaining arguments are forwarded to it.
    """
    self._capture_output()

    # Dynamic dispatch lets post/put/patch share this one implementation.
    method = getattr(session, method_name)
    resp = method(self._get_url(session, uri),
                  data=data,
                  json=json,
                  params=self._utf8_urlencode(params),
                  files=files,
                  headers=headers,
                  allow_redirects=allow_redirects,
                  timeout=self._get_timeout(timeout),
                  cookies=self.cookies,
                  verify=self.verify)

    self._print_debug()

    # Store the last session object
    session.last_resp = resp

    self.builtin.log(method_name + ' response: ' + resp.text, 'DEBUG')

    return resp
def _delete_request(
        self,
        session,
        uri,
        data,
        json,
        params,
        headers,
        allow_redirects,
        timeout):
    """Low-level DELETE driver: issue the request and remember the
    response as ``session.last_resp``."""
    self._capture_output()

    resp = session.delete(self._get_url(session, uri),
                          data=data,
                          json=json,
                          params=self._utf8_urlencode(params),
                          headers=headers,
                          allow_redirects=allow_redirects,
                          timeout=self._get_timeout(timeout),
                          cookies=self.cookies,
                          verify=self.verify)

    # Restore stdout and emit the captured trace at DEBUG level.
    self._print_debug()

    # Store the last session object
    session.last_resp = resp

    return resp
def _head_request(self, session, uri, headers, allow_redirects, timeout):
    """Perform the actual HEAD call and remember it on the session."""
    full_url = self._get_url(session, uri)
    self._capture_output()
    response = session.head(full_url,
                            headers=headers,
                            allow_redirects=allow_redirects,
                            timeout=self._get_timeout(timeout),
                            cookies=self.cookies,
                            verify=self.verify)
    self._print_debug()
    # Keep a handle to the most recent response on the session object.
    session.last_resp = response
    return response
def _options_request(
        self,
        session,
        uri,
        headers,
        allow_redirects,
        timeout):
    """Perform the actual OPTIONS call and remember it on the session."""
    full_url = self._get_url(session, uri)
    self._capture_output()
    response = session.options(full_url,
                               headers=headers,
                               cookies=self.cookies,
                               allow_redirects=allow_redirects,
                               timeout=self._get_timeout(timeout),
                               verify=self.verify)
    self._print_debug()
    # Keep a handle to the most recent response on the session object.
    session.last_resp = response
    return response
def _get_url(self, session, uri):
"""
Helper method to get the full url
"""
url = session.url
if uri:
slash = '' if uri.startswith('/') else '/'
url = "%s%s%s" % (session.url, slash, uri)
return url
def _get_timeout(self, timeout):
return float(timeout) if timeout is not None else self.timeout
def _capture_output(self):
    # In debug mode httplib prints its wire trace straight to stdout;
    # swap stdout for an in-memory buffer so _print_debug can relog it.
    if self.debug >= 1:
        self.http_log = WritableObject()
        sys.stdout = self.http_log
def _print_debug(self):
    """Restore stdout and relog the captured httplib trace at DEBUG level."""
    if self.debug >= 1:
        sys.stdout = sys.__stdout__  # Restore stdout
        # httplib writes repr()-style fragments; strip the literal "\r"
        # sequences and quote characters so the log stays readable.
        # Python 2 additionally needs string_escape decoding.
        if PY3:
            debug_info = ''.join(
                self.http_log.content).replace(
                '\\r',
                '').replace(
                '\'',
                '')
        else:
            debug_info = ''.join(
                self.http_log.content).replace(
                '\\r',
                '').decode('string_escape').replace(
                '\'',
                '')

        # Remove empty lines
        debug_info = "\n".join(
            [ll.rstrip() for ll in debug_info.splitlines() if ll.strip()])
        self.builtin.log(debug_info, 'DEBUG')
def _json_pretty_print(self, content):
"""
Pretty print a JSON object
``content`` JSON object to pretty print
"""
temp = json.loads(content)
return json.dumps(
temp,
sort_keys=True,
indent=4,
separators=(
',',
': '))
def _utf8_urlencode(self, data):
if self._is_string_type(data):
return data.encode('utf-8')
if not isinstance(data, dict):
return data
utf8_data = {}
for k, v in data.items():
if self._is_string_type(v):
v = v.encode('utf-8')
utf8_data[k] = v
return urlencode(utf8_data)
def _format_data_according_to_header(self, session, data, headers):
    """Serialize *data* to match the request's (merged) Content-Type."""
    # Merge per-request headers with the session defaults first.
    headers = self._merge_headers(session, headers)
    # Already-JSON strings are left untouched regardless of header.
    if data is not None and headers is not None and 'Content-Type' in headers and not self._is_json(data):
        if headers['Content-Type'].find("application/json") != -1:
            # Generators cannot be json-dumped; pass them through so
            # requests can stream them instead.
            if not isinstance(data, types.GeneratorType):
                data = json.dumps(data)
        elif headers['Content-Type'].find("application/x-www-form-urlencoded") != -1:
            data = self._utf8_urlencode(data)
    else:
        # No usable Content-Type: default to urlencoding.
        data = self._utf8_urlencode(data)
    return data
def _format_data_to_log_string_according_to_header(self, data, headers):
dataStr = "<empty>"
if data is not None and headers is not None and 'Content-Type' in headers:
if (headers['Content-Type'].find("application/json") != -1) or \
(headers['Content-Type'].find("application/x-www-form-urlencoded") != -1):
if isinstance(data, bytes):
dataStr = data.decode('utf-8')
else:
dataStr = data
else:
dataStr = "<" + headers['Content-Type'] + ">"
return dataStr
@staticmethod
def _merge_headers(session, headers):
if headers is None:
headers = {}
else:
headers = headers.copy()
headers.update(session.headers)
return headers
@staticmethod
def _is_json(data):
try:
json.loads(data)
except (TypeError, ValueError):
return False
return True
@staticmethod
def _is_string_type(data):
    # Text-type check that works on both interpreters: `str` on
    # Python 3, `unicode` on Python 2. Byte strings return False so
    # they are never double-encoded by _utf8_urlencode.
    if PY3 and isinstance(data, str):
        return True
    elif not PY3 and isinstance(data, unicode):
        return True
    return False
|
bulkan/robotframework-requests
|
src/RequestsLibrary/RequestsKeywords.py
|
RequestsKeywords.head_request
|
python
|
def head_request(
self,
alias,
uri,
headers=None,
allow_redirects=None,
timeout=None):
session = self._cache.switch(alias)
redir = False if allow_redirects is None else allow_redirects
response = self._head_request(session, uri, headers, redir, timeout)
logger.info('Head Request using : alias=%s, uri=%s, headers=%s, \
allow_redirects=%s ' % (alias, uri, headers, redir))
return response
|
Send a HEAD request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the HEAD request to
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``headers`` a dictionary of headers to use with the request
|
train
|
https://github.com/bulkan/robotframework-requests/blob/11baa3277f1cb728712e26d996200703c15254a8/src/RequestsLibrary/RequestsKeywords.py#L937-L961
|
[
"def _head_request(self, session, uri, headers, allow_redirects, timeout):\n self._capture_output()\n\n resp = session.head(self._get_url(session, uri),\n headers=headers,\n allow_redirects=allow_redirects,\n timeout=self._get_timeout(timeout),\n cookies=self.cookies,\n verify=self.verify)\n\n self._print_debug()\n\n # Store the last session object\n session.last_resp = resp\n\n return resp\n"
] |
class RequestsKeywords(object):
"""``RequestsLibrary`` is a [http://code.google.com/p/robotframework/|Robot Framework] test library that uses the [https://github.com/kennethreitz/requests|Requests] HTTP client.
Here is an example testcase
| ***** Settings ***** | | | | |
| Library | Collections | | | |
| Library | RequestsLibrary | | | |
| ***** Test Cases ***** | | | | |
| Get Requests | | | | |
| | Create Session | github | http://api.github.com | |
| | Create Session | google | http://www.google.com | |
| | ${resp}= | Get Request | google | / |
| | Should Be Equal As Strings | ${resp.status_code} | 200 | |
| | ${resp}= | Get Request | github | /users/bulkan |
| | Should Be Equal As Strings | ${resp.status_code} | 200 | |
| | Dictionary Should Contain Value | ${resp.json()} | Bulkan Savun Evcimen | |
"""
ROBOT_LIBRARY_SCOPE = 'Global'
def __init__(self):
    # Named cache of requests.Session objects, keyed by alias.
    self._cache = robot.utils.ConnectionCache('No sessions created')
    self.builtin = BuiltIn()
    # httplib trace verbosity; 0 disables wire logging.
    self.debug = 0
def _create_session(
        self,
        alias,
        url,
        headers,
        cookies,
        auth,
        timeout,
        max_retries,
        backoff_factor,
        proxies,
        verify,
        debug,
        disable_warnings):
    """ Create Session: create a HTTP session to a server

    ``url`` Base url of the server

    ``alias`` Robot Framework alias to identify the session

    ``headers`` Dictionary of default headers

    ``cookies`` Dictionary of cookies

    ``auth`` List of username & password for HTTP Basic Auth

    ``timeout`` Connection timeout

    ``max_retries`` The maximum number of retries each connection should attempt.

    ``backoff_factor`` The pause between for each retry

    ``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication

    ``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.

    ``debug`` Enable http verbosity option more information
            https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel

    ``disable_warnings`` Disable requests warning useful when you have large number of testcases
    """
    self.builtin.log('Creating session: %s' % alias, 'DEBUG')
    s = session = requests.Session()
    s.headers.update(headers)
    s.auth = auth if auth else s.auth
    s.proxies = proxies if proxies else s.proxies

    try:
        max_retries = int(max_retries)
    except ValueError as err:
        raise ValueError("Error converting max_retries parameter: %s" % err)

    if max_retries > 0:
        # Mount retry-capable adapters for both schemes.
        http = requests.adapters.HTTPAdapter(max_retries=Retry(total=max_retries, backoff_factor=backoff_factor))
        https = requests.adapters.HTTPAdapter(max_retries=Retry(total=max_retries, backoff_factor=backoff_factor))

        # Replace the session's original adapters
        s.mount('http://', http)
        s.mount('https://', https)

    # Disable requests warnings, useful when you have large number of testcase
    # you will observe drastical changes in Robot log.html and output.xml files size
    if disable_warnings:
        logging.basicConfig()  # you need to initialize logging, otherwise you will not see anything from requests
        logging.getLogger().setLevel(logging.ERROR)
        requests_log = logging.getLogger("requests")
        requests_log.setLevel(logging.ERROR)
        requests_log.propagate = True
        if not verify:
            requests.packages.urllib3.disable_warnings()

    # verify can be a Boolean or a String
    if isinstance(verify, bool):
        s.verify = verify
    elif isinstance(verify, str) or isinstance(verify, unicode):
        if verify.lower() == 'true' or verify.lower() == 'false':
            s.verify = self.builtin.convert_to_boolean(verify)
        else:
            # String for CA_BUNDLE, not a Boolean String
            s.verify = verify
    else:
        # not a Boolean nor a String
        s.verify = verify

    # cant pass these into the Session anymore
    self.timeout = float(timeout) if timeout is not None else None
    self.cookies = cookies
    # NOTE(review): self.verify is cleared when verify is truthy, so
    # per-request calls fall back to the session's own verify setting —
    # confirm this interplay is intended.
    self.verify = verify if self.builtin.convert_to_boolean(verify) != True else None

    s.url = url

    # Enable http verbosity
    if int(debug) >= 1:
        self.debug = int(debug)
        httplib.HTTPConnection.debuglevel = self.debug

    self._cache.register(session, alias=alias)
    return session
def create_session(self, alias, url, headers={}, cookies={},
auth=None, timeout=None, proxies=None,
verify=False, debug=0, max_retries=3, backoff_factor=0.10, disable_warnings=0):
""" Create Session: create a HTTP session to a server
``url`` Base url of the server
``alias`` Robot Framework alias to identify the session
``headers`` Dictionary of default headers
``cookies`` Dictionary of cookies
``auth`` List of username & password for HTTP Basic Auth
``timeout`` Connection timeout
``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication
``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.
Defaults to False.
``debug`` Enable http verbosity option more information
https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel
``max_retries`` The maximum number of retries each connection should attempt.
``backoff_factor`` The pause between for each retry
``disable_warnings`` Disable requests warning useful when you have large number of testcases
"""
auth = requests.auth.HTTPBasicAuth(*auth) if auth else None
logger.info('Creating Session using : alias=%s, url=%s, headers=%s, \
cookies=%s, auth=%s, timeout=%s, proxies=%s, verify=%s, \
debug=%s ' % (alias, url, headers, cookies, auth, timeout,
proxies, verify, debug))
return self._create_session(
alias,
url,
headers,
cookies,
auth,
timeout,
max_retries,
backoff_factor,
proxies,
verify,
debug,
disable_warnings)
def create_custom_session(
self,
alias,
url,
auth,
headers={},
cookies={},
timeout=None,
proxies=None,
verify=False,
debug=0,
max_retries=3,
backoff_factor=0.10,
disable_warnings=0):
""" Create Session: create a HTTP session to a server
``url`` Base url of the server
``alias`` Robot Framework alias to identify the session
``headers`` Dictionary of default headers
``cookies`` Dictionary of cookies
``auth`` A Custom Authentication object to be passed on to the reqests library.
http://docs.python-requests.org/en/master/user/advanced/#custom-authentication
``timeout`` Connection timeout
``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication
``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.
Defaults to False.
``debug`` Enable http verbosity option more information
https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel
``max_retries`` The maximum number of retries each connection should attempt.
``backoff_factor`` The pause between for each retry
``disable_warnings`` Disable requests warning useful when you have large number of testcases
"""
logger.info('Creating Custom Authenticated Session using : alias=%s, url=%s, headers=%s, \
cookies=%s, auth=%s, timeout=%s, proxies=%s, verify=%s, \
debug=%s ' % (alias, url, headers, cookies, auth, timeout,
proxies, verify, debug))
return self._create_session(
alias,
url,
headers,
cookies,
auth,
timeout,
max_retries,
backoff_factor,
proxies,
verify,
debug,
disable_warnings)
def create_ntlm_session(
self,
alias,
url,
auth,
headers={},
cookies={},
timeout=None,
proxies=None,
verify=False,
debug=0,
max_retries=3,
backoff_factor=0.10,
disable_warnings=0):
""" Create Session: create a HTTP session to a server
``url`` Base url of the server
``alias`` Robot Framework alias to identify the session
``headers`` Dictionary of default headers
``cookies`` Dictionary of cookies
``auth`` ['DOMAIN', 'username', 'password'] for NTLM Authentication
``timeout`` Connection timeout
``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication
``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.
Defaults to False.
``debug`` Enable http verbosity option more information
https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel
``max_retries`` The maximum number of retries each connection should attempt.
``backoff_factor`` The pause between for each retry
``disable_warnings`` Disable requests warning useful when you have large number of testcases
"""
if not HttpNtlmAuth:
raise AssertionError('Requests NTLM module not loaded')
elif len(auth) != 3:
raise AssertionError('Incorrect number of authentication arguments'
' - expected 3, got {}'.format(len(auth)))
else:
ntlm_auth = HttpNtlmAuth('{}\\{}'.format(auth[0], auth[1]),
auth[2])
logger.info('Creating NTLM Session using : alias=%s, url=%s, \
headers=%s, cookies=%s, ntlm_auth=%s, timeout=%s, \
proxies=%s, verify=%s, debug=%s '
% (alias, url, headers, cookies, ntlm_auth,
timeout, proxies, verify, debug))
return self._create_session(
alias,
url,
headers,
cookies,
ntlm_auth,
timeout,
max_retries,
backoff_factor,
proxies,
verify,
debug,
disable_warnings)
def create_digest_session(self, alias, url, auth, headers={}, cookies={},
timeout=None, proxies=None, verify=False,
debug=0, max_retries=3,backoff_factor=0.10, disable_warnings=0):
""" Create Session: create a HTTP session to a server
``url`` Base url of the server
``alias`` Robot Framework alias to identify the session
``headers`` Dictionary of default headers
``cookies`` Dictionary of cookies
``auth`` ['DOMAIN', 'username', 'password'] for NTLM Authentication
``timeout`` Connection timeout
``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication
``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.
Defaults to False.
``debug`` Enable http verbosity option more information
https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel
``max_retries`` The maximum number of retries each connection should attempt.
``backoff_factor`` The pause between for each retry
``disable_warnings`` Disable requests warning useful when you have large number of testcases
"""
digest_auth = requests.auth.HTTPDigestAuth(*auth) if auth else None
return self._create_session(
alias,
url,
headers,
cookies,
digest_auth,
timeout,
max_retries,
backoff_factor,
proxies,
verify,
debug,
disable_warnings)
def create_client_cert_session(self, alias, url, headers={}, cookies={},
client_certs=None, timeout=None, proxies=None,
verify=False, debug=0, max_retries=3, backoff_factor=0.10, disable_warnings=0):
""" Create Session: create a HTTP session to a server
``url`` Base url of the server
``alias`` Robot Framework alias to identify the session
``headers`` Dictionary of default headers
``cookies`` Dictionary of cookies
``client_certs`` ['client certificate', 'client key'] PEM files containing the client key and certificate
``timeout`` Connection timeout
``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication
``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.
Defaults to False.
``debug`` Enable http verbosity option more information
https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel
``max_retries`` The maximum number of retries each connection should attempt.
``backoff_factor`` The pause between for each retry
``disable_warnings`` Disable requests warning useful when you have large number of testcases
"""
logger.info('Creating Session using : alias=%s, url=%s, headers=%s, \
cookies=%s, client_certs=%s, timeout=%s, proxies=%s, verify=%s, \
debug=%s ' % (alias, url, headers, cookies, client_certs, timeout,
proxies, verify, debug))
session = self._create_session(
alias,
url,
headers,
cookies,
None,
timeout,
max_retries,
backoff_factor,
proxies,
verify,
debug,
disable_warnings)
session.cert = tuple(client_certs)
return session
def delete_all_sessions(self):
    """ Removes all the session objects """
    logger.info('Delete All Sessions')
    # Drops every requests.Session registered via the Create Session keywords.
    self._cache.empty_cache()
def update_session(self, alias, headers=None, cookies=None):
    """Update Session Headers: update a HTTP Session Headers

    ``alias`` Robot Framework alias to identify the session

    ``headers`` Dictionary of headers merge into session

    ``cookies`` Dictionary of cookies merge into session
    """
    session = self._cache.switch(alias)
    # merge_setting / merge_cookies fold the new values into the session's
    # existing header dict and cookie jar rather than replacing them.
    session.headers = merge_setting(headers, session.headers)
    session.cookies = merge_cookies(session.cookies, cookies)
def to_json(self, content, pretty_print=False):
    """Convert a string to a JSON object.

    ``content`` String content to convert into JSON

    ``pretty_print`` If defined, will output JSON in pretty print format
    """
    # Python 3 bytes must be decoded before json parsing.
    if PY3 and isinstance(content, bytes):
        content = content.decode(encoding='utf-8')
    result = (self._json_pretty_print(content)
              if pretty_print
              else json.loads(content))
    logger.info('To JSON using : content=%s ' % (content))
    logger.info('To JSON using : pretty_print=%s ' % (pretty_print))
    return result
def get_request(
        self,
        alias,
        uri,
        headers=None,
        json=None,
        params=None,
        allow_redirects=None,
        timeout=None):
    """ Send a GET request on the session object found using the
    given `alias`

    ``alias`` that will be used to identify the Session object in the cache

    ``uri`` to send the GET request to

    ``params`` url parameters to append to the uri

    ``headers`` a dictionary of headers to use with the request

    ``json`` json data to send in the body of the :class:`Request`.

    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.

    ``timeout`` connection timeout
    """
    session = self._cache.switch(alias)
    # GET follows redirects by default.
    redir = True if allow_redirects is None else allow_redirects
    response = self._get_request(
        session, uri, params, headers, json, redir, timeout)
    logger.info(
        'Get Request using : alias=%s, uri=%s, headers=%s json=%s' %
        (alias, uri, headers, json))
    return response
def get(
        self,
        alias,
        uri,
        params=None,
        headers=None,
        allow_redirects=None,
        timeout=None):
    """ **Deprecated- See Get Request now**

    Send a GET request on the session object found using the
    given `alias`

    ``alias`` that will be used to identify the Session object in the cache

    ``uri`` to send the GET request to

    ``headers`` a dictionary of headers to use with the request

    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.

    ``timeout`` connection timeout
    """
    logger.warn("Deprecation Warning: Use Get Request in the future")
    session = self._cache.switch(alias)
    # GET follows redirects by default.
    redir = True if allow_redirects is None else allow_redirects
    # BUGFIX: arguments must match _get_request(session, uri, params,
    # headers, json, allow_redirects, timeout). Previously `redir` was
    # passed as the json body, `timeout` as allow_redirects, and the
    # `json` MODULE as the timeout — crashing in _get_timeout's float().
    # This deprecated keyword sends no json body, so pass None.
    response = self._get_request(
        session, uri, params, headers, None, redir, timeout)
    return response
def post_request(
self,
alias,
uri,
data=None,
json=None,
params=None,
headers=None,
files=None,
allow_redirects=None,
timeout=None):
""" Send a POST request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the POST request to
``data`` a dictionary of key-value pairs that will be urlencoded
and sent as POST data
or binary data that is sent as the raw body content
or passed as such for multipart form data if ``files`` is also
defined
``json`` a value that will be json encoded
and sent as POST data if files or data is not specified
``params`` url parameters to append to the uri
``headers`` a dictionary of headers to use with the request
``files`` a dictionary of file names containing file data to POST to the server
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``timeout`` connection timeout
"""
session = self._cache.switch(alias)
if not files:
data = self._format_data_according_to_header(session, data, headers)
redir = True if allow_redirects is None else allow_redirects
response = self._body_request(
"post",
session,
uri,
data,
json,
params,
files,
headers,
redir,
timeout)
dataStr = self._format_data_to_log_string_according_to_header(data, headers)
logger.info('Post Request using : alias=%s, uri=%s, data=%s, headers=%s, files=%s, allow_redirects=%s '
% (alias, uri, dataStr, headers, files, redir))
return response
def post(
self,
alias,
uri,
data={},
headers=None,
files=None,
allow_redirects=None,
timeout=None):
""" **Deprecated- See Post Request now**
Send a POST request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the GET request to
``data`` a dictionary of key-value pairs that will be urlencoded
and sent as POST data
or binary data that is sent as the raw body content
``headers`` a dictionary of headers to use with the request
``files`` a dictionary of file names containing file data to POST to the server
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``timeout`` connection timeout
"""
logger.warn("Deprecation Warning: Use Post Request in the future")
session = self._cache.switch(alias)
data = self._utf8_urlencode(data)
redir = True if allow_redirects is None else allow_redirects
response = self._body_request(
"post",
session,
uri,
data,
None,
None,
files,
headers,
redir,
timeout)
return response
def patch_request(
self,
alias,
uri,
data=None,
json=None,
params=None,
headers=None,
files=None,
allow_redirects=None,
timeout=None):
""" Send a PATCH request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the PATCH request to
``data`` a dictionary of key-value pairs that will be urlencoded
and sent as PATCH data
or binary data that is sent as the raw body content
``json`` a value that will be json encoded
and sent as PATCH data if data is not specified
``headers`` a dictionary of headers to use with the request
``files`` a dictionary of file names containing file data to PATCH to the server
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``params`` url parameters to append to the uri
``timeout`` connection timeout
"""
session = self._cache.switch(alias)
data = self._format_data_according_to_header(session, data, headers)
redir = True if allow_redirects is None else allow_redirects
response = self._body_request(
"patch",
session,
uri,
data,
json,
params,
files,
headers,
redir,
timeout)
if isinstance(data, bytes):
data = data.decode('utf-8')
logger.info('Patch Request using : alias=%s, uri=%s, data=%s, \
headers=%s, files=%s, allow_redirects=%s '
% (alias, uri, data, headers, files, redir))
return response
def patch(
self,
alias,
uri,
data={},
headers=None,
files={},
allow_redirects=None,
timeout=None):
""" **Deprecated- See Patch Request now**
Send a PATCH request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the PATCH request to
``data`` a dictionary of key-value pairs that will be urlencoded
and sent as PATCH data
or binary data that is sent as the raw body content
``headers`` a dictionary of headers to use with the request
``files`` a dictionary of file names containing file data to PATCH to the server
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``timeout`` connection timeout
"""
logger.warn("Deprecation Warning: Use Patch Request in the future")
session = self._cache.switch(alias)
data = self._utf8_urlencode(data)
redir = True if allow_redirects is None else allow_redirects
response = self._body_request(
"patch",
session,
uri,
data,
None,
None,
files,
headers,
redir,
timeout)
return response
def put_request(
self,
alias,
uri,
data=None,
json=None,
params=None,
files=None,
headers=None,
allow_redirects=None,
timeout=None):
""" Send a PUT request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the PUT request to
``data`` a dictionary of key-value pairs that will be urlencoded
and sent as PUT data
or binary data that is sent as the raw body content
``json`` a value that will be json encoded
and sent as PUT data if data is not specified
``headers`` a dictionary of headers to use with the request
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``params`` url parameters to append to the uri
``timeout`` connection timeout
"""
session = self._cache.switch(alias)
data = self._format_data_according_to_header(session, data, headers)
redir = True if allow_redirects is None else allow_redirects
response = self._body_request(
"put",
session,
uri,
data,
json,
params,
files,
headers,
redir,
timeout)
if isinstance(data, bytes):
data = data.decode('utf-8')
logger.info('Put Request using : alias=%s, uri=%s, data=%s, \
headers=%s, allow_redirects=%s ' % (alias, uri, data, headers, redir))
return response
def put(
self,
alias,
uri,
data=None,
headers=None,
allow_redirects=None,
timeout=None):
""" **Deprecated- See Put Request now**
Send a PUT request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the PUT request to
``headers`` a dictionary of headers to use with the request
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``timeout`` connection timeout
"""
logger.warn("Deprecation Warning: Use Put Request in the future")
session = self._cache.switch(alias)
data = self._utf8_urlencode(data)
redir = True if allow_redirects is None else allow_redirects
response = self._body_request(
"put",
session,
uri,
data,
None,
None,
None,
headers,
redir,
timeout)
return response
def delete_request(
self,
alias,
uri,
data=None,
json=None,
params=None,
headers=None,
allow_redirects=None,
timeout=None):
""" Send a DELETE request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the DELETE request to
``json`` a value that will be json encoded
and sent as request data if data is not specified
``headers`` a dictionary of headers to use with the request
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``timeout`` connection timeout
"""
session = self._cache.switch(alias)
data = self._format_data_according_to_header(session, data, headers)
redir = True if allow_redirects is None else allow_redirects
response = self._delete_request(
session, uri, data, json, params, headers, redir, timeout)
if isinstance(data, bytes):
data = data.decode('utf-8')
logger.info('Delete Request using : alias=%s, uri=%s, data=%s, \
headers=%s, allow_redirects=%s ' % (alias, uri, data, headers, redir))
return response
def delete(
        self,
        alias,
        uri,
        data=(),
        headers=None,
        allow_redirects=None,
        timeout=None):
    """ * * *   Deprecated- See Delete Request now   * * *

    Send a DELETE request on the session object found using the
    given `alias`

    ``alias`` that will be used to identify the Session object in the cache

    ``uri`` to send the DELETE request to

    ``headers`` a dictionary of headers to use with the request

    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.

    ``timeout`` connection timeout
    """
    logger.warn("Deprecation Warning: Use Delete Request in the future")
    session = self._cache.switch(alias)
    data = self._utf8_urlencode(data)
    # DELETE follows redirects by default.
    redir = True if allow_redirects is None else allow_redirects
    # BUGFIX: the `json` MODULE was previously passed as the json payload,
    # making requests attempt to serialize a module object. The urlencoded
    # `data` already carries the body, so the json argument must be None.
    response = self._delete_request(
        session, uri, data, None, None, headers, redir, timeout)
    return response
def head(
        self,
        alias,
        uri,
        headers=None,
        allow_redirects=None,
        timeout=None):
    """ **Deprecated- See Head Request now**

    Send a HEAD request on the session found under ``alias``.

    ``alias`` identifies the Session object in the cache
    ``uri`` target of the HEAD request
    ``allow_redirects`` Boolean; True enables redirect following
    (HEAD defaults to NOT following redirects)
    ``headers`` a dictionary of headers to use with the request
    """
    logger.warn("Deprecation Warning: Use Head Request in the future")
    cached_session = self._cache.switch(alias)
    if allow_redirects is None:
        follow = False
    else:
        follow = allow_redirects
    return self._head_request(cached_session, uri, headers, follow, timeout)
def options_request(
        self,
        alias,
        uri,
        headers=None,
        allow_redirects=None,
        timeout=None):
    """ Send an OPTIONS request on the session object found using the
    given `alias`

    ``alias`` identifies the Session object in the cache
    ``uri`` target of the OPTIONS request
    ``allow_redirects`` Boolean; redirect following is on by default
    ``headers`` a dictionary of headers to use with the request
    """
    sess = self._cache.switch(alias)
    follow = allow_redirects if allow_redirects is not None else True
    resp = self._options_request(sess, uri, headers, follow, timeout)
    logger.info(
        'Options Request using : alias=%s, uri=%s, headers=%s, allow_redirects=%s ' %
        (alias, uri, headers, follow))
    return resp
def options(
        self,
        alias,
        uri,
        headers=None,
        allow_redirects=None,
        timeout=None):
    """ **Deprecated- See Options Request now**

    Send an OPTIONS request on the session found under ``alias``.

    ``alias`` identifies the Session object in the cache
    ``uri`` target of the OPTIONS request
    ``allow_redirects`` Boolean; redirect following is on by default
    ``headers`` a dictionary of headers to use with the request
    """
    logger.warn("Deprecation Warning: Use Options Request in the future")
    sess = self._cache.switch(alias)
    follow = allow_redirects if allow_redirects is not None else True
    return self._options_request(sess, uri, headers, follow, timeout)
def _get_request(
        self,
        session,
        uri,
        params,
        headers,
        json,
        allow_redirects,
        timeout):
    # Low-level GET: builds the absolute url, urlencodes params, and
    # applies the library-wide cookies/verify values stored by
    # _create_session.
    self._capture_output()

    resp = session.get(self._get_url(session, uri),
                       headers=headers,
                       json=json,
                       params=self._utf8_urlencode(params),
                       allow_redirects=allow_redirects,
                       timeout=self._get_timeout(timeout),
                       cookies=self.cookies,
                       verify=self.verify)

    self._print_debug()

    # Store the last session object
    session.last_resp = resp

    return resp
def _body_request(
        self,
        method_name,
        session,
        uri,
        data,
        json,
        params,
        files,
        headers,
        allow_redirects,
        timeout):
    # Shared implementation for every verb that carries a body
    # (post / put / patch): ``method_name`` selects the bound
    # requests.Session method by name.
    self._capture_output()

    method = getattr(session, method_name)
    resp = method(self._get_url(session, uri),
                  data=data,
                  json=json,
                  params=self._utf8_urlencode(params),
                  files=files,
                  headers=headers,
                  allow_redirects=allow_redirects,
                  timeout=self._get_timeout(timeout),
                  cookies=self.cookies,
                  verify=self.verify)

    self._print_debug()

    # Store the last session object
    session.last_resp = resp

    self.builtin.log(method_name + ' response: ' + resp.text, 'DEBUG')

    return resp
def _delete_request(
        self,
        session,
        uri,
        data,
        json,
        params,
        headers,
        allow_redirects,
        timeout):
    # Low-level DELETE; unlike _body_request it calls session.delete
    # directly rather than dispatching by method name.
    self._capture_output()

    resp = session.delete(self._get_url(session, uri),
                          data=data,
                          json=json,
                          params=self._utf8_urlencode(params),
                          headers=headers,
                          allow_redirects=allow_redirects,
                          timeout=self._get_timeout(timeout),
                          cookies=self.cookies,
                          verify=self.verify)

    self._print_debug()

    # Store the last session object
    session.last_resp = resp

    return resp
def _head_request(self, session, uri, headers, allow_redirects, timeout):
    # Low-level HEAD; never carries a body. The keyword layer defaults
    # allow_redirects to False for HEAD.
    self._capture_output()

    resp = session.head(self._get_url(session, uri),
                        headers=headers,
                        allow_redirects=allow_redirects,
                        timeout=self._get_timeout(timeout),
                        cookies=self.cookies,
                        verify=self.verify)

    self._print_debug()

    # Store the last session object
    session.last_resp = resp

    return resp
def _options_request(
        self,
        session,
        uri,
        headers,
        allow_redirects,
        timeout):
    # Low-level OPTIONS request with the shared debug-capture wrapping.
    self._capture_output()

    resp = session.options(self._get_url(session, uri),
                           headers=headers,
                           cookies=self.cookies,
                           allow_redirects=allow_redirects,
                           timeout=self._get_timeout(timeout),
                           verify=self.verify)

    self._print_debug()

    # Store the last session object
    session.last_resp = resp

    return resp
def _get_url(self, session, uri):
"""
Helper method to get the full url
"""
url = session.url
if uri:
slash = '' if uri.startswith('/') else '/'
url = "%s%s%s" % (session.url, slash, uri)
return url
def _get_timeout(self, timeout):
return float(timeout) if timeout is not None else self.timeout
def _capture_output(self):
    # When http debugging is enabled, redirect stdout into an
    # in-memory buffer so httplib's debug chatter can be replayed via
    # robot's logger by _print_debug() instead of polluting real
    # stdout.
    if self.debug >= 1:
        self.http_log = WritableObject()
        sys.stdout = self.http_log
def _print_debug(self):
    # Counterpart of _capture_output(): restore the real stdout and
    # forward the captured httplib debug output to robot's DEBUG log.
    if self.debug >= 1:
        sys.stdout = sys.__stdout__  # Restore stdout
        # Python 2 needs an extra string-escape decode pass; both
        # branches strip literal "\r" sequences and single quotes
        # from the captured text.
        if PY3:
            debug_info = ''.join(
                self.http_log.content).replace(
                '\\r',
                '').replace(
                '\'',
                '')
        else:
            debug_info = ''.join(
                self.http_log.content).replace(
                '\\r',
                '').decode('string_escape').replace(
                '\'',
                '')

        # Remove empty lines
        debug_info = "\n".join(
            [ll.rstrip() for ll in debug_info.splitlines() if ll.strip()])
        self.builtin.log(debug_info, 'DEBUG')
def _json_pretty_print(self, content):
"""
Pretty print a JSON object
``content`` JSON object to pretty print
"""
temp = json.loads(content)
return json.dumps(
temp,
sort_keys=True,
indent=4,
separators=(
',',
': '))
def _utf8_urlencode(self, data):
    # Text input is returned utf-8 encoded (bytes), NOT urlencoded.
    if self._is_string_type(data):
        return data.encode('utf-8')

    # Anything that is not a dict (None, bytes, lists, generators, ...)
    # passes through untouched.
    if not isinstance(data, dict):
        return data

    # Dicts: utf-8 encode the text values, then urlencode the mapping.
    utf8_data = {}
    for k, v in data.items():
        if self._is_string_type(v):
            v = v.encode('utf-8')
        utf8_data[k] = v
    return urlencode(utf8_data)
def _format_data_according_to_header(self, session, data, headers):
    # Encode the outgoing body based on the merged (session + request)
    # Content-Type. Data that already parses as JSON is left as-is;
    # generators are passed through so streamed uploads are not
    # consumed here; everything else falls back to utf-8 urlencoding.
    headers = self._merge_headers(session, headers)

    if data is not None and headers is not None and 'Content-Type' in headers and not self._is_json(data):
        if headers['Content-Type'].find("application/json") != -1:
            if not isinstance(data, types.GeneratorType):
                data = json.dumps(data)
        elif headers['Content-Type'].find("application/x-www-form-urlencoded") != -1:
            data = self._utf8_urlencode(data)
    else:
        data = self._utf8_urlencode(data)

    return data
def _format_data_to_log_string_according_to_header(self, data, headers):
dataStr = "<empty>"
if data is not None and headers is not None and 'Content-Type' in headers:
if (headers['Content-Type'].find("application/json") != -1) or \
(headers['Content-Type'].find("application/x-www-form-urlencoded") != -1):
if isinstance(data, bytes):
dataStr = data.decode('utf-8')
else:
dataStr = data
else:
dataStr = "<" + headers['Content-Type'] + ">"
return dataStr
@staticmethod
def _merge_headers(session, headers):
if headers is None:
headers = {}
else:
headers = headers.copy()
headers.update(session.headers)
return headers
@staticmethod
def _is_json(data):
try:
json.loads(data)
except (TypeError, ValueError):
return False
return True
@staticmethod
def _is_string_type(data):
    # The text type differs between interpreters: ``str`` on Python 3,
    # ``unicode`` on Python 2. Plain Python 2 ``str`` (bytes) is
    # deliberately NOT treated as text here.
    if PY3 and isinstance(data, str):
        return True
    elif not PY3 and isinstance(data, unicode):
        return True
    return False
|
bulkan/robotframework-requests
|
src/RequestsLibrary/RequestsKeywords.py
|
RequestsKeywords.head
|
python
|
def head(
self,
alias,
uri,
headers=None,
allow_redirects=None,
timeout=None):
logger.warn("Deprecation Warning: Use Head Request in the future")
session = self._cache.switch(alias)
redir = False if allow_redirects is None else allow_redirects
response = self._head_request(session, uri, headers, redir, timeout)
return response
|
**Deprecated- See Head Request now**
Send a HEAD request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the HEAD request to
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``headers`` a dictionary of headers to use with the request
|
train
|
https://github.com/bulkan/robotframework-requests/blob/11baa3277f1cb728712e26d996200703c15254a8/src/RequestsLibrary/RequestsKeywords.py#L963-L988
|
[
"def _head_request(self, session, uri, headers, allow_redirects, timeout):\n self._capture_output()\n\n resp = session.head(self._get_url(session, uri),\n headers=headers,\n allow_redirects=allow_redirects,\n timeout=self._get_timeout(timeout),\n cookies=self.cookies,\n verify=self.verify)\n\n self._print_debug()\n\n # Store the last session object\n session.last_resp = resp\n\n return resp\n"
] |
class RequestsKeywords(object):
"""``RequestsLibrary`` is a [http://code.google.com/p/robotframework/|Robot Framework] test library that uses the [https://github.com/kennethreitz/requests|Requests] HTTP client.
Here is an example testcase
| ***** Settings ***** | | | | |
| Library | Collections | | | |
| Library | RequestsLibrary | | | |
| ***** Test Cases ***** | | | | |
| Get Requests | | | | |
| | Create Session | github | http://api.github.com | |
| | Create Session | google | http://www.google.com | |
| | ${resp}= | Get Request | google | / |
| | Should Be Equal As Strings | ${resp.status_code} | 200 | |
| | ${resp}= | Get Request | github | /users/bulkan |
| | Should Be Equal As Strings | ${resp.status_code} | 200 | |
| | Dictionary Should Contain Value | ${resp.json()} | Bulkan Savun Evcimen | |
"""
ROBOT_LIBRARY_SCOPE = 'Global'
def __init__(self):
    # One shared ConnectionCache maps Robot Framework aliases to
    # requests.Session objects; ``debug`` toggles httplib verbosity
    # and is raised per session in _create_session.
    self._cache = robot.utils.ConnectionCache('No sessions created')
    self.builtin = BuiltIn()
    self.debug = 0
def _create_session(
        self,
        alias,
        url,
        headers,
        cookies,
        auth,
        timeout,
        max_retries,
        backoff_factor,
        proxies,
        verify,
        debug,
        disable_warnings):
    """ Create Session: create a HTTP session to a server

    ``url`` Base url of the server

    ``alias`` Robot Framework alias to identify the session

    ``headers`` Dictionary of default headers

    ``cookies`` Dictionary of cookies

    ``auth`` List of username & password for HTTP Basic Auth

    ``timeout`` Connection timeout

    ``max_retries`` The maximum number of retries each connection should attempt.

    ``backoff_factor`` The pause between for each retry

    ``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication

    ``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.

    ``debug`` Enable http verbosity option more information
    https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel

    ``disable_warnings`` Disable requests warning useful when you have large number of testcases
    """

    self.builtin.log('Creating session: %s' % alias, 'DEBUG')
    s = session = requests.Session()
    s.headers.update(headers)
    s.auth = auth if auth else s.auth
    s.proxies = proxies if proxies else s.proxies

    try:
        max_retries = int(max_retries)
    except ValueError as err:
        raise ValueError("Error converting max_retries parameter: %s" % err)

    if max_retries > 0:
        http = requests.adapters.HTTPAdapter(max_retries=Retry(total=max_retries, backoff_factor=backoff_factor))
        https = requests.adapters.HTTPAdapter(max_retries=Retry(total=max_retries, backoff_factor=backoff_factor))

        # Replace the session's original adapters
        s.mount('http://', http)
        s.mount('https://', https)

    # Disable requests warnings, useful when you have large number of testcase
    # you will observe drastical changes in Robot log.html and output.xml files size
    if disable_warnings:
        logging.basicConfig()  # you need to initialize logging, otherwise you will not see anything from requests
        logging.getLogger().setLevel(logging.ERROR)
        requests_log = logging.getLogger("requests")
        requests_log.setLevel(logging.ERROR)
        requests_log.propagate = True
        if not verify:
            requests.packages.urllib3.disable_warnings()

    # verify can be a Boolean or a String
    if isinstance(verify, bool):
        s.verify = verify
    elif isinstance(verify, str) or isinstance(verify, unicode):
        if verify.lower() == 'true' or verify.lower() == 'false':
            s.verify = self.builtin.convert_to_boolean(verify)
        else:
            # String for CA_BUNDLE, not a Boolean String
            s.verify = verify
    else:
        # not a Boolean nor a String
        s.verify = verify

    # cant pass these into the Session anymore
    # NOTE(review): timeout/cookies/verify are stored on the LIBRARY
    # instance, not per session — the most recently created session's
    # values win for every subsequent request. Confirm this is the
    # intended behavior when multiple sessions coexist.
    self.timeout = float(timeout) if timeout is not None else None
    self.cookies = cookies
    self.verify = verify if self.builtin.convert_to_boolean(verify) != True else None

    s.url = url

    # Enable http verbosity
    if int(debug) >= 1:
        self.debug = int(debug)
        httplib.HTTPConnection.debuglevel = self.debug

    self._cache.register(session, alias=alias)
    return session
def create_session(self, alias, url, headers={}, cookies={},
                   auth=None, timeout=None, proxies=None,
                   verify=False, debug=0, max_retries=3, backoff_factor=0.10, disable_warnings=0):
    """ Create Session: create a HTTP session to a server

    ``url`` Base url of the server

    ``alias`` Robot Framework alias to identify the session

    ``headers`` Dictionary of default headers

    ``cookies`` Dictionary of cookies

    ``auth`` List of username & password for HTTP Basic Auth

    ``timeout`` Connection timeout

    ``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication

    ``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.
    Defaults to False.

    ``debug`` Enable http verbosity option more information
    https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel

    ``max_retries`` The maximum number of retries each connection should attempt.

    ``backoff_factor`` The pause between for each retry

    ``disable_warnings`` Disable requests warning useful when you have large number of testcases
    """
    # NOTE(review): mutable default args headers={} / cookies={} are
    # only read, never mutated here, but remain a Python anti-pattern.
    auth = requests.auth.HTTPBasicAuth(*auth) if auth else None

    logger.info('Creating Session using : alias=%s, url=%s, headers=%s, \
cookies=%s, auth=%s, timeout=%s, proxies=%s, verify=%s, \
debug=%s ' % (alias, url, headers, cookies, auth, timeout,
              proxies, verify, debug))

    return self._create_session(
        alias,
        url,
        headers,
        cookies,
        auth,
        timeout,
        max_retries,
        backoff_factor,
        proxies,
        verify,
        debug,
        disable_warnings)
def create_custom_session(
        self,
        alias,
        url,
        auth,
        headers={},
        cookies={},
        timeout=None,
        proxies=None,
        verify=False,
        debug=0,
        max_retries=3,
        backoff_factor=0.10,
        disable_warnings=0):
    """ Create Session: create a HTTP session to a server

    ``url`` Base url of the server

    ``alias`` Robot Framework alias to identify the session

    ``headers`` Dictionary of default headers

    ``cookies`` Dictionary of cookies

    ``auth`` A Custom Authentication object to be passed on to the reqests library.
    http://docs.python-requests.org/en/master/user/advanced/#custom-authentication

    ``timeout`` Connection timeout

    ``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication

    ``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.
    Defaults to False.

    ``debug`` Enable http verbosity option more information
    https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel

    ``max_retries`` The maximum number of retries each connection should attempt.

    ``backoff_factor`` The pause between for each retry

    ``disable_warnings`` Disable requests warning useful when you have large number of testcases
    """
    # Unlike create_session, ``auth`` is forwarded unchanged so any
    # requests-compatible auth object can be used.
    logger.info('Creating Custom Authenticated Session using : alias=%s, url=%s, headers=%s, \
cookies=%s, auth=%s, timeout=%s, proxies=%s, verify=%s, \
debug=%s ' % (alias, url, headers, cookies, auth, timeout,
              proxies, verify, debug))

    return self._create_session(
        alias,
        url,
        headers,
        cookies,
        auth,
        timeout,
        max_retries,
        backoff_factor,
        proxies,
        verify,
        debug,
        disable_warnings)
def create_ntlm_session(
        self,
        alias,
        url,
        auth,
        headers={},
        cookies={},
        timeout=None,
        proxies=None,
        verify=False,
        debug=0,
        max_retries=3,
        backoff_factor=0.10,
        disable_warnings=0):
    """ Create Session: create a HTTP session to a server

    ``url`` Base url of the server

    ``alias`` Robot Framework alias to identify the session

    ``headers`` Dictionary of default headers

    ``cookies`` Dictionary of cookies

    ``auth`` ['DOMAIN', 'username', 'password'] for NTLM Authentication

    ``timeout`` Connection timeout

    ``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication

    ``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.
    Defaults to False.

    ``debug`` Enable http verbosity option more information
    https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel

    ``max_retries`` The maximum number of retries each connection should attempt.

    ``backoff_factor`` The pause between for each retry

    ``disable_warnings`` Disable requests warning useful when you have large number of testcases
    """
    # requests_ntlm is an optional dependency; HttpNtlmAuth is None
    # when it failed to import, hence the explicit guard.
    if not HttpNtlmAuth:
        raise AssertionError('Requests NTLM module not loaded')
    elif len(auth) != 3:
        raise AssertionError('Incorrect number of authentication arguments'
                             ' - expected 3, got {}'.format(len(auth)))
    else:
        # NTLM identity is "DOMAIN\\username" plus the password.
        ntlm_auth = HttpNtlmAuth('{}\\{}'.format(auth[0], auth[1]),
                                 auth[2])
        logger.info('Creating NTLM Session using : alias=%s, url=%s, \
headers=%s, cookies=%s, ntlm_auth=%s, timeout=%s, \
proxies=%s, verify=%s, debug=%s '
                    % (alias, url, headers, cookies, ntlm_auth,
                       timeout, proxies, verify, debug))

        return self._create_session(
            alias,
            url,
            headers,
            cookies,
            ntlm_auth,
            timeout,
            max_retries,
            backoff_factor,
            proxies,
            verify,
            debug,
            disable_warnings)
def create_digest_session(self, alias, url, auth, headers={}, cookies={},
                          timeout=None, proxies=None, verify=False,
                          debug=0, max_retries=3, backoff_factor=0.10, disable_warnings=0):
    """ Create Session: create a HTTP session to a server

    ``url`` Base url of the server

    ``alias`` Robot Framework alias to identify the session

    ``headers`` Dictionary of default headers

    ``cookies`` Dictionary of cookies

    ``auth`` ['username', 'password'] for HTTP Digest Authentication

    ``timeout`` Connection timeout

    ``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication

    ``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.
    Defaults to False.

    ``debug`` Enable http verbosity option more information
    https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel

    ``max_retries`` The maximum number of retries each connection should attempt.

    ``backoff_factor`` The pause between for each retry

    ``disable_warnings`` Disable requests warning useful when you have large number of testcases
    """
    # DOC FIX: the original docstring was copy-pasted from
    # create_ntlm_session and wrongly documented ``auth`` as
    # ['DOMAIN', 'username', 'password'] for NTLM. This keyword builds
    # an HTTPDigestAuth from (username, password).
    digest_auth = requests.auth.HTTPDigestAuth(*auth) if auth else None

    return self._create_session(
        alias,
        url,
        headers,
        cookies,
        digest_auth,
        timeout,
        max_retries,
        backoff_factor,
        proxies,
        verify,
        debug,
        disable_warnings)
def create_client_cert_session(self, alias, url, headers={}, cookies={},
                               client_certs=None, timeout=None, proxies=None,
                               verify=False, debug=0, max_retries=3, backoff_factor=0.10, disable_warnings=0):
    """ Create Session: create a HTTP session to a server

    ``url`` Base url of the server

    ``alias`` Robot Framework alias to identify the session

    ``headers`` Dictionary of default headers

    ``cookies`` Dictionary of cookies

    ``client_certs`` ['client certificate', 'client key'] PEM files containing the client key and certificate

    ``timeout`` Connection timeout

    ``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication

    ``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.
    Defaults to False.

    ``debug`` Enable http verbosity option more information
    https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel

    ``max_retries`` The maximum number of retries each connection should attempt.

    ``backoff_factor`` The pause between for each retry

    ``disable_warnings`` Disable requests warning useful when you have large number of testcases
    """
    logger.info('Creating Session using : alias=%s, url=%s, headers=%s, \
cookies=%s, client_certs=%s, timeout=%s, proxies=%s, verify=%s, \
debug=%s ' % (alias, url, headers, cookies, client_certs, timeout,
              proxies, verify, debug))

    session = self._create_session(
        alias,
        url,
        headers,
        cookies,
        None,
        timeout,
        max_retries,
        backoff_factor,
        proxies,
        verify,
        debug,
        disable_warnings)

    # NOTE(review): tuple(None) raises TypeError when client_certs is
    # omitted — callers must always supply the cert/key pair; confirm
    # whether a guard for the default None was intended.
    session.cert = tuple(client_certs)
    return session
def delete_all_sessions(self):
    """ Removes all the session objects """
    # Drops every cached requests.Session and frees their aliases; any
    # keyword that uses an old alias afterwards will fail to find a
    # session.
    logger.info('Delete All Sessions')
    self._cache.empty_cache()
def update_session(self, alias, headers=None, cookies=None):
    """Update Session Headers: update a HTTP Session Headers

    ``alias`` Robot Framework alias to identify the session

    ``headers`` Dictionary of headers merge into session

    ``cookies`` Dictionary of cookies merge into session
    """
    session = self._cache.switch(alias)
    # merge_setting / merge_cookies (from requests.sessions) combine
    # the new values with what the session already carries.
    session.headers = merge_setting(headers, session.headers)
    session.cookies = merge_cookies(session.cookies, cookies)
def to_json(self, content, pretty_print=False):
    """ Convert a string to a JSON object

    ``content`` String content to convert into JSON

    ``pretty_print`` If defined, will output JSON is pretty print format
    """
    if PY3:
        # Response bodies are bytes on Python 3; decode before parsing.
        if isinstance(content, bytes):
            content = content.decode(encoding='utf-8')
    if pretty_print:
        # NOTE(review): with pretty_print the keyword returns a
        # formatted STRING rather than a parsed object — confirm
        # callers expect that asymmetry.
        json_ = self._json_pretty_print(content)
    else:
        json_ = json.loads(content)
    logger.info('To JSON using : content=%s ' % (content))
    logger.info('To JSON using : pretty_print=%s ' % (pretty_print))
    return json_
def get_request(
        self,
        alias,
        uri,
        headers=None,
        json=None,
        params=None,
        allow_redirects=None,
        timeout=None):
    """ Send a GET request on the session object found using the
    given `alias`

    ``alias`` identifies the Session object in the cache
    ``uri`` target of the GET request
    ``params`` url parameters to append to the uri
    ``headers`` a dictionary of headers to use with the request
    ``json`` json data to send in the body of the :class:`Request`.
    ``allow_redirects`` Boolean; redirect following is on by default
    ``timeout`` connection timeout
    """
    sess = self._cache.switch(alias)
    follow = allow_redirects if allow_redirects is not None else True
    resp = self._get_request(
        sess, uri, params, headers, json, follow, timeout)
    logger.info(
        'Get Request using : alias=%s, uri=%s, headers=%s json=%s' %
        (alias, uri, headers, json))
    return resp
def get(
        self,
        alias,
        uri,
        params=None,
        headers=None,
        allow_redirects=None,
        timeout=None):
    """ **Deprecated- See Get Request now**

    Send a GET request on the session object found using the
    given `alias`

    ``alias`` that will be used to identify the Session object in the cache

    ``uri`` to send the GET request to

    ``headers`` a dictionary of headers to use with the request

    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.

    ``timeout`` connection timeout
    """
    logger.warn("Deprecation Warning: Use Get Request in the future")
    session = self._cache.switch(alias)
    redir = True if allow_redirects is None else allow_redirects
    # BUG FIX: the original called
    #   self._get_request(session, uri, params, headers, redir, timeout, json)
    # which shifted every argument of
    #   _get_request(session, uri, params, headers, json, allow_redirects, timeout)
    # by one slot and passed the global ``json`` MODULE as the timeout,
    # so _get_timeout() raised TypeError on float(json) for every call.
    # Pass the arguments in the declared order, with no json body.
    response = self._get_request(
        session, uri, params, headers, None, redir, timeout)
    return response
def post_request(
        self,
        alias,
        uri,
        data=None,
        json=None,
        params=None,
        headers=None,
        files=None,
        allow_redirects=None,
        timeout=None):
    """ Send a POST request on the session object found using the
    given `alias`

    ``alias`` that will be used to identify the Session object in the cache

    ``uri`` to send the POST request to

    ``data`` a dictionary of key-value pairs that will be urlencoded
    and sent as POST data
    or binary data that is sent as the raw body content
    or passed as such for multipart form data if ``files`` is also defined

    ``json`` a value that will be json encoded
    and sent as POST data if files or data is not specified

    ``params`` url parameters to append to the uri

    ``headers`` a dictionary of headers to use with the request

    ``files`` a dictionary of file names containing file data to POST to the server

    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.

    ``timeout`` connection timeout
    """
    session = self._cache.switch(alias)
    if not files:
        # Multipart uploads must keep ``data`` untouched; otherwise
        # encode it according to the effective Content-Type.
        data = self._format_data_according_to_header(session, data, headers)
    redir = True if allow_redirects is None else allow_redirects

    response = self._body_request(
        "post",
        session,
        uri,
        data,
        json,
        params,
        files,
        headers,
        redir,
        timeout)

    # Textual bodies are logged verbatim, binary ones as a placeholder.
    dataStr = self._format_data_to_log_string_according_to_header(data, headers)
    logger.info('Post Request using : alias=%s, uri=%s, data=%s, headers=%s, files=%s, allow_redirects=%s '
                % (alias, uri, dataStr, headers, files, redir))
    return response
def post(
        self,
        alias,
        uri,
        data={},
        headers=None,
        files=None,
        allow_redirects=None,
        timeout=None):
    """ **Deprecated- See Post Request now**

    Send a POST request on the session found under ``alias``.

    ``alias`` identifies the Session object in the cache
    ``uri`` target of the POST request
    ``data`` key-value pairs that will be urlencoded and sent as POST
    data, or binary data sent as the raw body content
    ``headers`` a dictionary of headers to use with the request
    ``files`` a dictionary of file names containing file data to POST
    ``allow_redirects`` Boolean; redirect following is on by default
    ``timeout`` connection timeout
    """
    logger.warn("Deprecation Warning: Use Post Request in the future")
    sess = self._cache.switch(alias)
    encoded = self._utf8_urlencode(data)
    follow = allow_redirects if allow_redirects is not None else True
    return self._body_request(
        "post", sess, uri, encoded,
        None, None, files, headers, follow, timeout)
def patch_request(
        self,
        alias,
        uri,
        data=None,
        json=None,
        params=None,
        headers=None,
        files=None,
        allow_redirects=None,
        timeout=None):
    """ Send a PATCH request on the session object found using the
    given `alias`

    ``alias`` that will be used to identify the Session object in the cache

    ``uri`` to send the PATCH request to

    ``data`` a dictionary of key-value pairs that will be urlencoded
    and sent as PATCH data
    or binary data that is sent as the raw body content

    ``json`` a value that will be json encoded
    and sent as PATCH data if data is not specified

    ``headers`` a dictionary of headers to use with the request

    ``files`` a dictionary of file names containing file data to PATCH to the server

    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.

    ``params`` url parameters to append to the uri

    ``timeout`` connection timeout
    """
    session = self._cache.switch(alias)
    # Encode body according to the effective Content-Type.
    data = self._format_data_according_to_header(session, data, headers)
    redir = True if allow_redirects is None else allow_redirects

    response = self._body_request(
        "patch",
        session,
        uri,
        data,
        json,
        params,
        files,
        headers,
        redir,
        timeout)

    # Bytes are decoded only for logging; the request is already sent.
    if isinstance(data, bytes):
        data = data.decode('utf-8')
    logger.info('Patch Request using : alias=%s, uri=%s, data=%s, \
headers=%s, files=%s, allow_redirects=%s '
                % (alias, uri, data, headers, files, redir))
    return response
def patch(
        self,
        alias,
        uri,
        data={},
        headers=None,
        files={},
        allow_redirects=None,
        timeout=None):
    """ **Deprecated- See Patch Request now**

    Send a PATCH request on the session found under ``alias``.

    ``alias`` identifies the Session object in the cache
    ``uri`` target of the PATCH request
    ``data`` key-value pairs that will be urlencoded and sent as PATCH
    data, or binary data sent as the raw body content
    ``headers`` a dictionary of headers to use with the request
    ``files`` a dictionary of file names containing file data to PATCH
    ``allow_redirects`` Boolean; redirect following is on by default
    ``timeout`` connection timeout
    """
    logger.warn("Deprecation Warning: Use Patch Request in the future")
    sess = self._cache.switch(alias)
    encoded = self._utf8_urlencode(data)
    follow = allow_redirects if allow_redirects is not None else True
    return self._body_request(
        "patch", sess, uri, encoded,
        None, None, files, headers, follow, timeout)
def put_request(
        self,
        alias,
        uri,
        data=None,
        json=None,
        params=None,
        files=None,
        headers=None,
        allow_redirects=None,
        timeout=None):
    """ Send a PUT request on the session object found using the
    given `alias`

    ``alias`` that will be used to identify the Session object in the cache

    ``uri`` to send the PUT request to

    ``data`` a dictionary of key-value pairs that will be urlencoded
    and sent as PUT data
    or binary data that is sent as the raw body content

    ``json`` a value that will be json encoded
    and sent as PUT data if data is not specified

    ``headers`` a dictionary of headers to use with the request

    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.

    ``params`` url parameters to append to the uri

    ``timeout`` connection timeout
    """
    session = self._cache.switch(alias)
    # Encode body according to the effective Content-Type.
    data = self._format_data_according_to_header(session, data, headers)
    redir = True if allow_redirects is None else allow_redirects

    response = self._body_request(
        "put",
        session,
        uri,
        data,
        json,
        params,
        files,
        headers,
        redir,
        timeout)

    # Bytes are decoded only for logging; the request is already sent.
    if isinstance(data, bytes):
        data = data.decode('utf-8')
    logger.info('Put Request using : alias=%s, uri=%s, data=%s, \
headers=%s, allow_redirects=%s ' % (alias, uri, data, headers, redir))

    return response
def put(
        self,
        alias,
        uri,
        data=None,
        headers=None,
        allow_redirects=None,
        timeout=None):
    """ **Deprecated- See Put Request now**

    Send a PUT request on the session found under ``alias``.

    ``alias`` identifies the Session object in the cache
    ``uri`` target of the PUT request
    ``headers`` a dictionary of headers to use with the request
    ``allow_redirects`` Boolean; redirect following is on by default
    ``timeout`` connection timeout
    """
    logger.warn("Deprecation Warning: Use Put Request in the future")
    sess = self._cache.switch(alias)
    encoded = self._utf8_urlencode(data)
    follow = allow_redirects if allow_redirects is not None else True
    return self._body_request(
        "put", sess, uri, encoded,
        None, None, None, headers, follow, timeout)
def delete_request(
        self,
        alias,
        uri,
        data=None,
        json=None,
        params=None,
        headers=None,
        allow_redirects=None,
        timeout=None):
    """ Send a DELETE request on the session object found using the
    given `alias`

    ``alias`` that will be used to identify the Session object in the cache

    ``uri`` to send the DELETE request to

    ``json`` a value that will be json encoded
    and sent as request data if data is not specified

    ``headers`` a dictionary of headers to use with the request

    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.

    ``timeout`` connection timeout
    """
    session = self._cache.switch(alias)
    # Encode body according to the effective Content-Type.
    data = self._format_data_according_to_header(session, data, headers)
    redir = True if allow_redirects is None else allow_redirects

    response = self._delete_request(
        session, uri, data, json, params, headers, redir, timeout)
    # Bytes are decoded only for logging; the request is already sent.
    if isinstance(data, bytes):
        data = data.decode('utf-8')
    logger.info('Delete Request using : alias=%s, uri=%s, data=%s, \
headers=%s, allow_redirects=%s ' % (alias, uri, data, headers, redir))
    return response
def delete(
        self,
        alias,
        uri,
        data=(),
        headers=None,
        allow_redirects=None,
        timeout=None):
    """ * * * Deprecated- See Delete Request now * * *

    Send a DELETE request on the session object found using the
    given `alias`

    ``alias`` that will be used to identify the Session object in the cache

    ``uri`` to send the DELETE request to

    ``headers`` a dictionary of headers to use with the request

    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.

    ``timeout`` connection timeout
    """
    logger.warn("Deprecation Warning: Use Delete Request in the future")
    session = self._cache.switch(alias)
    data = self._utf8_urlencode(data)
    redir = True if allow_redirects is None else allow_redirects
    # BUG FIX: the original passed the global ``json`` MODULE into the
    # json-body slot of _delete_request(session, uri, data, json,
    # params, headers, allow_redirects, timeout); requests then tried
    # to json-encode a module object and raised TypeError on every
    # call. This deprecated keyword never sends a json body, so the
    # correct value is None.
    response = self._delete_request(
        session, uri, data, None, None, headers, redir, timeout)
    return response
def head_request(
        self,
        alias,
        uri,
        headers=None,
        allow_redirects=None,
        timeout=None):
    """ Send a HEAD request on the session object found using the
    given `alias`

    ``alias`` that will be used to identify the Session object in the cache

    ``uri`` to send the HEAD request to

    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.

    ``headers`` a dictionary of headers to use with the request
    """
    session = self._cache.switch(alias)
    # Unlike the other verbs, HEAD defaults to NOT following redirects.
    redir = False if allow_redirects is None else allow_redirects
    response = self._head_request(session, uri, headers, redir, timeout)
    logger.info('Head Request using : alias=%s, uri=%s, headers=%s, \
allow_redirects=%s ' % (alias, uri, headers, redir))
    return response
    def options_request(
            self,
            alias,
            uri,
            headers=None,
            allow_redirects=None,
            timeout=None):
        """ Send an OPTIONS request on the session object found using the
        given `alias`

        ``alias`` that will be used to identify the Session object in the cache

        ``uri`` to send the OPTIONS request to

        ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.

        ``headers`` a dictionary of headers to use with the request
        """
        session = self._cache.switch(alias)
        # OPTIONS follows redirects by default.
        redir = True if allow_redirects is None else allow_redirects
        response = self._options_request(session, uri, headers, redir, timeout)
        logger.info(
            'Options Request using : alias=%s, uri=%s, headers=%s, allow_redirects=%s ' %
            (alias, uri, headers, redir))
        return response
    def options(
            self,
            alias,
            uri,
            headers=None,
            allow_redirects=None,
            timeout=None):
        """ **Deprecated- See Options Request now**

        Send an OPTIONS request on the session object found using the
        given `alias`

        ``alias`` that will be used to identify the Session object in the cache

        ``uri`` to send the OPTIONS request to

        ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.

        ``headers`` a dictionary of headers to use with the request
        """
        # Thin deprecated wrapper: same behavior as Options Request, minus logging.
        logger.warn("Deprecation Warning: Use Options Request in the future")
        session = self._cache.switch(alias)
        redir = True if allow_redirects is None else allow_redirects
        response = self._options_request(session, uri, headers, redir, timeout)
        return response
    def _get_request(
            self,
            session,
            uri,
            params,
            headers,
            json,
            allow_redirects,
            timeout):
        """Issue the actual GET on ``session`` and stash the response.

        ``json`` here is the request body to json-encode (requests' ``json=``
        kwarg), not the json module. Cookies and TLS verification come from
        the values captured at session-creation time.
        """
        self._capture_output()

        resp = session.get(self._get_url(session, uri),
                           headers=headers,
                           json=json,
                           params=self._utf8_urlencode(params),
                           allow_redirects=allow_redirects,
                           timeout=self._get_timeout(timeout),
                           cookies=self.cookies,
                           verify=self.verify)

        self._print_debug()

        # Store the last session object
        session.last_resp = resp

        return resp
    def _body_request(
            self,
            method_name,
            session,
            uri,
            data,
            json,
            params,
            files,
            headers,
            allow_redirects,
            timeout):
        """Issue a body-carrying request (post/put/patch) chosen by name.

        ``method_name`` selects the bound session method via getattr, so one
        implementation serves POST, PUT and PATCH. requests itself resolves
        precedence between ``data``, ``json`` and ``files``.
        """
        self._capture_output()

        # Dynamic dispatch: e.g. method_name='post' -> session.post
        method = getattr(session, method_name)

        resp = method(self._get_url(session, uri),
                      data=data,
                      json=json,
                      params=self._utf8_urlencode(params),
                      files=files,
                      headers=headers,
                      allow_redirects=allow_redirects,
                      timeout=self._get_timeout(timeout),
                      cookies=self.cookies,
                      verify=self.verify)

        self._print_debug()

        # Store the last session object
        session.last_resp = resp

        self.builtin.log(method_name + ' response: ' + resp.text, 'DEBUG')

        return resp
    def _delete_request(
            self,
            session,
            uri,
            data,
            json,
            params,
            headers,
            allow_redirects,
            timeout):
        """Issue the actual DELETE on ``session`` and stash the response.

        ``json`` is the body to json-encode (requests' ``json=`` kwarg);
        pass None when the keyword carries no json payload.
        """
        self._capture_output()

        resp = session.delete(self._get_url(session, uri),
                              data=data,
                              json=json,
                              params=self._utf8_urlencode(params),
                              headers=headers,
                              allow_redirects=allow_redirects,
                              timeout=self._get_timeout(timeout),
                              cookies=self.cookies,
                              verify=self.verify)

        self._print_debug()

        # Store the last session object
        session.last_resp = resp

        return resp
def _head_request(self, session, uri, headers, allow_redirects, timeout):
self._capture_output()
resp = session.head(self._get_url(session, uri),
headers=headers,
allow_redirects=allow_redirects,
timeout=self._get_timeout(timeout),
cookies=self.cookies,
verify=self.verify)
self._print_debug()
# Store the last session object
session.last_resp = resp
return resp
def _options_request(
self,
session,
uri,
headers,
allow_redirects,
timeout):
self._capture_output()
resp = session.options(self._get_url(session, uri),
headers=headers,
cookies=self.cookies,
allow_redirects=allow_redirects,
timeout=self._get_timeout(timeout),
verify=self.verify)
self._print_debug()
# Store the last session object
session.last_resp = resp
return resp
def _get_url(self, session, uri):
"""
Helper method to get the full url
"""
url = session.url
if uri:
slash = '' if uri.startswith('/') else '/'
url = "%s%s%s" % (session.url, slash, uri)
return url
def _get_timeout(self, timeout):
return float(timeout) if timeout is not None else self.timeout
    def _capture_output(self):
        # When http debugging is on, swap stdout for an in-memory buffer so
        # httplib's debug chatter can be collected and logged by _print_debug.
        if self.debug >= 1:
            self.http_log = WritableObject()
            sys.stdout = self.http_log
    def _print_debug(self):
        """Restore stdout and flush captured http debug output to the log."""
        if self.debug >= 1:
            sys.stdout = sys.__stdout__  # Restore stdout
            # The captured text is a list of repr-ish fragments; strip the
            # literal \r escapes and quotes. Python 2 additionally needs
            # string_escape decoding to unescape the bytes.
            if PY3:
                debug_info = ''.join(
                    self.http_log.content).replace(
                    '\\r',
                    '').replace(
                    '\'',
                    '')
            else:
                debug_info = ''.join(
                    self.http_log.content).replace(
                    '\\r',
                    '').decode('string_escape').replace(
                    '\'',
                    '')

            # Remove empty lines
            debug_info = "\n".join(
                [ll.rstrip() for ll in debug_info.splitlines() if ll.strip()])
            self.builtin.log(debug_info, 'DEBUG')
def _json_pretty_print(self, content):
"""
Pretty print a JSON object
``content`` JSON object to pretty print
"""
temp = json.loads(content)
return json.dumps(
temp,
sort_keys=True,
indent=4,
separators=(
',',
': '))
    def _utf8_urlencode(self, data):
        # Text strings pass through as UTF-8 encoded bytes; non-dict values
        # (bytes, generators, None, ...) are returned untouched; dicts have
        # their text values UTF-8 encoded and are serialized with urlencode.
        if self._is_string_type(data):
            return data.encode('utf-8')

        if not isinstance(data, dict):
            return data

        utf8_data = {}
        for k, v in data.items():
            if self._is_string_type(v):
                v = v.encode('utf-8')
            utf8_data[k] = v
        return urlencode(utf8_data)
    def _format_data_according_to_header(self, session, data, headers):
        """Serialize ``data`` to match the effective Content-Type header.

        Session and per-request headers are merged first. Data already valid
        as JSON text is left alone; otherwise json Content-Types get
        json.dumps (generators excepted), everything else is url-encoded.
        """
        headers = self._merge_headers(session, headers)
        if data is not None and headers is not None and 'Content-Type' in headers and not self._is_json(data):
            if headers['Content-Type'].find("application/json") != -1:
                # Generators can't be json.dumps'd; pass them through as-is.
                if not isinstance(data, types.GeneratorType):
                    data = json.dumps(data)
            elif headers['Content-Type'].find("application/x-www-form-urlencoded") != -1:
                data = self._utf8_urlencode(data)
        else:
            data = self._utf8_urlencode(data)

        return data
def _format_data_to_log_string_according_to_header(self, data, headers):
dataStr = "<empty>"
if data is not None and headers is not None and 'Content-Type' in headers:
if (headers['Content-Type'].find("application/json") != -1) or \
(headers['Content-Type'].find("application/x-www-form-urlencoded") != -1):
if isinstance(data, bytes):
dataStr = data.decode('utf-8')
else:
dataStr = data
else:
dataStr = "<" + headers['Content-Type'] + ">"
return dataStr
    @staticmethod
    def _merge_headers(session, headers):
        # Merge per-request headers with the session's default headers,
        # never mutating the caller's dict.
        # NOTE(review): update() is called with the *session* headers last,
        # so session-level headers override per-request ones here — the
        # opposite of requests' own merge order. This is only used for
        # Content-Type sniffing in _format_data_according_to_header, but
        # confirm the inversion is intentional before reusing elsewhere.
        if headers is None:
            headers = {}
        else:
            headers = headers.copy()

        headers.update(session.headers)

        return headers
@staticmethod
def _is_json(data):
try:
json.loads(data)
except (TypeError, ValueError):
return False
return True
    @staticmethod
    def _is_string_type(data):
        # True for text strings on either interpreter: ``str`` on Python 3,
        # ``unicode`` on Python 2 (PY3 is a module-level flag; ``unicode``
        # only exists on Python 2, so the branch order matters).
        if PY3 and isinstance(data, str):
            return True
        elif not PY3 and isinstance(data, unicode):
            return True
        return False
|
bulkan/robotframework-requests
|
src/RequestsLibrary/RequestsKeywords.py
|
RequestsKeywords.options_request
|
python
|
def options_request(
self,
alias,
uri,
headers=None,
allow_redirects=None,
timeout=None):
session = self._cache.switch(alias)
redir = True if allow_redirects is None else allow_redirects
response = self._options_request(session, uri, headers, redir, timeout)
logger.info(
'Options Request using : alias=%s, uri=%s, headers=%s, allow_redirects=%s ' %
(alias, uri, headers, redir))
return response
|
Send an OPTIONS request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the OPTIONS request to
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``headers`` a dictionary of headers to use with the request
|
train
|
https://github.com/bulkan/robotframework-requests/blob/11baa3277f1cb728712e26d996200703c15254a8/src/RequestsLibrary/RequestsKeywords.py#L990-L1015
|
[
"def _options_request(\n self,\n session,\n uri,\n headers,\n allow_redirects,\n timeout):\n self._capture_output()\n\n resp = session.options(self._get_url(session, uri),\n headers=headers,\n cookies=self.cookies,\n allow_redirects=allow_redirects,\n timeout=self._get_timeout(timeout),\n verify=self.verify)\n\n self._print_debug()\n\n # Store the last session object\n session.last_resp = resp\n\n return resp\n"
] |
class RequestsKeywords(object):
"""``RequestsLibrary`` is a [http://code.google.com/p/robotframework/|Robot Framework] test library that uses the [https://github.com/kennethreitz/requests|Requests] HTTP client.
Here is an example testcase
| ***** Settings ***** | | | | |
| Library | Collections | | | |
| Library | RequestsLibrary | | | |
| ***** Test Cases ***** | | | | |
| Get Requests | | | | |
| | Create Session | github | http://api.github.com | |
| | Create Session | google | http://www.google.com | |
| | ${resp}= | Get Request | google | / |
| | Should Be Equal As Strings | ${resp.status_code} | 200 | |
| | ${resp}= | Get Request | github | /users/bulkan |
| | Should Be Equal As Strings | ${resp.status_code} | 200 | |
| | Dictionary Should Contain Value | ${resp.json()} | Bulkan Savun Evcimen | |
"""
ROBOT_LIBRARY_SCOPE = 'Global'
    def __init__(self):
        # Named cache of requests.Session objects, keyed by Robot alias.
        self._cache = robot.utils.ConnectionCache('No sessions created')
        self.builtin = BuiltIn()
        # httplib debug verbosity; raised per-session in _create_session.
        self.debug = 0
    def _create_session(
            self,
            alias,
            url,
            headers,
            cookies,
            auth,
            timeout,
            max_retries,
            backoff_factor,
            proxies,
            verify,
            debug,
            disable_warnings):
        """ Create Session: create a HTTP session to a server

        ``url`` Base url of the server

        ``alias`` Robot Framework alias to identify the session

        ``headers`` Dictionary of default headers

        ``cookies`` Dictionary of cookies

        ``auth`` List of username & password for HTTP Basic Auth

        ``timeout`` Connection timeout

        ``max_retries`` The maximum number of retries each connection should attempt.

        ``backoff_factor`` The pause between for each retry

        ``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication

        ``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.

        ``debug`` Enable http verbosity option more information
                https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel

        ``disable_warnings`` Disable requests warning useful when you have large number of testcases
        """

        self.builtin.log('Creating session: %s' % alias, 'DEBUG')
        s = session = requests.Session()
        s.headers.update(headers)
        s.auth = auth if auth else s.auth
        s.proxies = proxies if proxies else s.proxies

        try:
            max_retries = int(max_retries)
        except ValueError as err:
            raise ValueError("Error converting max_retries parameter: %s" % err)

        if max_retries > 0:
            # Mount retrying adapters for both schemes; replaces the defaults.
            http = requests.adapters.HTTPAdapter(max_retries=Retry(total=max_retries, backoff_factor=backoff_factor))
            https = requests.adapters.HTTPAdapter(max_retries=Retry(total=max_retries, backoff_factor=backoff_factor))

            # Replace the session's original adapters
            s.mount('http://', http)
            s.mount('https://', https)

        # Disable requests warnings, useful when you have large number of testcase
        # you will observe drastical changes in Robot log.html and output.xml files size
        if disable_warnings:
            logging.basicConfig()  # you need to initialize logging, otherwise you will not see anything from requests
            logging.getLogger().setLevel(logging.ERROR)
            requests_log = logging.getLogger("requests")
            requests_log.setLevel(logging.ERROR)
            requests_log.propagate = True
            if not verify:
                requests.packages.urllib3.disable_warnings()

        # verify can be a Boolean or a String
        if isinstance(verify, bool):
            s.verify = verify
        elif isinstance(verify, str) or isinstance(verify, unicode):
            if verify.lower() == 'true' or verify.lower() == 'false':
                s.verify = self.builtin.convert_to_boolean(verify)
            else:
                # String for CA_BUNDLE, not a Boolean String
                s.verify = verify
        else:
            # not a Boolean nor a String
            s.verify = verify

        # cant pass these into the Session anymore
        self.timeout = float(timeout) if timeout is not None else None
        self.cookies = cookies
        # Only keep an explicit verify value; True falls back to requests' default.
        self.verify = verify if self.builtin.convert_to_boolean(verify) != True else None

        s.url = url

        # Enable http verbosity
        if int(debug) >= 1:
            self.debug = int(debug)
            httplib.HTTPConnection.debuglevel = self.debug

        self._cache.register(session, alias=alias)
        return session
    def create_session(self, alias, url, headers={}, cookies={},
                       auth=None, timeout=None, proxies=None,
                       verify=False, debug=0, max_retries=3, backoff_factor=0.10, disable_warnings=0):
        """ Create Session: create a HTTP session to a server

        ``url`` Base url of the server

        ``alias`` Robot Framework alias to identify the session

        ``headers`` Dictionary of default headers

        ``cookies`` Dictionary of cookies

        ``auth`` List of username & password for HTTP Basic Auth

        ``timeout`` Connection timeout

        ``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication

        ``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.
                 Defaults to False.

        ``debug`` Enable http verbosity option more information
                https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel

        ``max_retries`` The maximum number of retries each connection should attempt.

        ``backoff_factor`` The pause between for each retry

        ``disable_warnings`` Disable requests warning useful when you have large number of testcases
        """
        # Wrap the credential pair into a Basic-Auth object before delegating.
        auth = requests.auth.HTTPBasicAuth(*auth) if auth else None

        logger.info('Creating Session using : alias=%s, url=%s, headers=%s, \
                    cookies=%s, auth=%s, timeout=%s, proxies=%s, verify=%s, \
                    debug=%s ' % (alias, url, headers, cookies, auth, timeout,
                                  proxies, verify, debug))

        return self._create_session(
            alias,
            url,
            headers,
            cookies,
            auth,
            timeout,
            max_retries,
            backoff_factor,
            proxies,
            verify,
            debug,
            disable_warnings)
    def create_custom_session(
            self,
            alias,
            url,
            auth,
            headers={},
            cookies={},
            timeout=None,
            proxies=None,
            verify=False,
            debug=0,
            max_retries=3,
            backoff_factor=0.10,
            disable_warnings=0):
        """ Create Session: create a HTTP session to a server

        ``url`` Base url of the server

        ``alias`` Robot Framework alias to identify the session

        ``headers`` Dictionary of default headers

        ``cookies`` Dictionary of cookies

        ``auth`` A Custom Authentication object to be passed on to the reqests library.
                http://docs.python-requests.org/en/master/user/advanced/#custom-authentication

        ``timeout`` Connection timeout

        ``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication

        ``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.
                 Defaults to False.

        ``debug`` Enable http verbosity option more information
                https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel

        ``max_retries`` The maximum number of retries each connection should attempt.

        ``backoff_factor`` The pause between for each retry

        ``disable_warnings`` Disable requests warning useful when you have large number of testcases
        """
        # Unlike create_session, ``auth`` is forwarded unchanged so any
        # requests-compatible auth object can be used.
        logger.info('Creating Custom Authenticated Session using : alias=%s, url=%s, headers=%s, \
                    cookies=%s, auth=%s, timeout=%s, proxies=%s, verify=%s, \
                    debug=%s ' % (alias, url, headers, cookies, auth, timeout,
                                  proxies, verify, debug))

        return self._create_session(
            alias,
            url,
            headers,
            cookies,
            auth,
            timeout,
            max_retries,
            backoff_factor,
            proxies,
            verify,
            debug,
            disable_warnings)
    def create_ntlm_session(
            self,
            alias,
            url,
            auth,
            headers={},
            cookies={},
            timeout=None,
            proxies=None,
            verify=False,
            debug=0,
            max_retries=3,
            backoff_factor=0.10,
            disable_warnings=0):
        """ Create Session: create a HTTP session to a server

        ``url`` Base url of the server

        ``alias`` Robot Framework alias to identify the session

        ``headers`` Dictionary of default headers

        ``cookies`` Dictionary of cookies

        ``auth`` ['DOMAIN', 'username', 'password'] for NTLM Authentication

        ``timeout`` Connection timeout

        ``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication

        ``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.
                 Defaults to False.

        ``debug`` Enable http verbosity option more information
                https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel

        ``max_retries`` The maximum number of retries each connection should attempt.

        ``backoff_factor`` The pause between for each retry

        ``disable_warnings`` Disable requests warning useful when you have large number of testcases
        """
        # HttpNtlmAuth is an optional dependency; fail fast if it's missing
        # or if the credentials triple is malformed.
        if not HttpNtlmAuth:
            raise AssertionError('Requests NTLM module not loaded')
        elif len(auth) != 3:
            raise AssertionError('Incorrect number of authentication arguments'
                                 ' - expected 3, got {}'.format(len(auth)))
        else:
            # NTLM expects "DOMAIN\\username" as the user string.
            ntlm_auth = HttpNtlmAuth('{}\\{}'.format(auth[0], auth[1]),
                                     auth[2])
            logger.info('Creating NTLM Session using : alias=%s, url=%s, \
                        headers=%s, cookies=%s, ntlm_auth=%s, timeout=%s, \
                        proxies=%s, verify=%s, debug=%s '
                        % (alias, url, headers, cookies, ntlm_auth,
                           timeout, proxies, verify, debug))

            return self._create_session(
                alias,
                url,
                headers,
                cookies,
                ntlm_auth,
                timeout,
                max_retries,
                backoff_factor,
                proxies,
                verify,
                debug,
                disable_warnings)
    def create_digest_session(self, alias, url, auth, headers={}, cookies={},
                              timeout=None, proxies=None, verify=False,
                              debug=0, max_retries=3, backoff_factor=0.10, disable_warnings=0):
        """ Create Session: create a HTTP session to a server

        ``url`` Base url of the server

        ``alias`` Robot Framework alias to identify the session

        ``headers`` Dictionary of default headers

        ``cookies`` Dictionary of cookies

        ``auth`` ['DOMAIN', 'username', 'password'] for NTLM Authentication

        ``timeout`` Connection timeout

        ``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication

        ``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.
                 Defaults to False.

        ``debug`` Enable http verbosity option more information
                https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel

        ``max_retries`` The maximum number of retries each connection should attempt.

        ``backoff_factor`` The pause between for each retry

        ``disable_warnings`` Disable requests warning useful when you have large number of testcases
        """
        # Wrap the credential pair into a Digest-Auth object before delegating.
        digest_auth = requests.auth.HTTPDigestAuth(*auth) if auth else None

        return self._create_session(
            alias,
            url,
            headers,
            cookies,
            digest_auth,
            timeout,
            max_retries,
            backoff_factor,
            proxies,
            verify,
            debug,
            disable_warnings)
    def create_client_cert_session(self, alias, url, headers={}, cookies={},
                                   client_certs=None, timeout=None, proxies=None,
                                   verify=False, debug=0, max_retries=3, backoff_factor=0.10, disable_warnings=0):
        """ Create Session: create a HTTP session to a server

        ``url`` Base url of the server

        ``alias`` Robot Framework alias to identify the session

        ``headers`` Dictionary of default headers

        ``cookies`` Dictionary of cookies

        ``client_certs`` ['client certificate', 'client key'] PEM files containing the client key and certificate

        ``timeout`` Connection timeout

        ``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication

        ``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.
                 Defaults to False.

        ``debug`` Enable http verbosity option more information
                https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel

        ``max_retries`` The maximum number of retries each connection should attempt.

        ``backoff_factor`` The pause between for each retry

        ``disable_warnings`` Disable requests warning useful when you have large number of testcases
        """
        logger.info('Creating Session using : alias=%s, url=%s, headers=%s, \
                    cookies=%s, client_certs=%s, timeout=%s, proxies=%s, verify=%s, \
                    debug=%s ' % (alias, url, headers, cookies, client_certs, timeout,
                                  proxies, verify, debug))

        session = self._create_session(
            alias,
            url,
            headers,
            cookies,
            None,
            timeout,
            max_retries,
            backoff_factor,
            proxies,
            verify,
            debug,
            disable_warnings)

        # requests expects the client certificate as a (cert, key) tuple.
        session.cert = tuple(client_certs)
        return session
    def delete_all_sessions(self):
        """ Removes all the session objects """
        logger.info('Delete All Sessions')
        # ConnectionCache.empty_cache drops every registered session/alias.
        self._cache.empty_cache()
    def update_session(self, alias, headers=None, cookies=None):
        """Update Session Headers: update a HTTP Session Headers

        ``alias`` Robot Framework alias to identify the session

        ``headers`` Dictionary of headers merge into session

        ``cookies`` Dictionary of cookies merge into session
        """
        session = self._cache.switch(alias)
        # requests' own merge helpers: per-call values win over existing ones.
        session.headers = merge_setting(headers, session.headers)
        session.cookies = merge_cookies(session.cookies, cookies)
    def to_json(self, content, pretty_print=False):
        """ Convert a string to a JSON object

        ``content`` String content to convert into JSON

        ``pretty_print`` If defined, will output JSON is pretty print format
        """
        # Response bodies arrive as bytes on Python 3; decode before parsing.
        if PY3:
            if isinstance(content, bytes):
                content = content.decode(encoding='utf-8')
        if pretty_print:
            # NOTE: pretty-print returns a formatted *string*, not a dict.
            json_ = self._json_pretty_print(content)
        else:
            json_ = json.loads(content)
        logger.info('To JSON using : content=%s ' % (content))
        logger.info('To JSON using : pretty_print=%s ' % (pretty_print))
        return json_
    def get_request(
            self,
            alias,
            uri,
            headers=None,
            json=None,
            params=None,
            allow_redirects=None,
            timeout=None):
        """ Send a GET request on the session object found using the
        given `alias`

        ``alias`` that will be used to identify the Session object in the cache

        ``uri`` to send the GET request to

        ``params`` url parameters to append to the uri

        ``headers`` a dictionary of headers to use with the request

        ``json`` json data to send in the body of the :class:`Request`.

        ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.

        ``timeout`` connection timeout
        """
        session = self._cache.switch(alias)
        # GET follows redirects by default.
        redir = True if allow_redirects is None else allow_redirects

        response = self._get_request(
            session, uri, params, headers, json, redir, timeout)

        logger.info(
            'Get Request using : alias=%s, uri=%s, headers=%s json=%s' %
            (alias, uri, headers, json))

        return response
def get(
self,
alias,
uri,
params=None,
headers=None,
allow_redirects=None,
timeout=None):
""" **Deprecated- See Get Request now**
Send a GET request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the GET request to
``headers`` a dictionary of headers to use with the request
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``timeout`` connection timeout
"""
logger.warn("Deprecation Warning: Use Get Request in the future")
session = self._cache.switch(alias)
redir = True if allow_redirects is None else allow_redirects
response = self._get_request(
session, uri, params, headers, redir, timeout, json)
return response
    def post_request(
            self,
            alias,
            uri,
            data=None,
            json=None,
            params=None,
            headers=None,
            files=None,
            allow_redirects=None,
            timeout=None):
        """ Send a POST request on the session object found using the
        given `alias`

        ``alias`` that will be used to identify the Session object in the cache

        ``uri`` to send the POST request to

        ``data`` a dictionary of key-value pairs that will be urlencoded
               and sent as POST data
               or binary data that is sent as the raw body content
               or passed as such for multipart form data if ``files`` is also
                  defined

        ``json`` a value that will be json encoded
               and sent as POST data if files or data is not specified

        ``params`` url parameters to append to the uri

        ``headers`` a dictionary of headers to use with the request

        ``files`` a dictionary of file names containing file data to POST to the server

        ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.

        ``timeout`` connection timeout
        """
        session = self._cache.switch(alias)
        # With a multipart upload, data must stay raw; otherwise serialize it
        # to match the effective Content-Type header.
        if not files:
            data = self._format_data_according_to_header(session, data, headers)
        redir = True if allow_redirects is None else allow_redirects

        response = self._body_request(
            "post",
            session,
            uri,
            data,
            json,
            params,
            files,
            headers,
            redir,
            timeout)

        dataStr = self._format_data_to_log_string_according_to_header(data, headers)
        logger.info('Post Request using : alias=%s, uri=%s, data=%s, headers=%s, files=%s, allow_redirects=%s '
                    % (alias, uri, dataStr, headers, files, redir))
        return response
    def post(
            self,
            alias,
            uri,
            data={},
            headers=None,
            files=None,
            allow_redirects=None,
            timeout=None):
        """ **Deprecated- See Post Request now**

        Send a POST request on the session object found using the
        given `alias`

        ``alias`` that will be used to identify the Session object in the cache

        ``uri`` to send the GET request to

        ``data`` a dictionary of key-value pairs that will be urlencoded
               and sent as POST data
               or binary data that is sent as the raw body content

        ``headers`` a dictionary of headers to use with the request

        ``files`` a dictionary of file names containing file data to POST to the server

        ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.

        ``timeout`` connection timeout
        """
        logger.warn("Deprecation Warning: Use Post Request in the future")
        session = self._cache.switch(alias)
        # Legacy behavior: always url-encode, regardless of Content-Type.
        data = self._utf8_urlencode(data)
        redir = True if allow_redirects is None else allow_redirects

        response = self._body_request(
            "post",
            session,
            uri,
            data,
            None,
            None,
            files,
            headers,
            redir,
            timeout)

        return response
    def patch_request(
            self,
            alias,
            uri,
            data=None,
            json=None,
            params=None,
            headers=None,
            files=None,
            allow_redirects=None,
            timeout=None):
        """ Send a PATCH request on the session object found using the
        given `alias`

        ``alias`` that will be used to identify the Session object in the cache

        ``uri`` to send the PATCH request to

        ``data`` a dictionary of key-value pairs that will be urlencoded
               and sent as PATCH data
               or binary data that is sent as the raw body content

        ``json`` a value that will be json encoded
               and sent as PATCH data if data is not specified

        ``headers`` a dictionary of headers to use with the request

        ``files`` a dictionary of file names containing file data to PATCH to the server

        ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.

        ``params`` url parameters to append to the uri

        ``timeout`` connection timeout
        """
        session = self._cache.switch(alias)
        data = self._format_data_according_to_header(session, data, headers)
        redir = True if allow_redirects is None else allow_redirects

        response = self._body_request(
            "patch",
            session,
            uri,
            data,
            json,
            params,
            files,
            headers,
            redir,
            timeout)

        # Decode for logging only; the request has already been sent.
        if isinstance(data, bytes):
            data = data.decode('utf-8')
        logger.info('Patch Request using : alias=%s, uri=%s, data=%s, \
                    headers=%s, files=%s, allow_redirects=%s '
                    % (alias, uri, data, headers, files, redir))

        return response
    def patch(
            self,
            alias,
            uri,
            data={},
            headers=None,
            files={},
            allow_redirects=None,
            timeout=None):
        """ **Deprecated- See Patch Request now**

        Send a PATCH request on the session object found using the
        given `alias`

        ``alias`` that will be used to identify the Session object in the cache

        ``uri`` to send the PATCH request to

        ``data`` a dictionary of key-value pairs that will be urlencoded
               and sent as PATCH data
               or binary data that is sent as the raw body content

        ``headers`` a dictionary of headers to use with the request

        ``files`` a dictionary of file names containing file data to PATCH to the server

        ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.

        ``timeout`` connection timeout
        """
        logger.warn("Deprecation Warning: Use Patch Request in the future")
        session = self._cache.switch(alias)
        # Legacy behavior: always url-encode, regardless of Content-Type.
        data = self._utf8_urlencode(data)
        redir = True if allow_redirects is None else allow_redirects

        response = self._body_request(
            "patch",
            session,
            uri,
            data,
            None,
            None,
            files,
            headers,
            redir,
            timeout)

        return response
    def put_request(
            self,
            alias,
            uri,
            data=None,
            json=None,
            params=None,
            files=None,
            headers=None,
            allow_redirects=None,
            timeout=None):
        """ Send a PUT request on the session object found using the
        given `alias`

        ``alias`` that will be used to identify the Session object in the cache

        ``uri`` to send the PUT request to

        ``data`` a dictionary of key-value pairs that will be urlencoded
               and sent as PUT data
               or binary data that is sent as the raw body content

        ``json`` a value that will be json encoded
               and sent as PUT data if data is not specified

        ``headers`` a dictionary of headers to use with the request

        ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.

        ``params`` url parameters to append to the uri

        ``timeout`` connection timeout
        """
        session = self._cache.switch(alias)
        data = self._format_data_according_to_header(session, data, headers)
        redir = True if allow_redirects is None else allow_redirects

        response = self._body_request(
            "put",
            session,
            uri,
            data,
            json,
            params,
            files,
            headers,
            redir,
            timeout)

        # Decode for logging only; the request has already been sent.
        if isinstance(data, bytes):
            data = data.decode('utf-8')
        logger.info('Put Request using : alias=%s, uri=%s, data=%s, \
                    headers=%s, allow_redirects=%s ' % (alias, uri, data, headers, redir))

        return response
    def put(
            self,
            alias,
            uri,
            data=None,
            headers=None,
            allow_redirects=None,
            timeout=None):
        """ **Deprecated- See Put Request now**

        Send a PUT request on the session object found using the
        given `alias`

        ``alias`` that will be used to identify the Session object in the cache

        ``uri`` to send the PUT request to

        ``headers`` a dictionary of headers to use with the request

        ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.

        ``timeout`` connection timeout
        """
        logger.warn("Deprecation Warning: Use Put Request in the future")
        session = self._cache.switch(alias)
        # Legacy behavior: always url-encode, regardless of Content-Type.
        data = self._utf8_urlencode(data)
        redir = True if allow_redirects is None else allow_redirects

        response = self._body_request(
            "put",
            session,
            uri,
            data,
            None,
            None,
            None,
            headers,
            redir,
            timeout)

        return response
    def delete_request(
            self,
            alias,
            uri,
            data=None,
            json=None,
            params=None,
            headers=None,
            allow_redirects=None,
            timeout=None):
        """ Send a DELETE request on the session object found using the
        given `alias`

        ``alias`` that will be used to identify the Session object in the cache

        ``uri`` to send the DELETE request to

        ``json`` a value that will be json encoded
               and sent as request data if data is not specified

        ``headers`` a dictionary of headers to use with the request

        ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.

        ``timeout`` connection timeout
        """
        session = self._cache.switch(alias)
        data = self._format_data_according_to_header(session, data, headers)
        redir = True if allow_redirects is None else allow_redirects

        response = self._delete_request(
            session, uri, data, json, params, headers, redir, timeout)

        # Decode for logging only; the request has already been sent.
        if isinstance(data, bytes):
            data = data.decode('utf-8')
        logger.info('Delete Request using : alias=%s, uri=%s, data=%s, \
                    headers=%s, allow_redirects=%s ' % (alias, uri, data, headers, redir))

        return response
def delete(
self,
alias,
uri,
data=(),
headers=None,
allow_redirects=None,
timeout=None):
""" * * * Deprecated- See Delete Request now * * *
Send a DELETE request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the DELETE request to
``headers`` a dictionary of headers to use with the request
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``timeout`` connection timeout
"""
logger.warn("Deprecation Warning: Use Delete Request in the future")
session = self._cache.switch(alias)
data = self._utf8_urlencode(data)
redir = True if allow_redirects is None else allow_redirects
response = self._delete_request(
session, uri, data, json, None, headers, redir, timeout)
return response
    def head_request(
            self,
            alias,
            uri,
            headers=None,
            allow_redirects=None,
            timeout=None):
        """ Send a HEAD request on the session object found using the
        given `alias`

        ``alias`` that will be used to identify the Session object in the cache

        ``uri`` to send the HEAD request to

        ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.

        ``headers`` a dictionary of headers to use with the request
        """
        session = self._cache.switch(alias)
        # Unlike the other verbs, HEAD does NOT follow redirects by default.
        redir = False if allow_redirects is None else allow_redirects
        response = self._head_request(session, uri, headers, redir, timeout)
        logger.info('Head Request using : alias=%s, uri=%s, headers=%s, \
        allow_redirects=%s ' % (alias, uri, headers, redir))

        return response
    def head(
            self,
            alias,
            uri,
            headers=None,
            allow_redirects=None,
            timeout=None):
        """ **Deprecated- See Head Request now**

        Send a HEAD request on the session object found using the
        given `alias`

        ``alias`` that will be used to identify the Session object in the cache

        ``uri`` to send the HEAD request to

        ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.

        ``headers`` a dictionary of headers to use with the request
        """
        # Thin deprecated wrapper: same behavior as Head Request, minus logging.
        logger.warn("Deprecation Warning: Use Head Request in the future")
        session = self._cache.switch(alias)
        redir = False if allow_redirects is None else allow_redirects
        response = self._head_request(session, uri, headers, redir, timeout)

        return response
def options(
        self,
        alias,
        uri,
        headers=None,
        allow_redirects=None,
        timeout=None):
    """**Deprecated- See Options Request now**

    Send an OPTIONS request on the session object found using the
    given `alias`

    ``alias`` that will be used to identify the Session object in the cache

    ``uri`` to send the OPTIONS request to

    ``allow_redirects`` Boolean. Set to True if redirect following is allowed.

    ``headers`` a dictionary of headers to use with the request
    """
    logger.warn("Deprecation Warning: Use Options Request in the future")
    session = self._cache.switch(alias)
    # OPTIONS follows redirects by default.
    follow = allow_redirects if allow_redirects is not None else True
    return self._options_request(session, uri, headers, follow, timeout)
def _get_request(
        self,
        session,
        uri,
        params,
        headers,
        json,
        allow_redirects,
        timeout):
    # Perform the actual GET.  _capture_output must run BEFORE the request
    # and _print_debug AFTER it: together they capture httplib's verbose
    # chatter (written to stdout) and forward it to the Robot log.
    self._capture_output()

    resp = session.get(self._get_url(session, uri),
                       headers=headers,
                       json=json,
                       params=self._utf8_urlencode(params),
                       allow_redirects=allow_redirects,
                       timeout=self._get_timeout(timeout),
                       cookies=self.cookies,
                       verify=self.verify)

    self._print_debug()

    # Store the last session object so later keywords can inspect the
    # most recent response.
    session.last_resp = resp

    return resp
def _body_request(
        self,
        method_name,
        session,
        uri,
        data,
        json,
        params,
        files,
        headers,
        allow_redirects,
        timeout):
    # Shared driver for body-carrying verbs: method_name is "post",
    # "put" or "patch" and is resolved dynamically on the session.
    self._capture_output()

    method = getattr(session, method_name)
    resp = method(self._get_url(session, uri),
                  data=data,
                  json=json,
                  params=self._utf8_urlencode(params),
                  files=files,
                  headers=headers,
                  allow_redirects=allow_redirects,
                  timeout=self._get_timeout(timeout),
                  cookies=self.cookies,
                  verify=self.verify)

    self._print_debug()

    # Store the last session object
    session.last_resp = resp

    # Response body goes to the DEBUG log only, to keep INFO logs small.
    self.builtin.log(method_name + ' response: ' + resp.text, 'DEBUG')

    return resp
def _delete_request(
        self,
        session,
        uri,
        data,
        json,
        params,
        headers,
        allow_redirects,
        timeout):
    # DELETE may carry a body (`data`/`json`), unlike HEAD/OPTIONS.
    self._capture_output()

    resp = session.delete(self._get_url(session, uri),
                          data=data,
                          json=json,
                          params=self._utf8_urlencode(params),
                          headers=headers,
                          allow_redirects=allow_redirects,
                          timeout=self._get_timeout(timeout),
                          cookies=self.cookies,
                          verify=self.verify)

    self._print_debug()

    # Store the last session object
    session.last_resp = resp

    return resp
def _head_request(self, session, uri, headers, allow_redirects, timeout):
    # HEAD carries no body; only headers/redirect/timeout are relevant.
    self._capture_output()

    resp = session.head(self._get_url(session, uri),
                        headers=headers,
                        allow_redirects=allow_redirects,
                        timeout=self._get_timeout(timeout),
                        cookies=self.cookies,
                        verify=self.verify)

    self._print_debug()

    # Store the last session object
    session.last_resp = resp

    return resp
def _options_request(
        self,
        session,
        uri,
        headers,
        allow_redirects,
        timeout):
    # OPTIONS carries no body; mirrors _head_request otherwise.
    self._capture_output()

    resp = session.options(self._get_url(session, uri),
                           headers=headers,
                           cookies=self.cookies,
                           allow_redirects=allow_redirects,
                           timeout=self._get_timeout(timeout),
                           verify=self.verify)

    self._print_debug()

    # Store the last session object
    session.last_resp = resp

    return resp
def _get_url(self, session, uri):
"""
Helper method to get the full url
"""
url = session.url
if uri:
slash = '' if uri.startswith('/') else '/'
url = "%s%s%s" % (session.url, slash, uri)
return url
def _get_timeout(self, timeout):
return float(timeout) if timeout is not None else self.timeout
def _capture_output(self):
    # When http debugging is enabled, swap sys.stdout for a buffer so the
    # httplib debug output (which goes to stdout) can be collected and
    # later re-emitted by _print_debug.  Restored in _print_debug.
    if self.debug >= 1:
        self.http_log = WritableObject()
        sys.stdout = self.http_log
def _print_debug(self):
    # Counterpart of _capture_output: restore stdout and forward the
    # captured httplib chatter to the Robot DEBUG log.
    if self.debug >= 1:
        sys.stdout = sys.__stdout__  # Restore stdout
        if PY3:
            debug_info = ''.join(
                self.http_log.content).replace(
                '\\r',
                '').replace(
                '\'',
                '')
        else:
            # Python 2 path: 'string_escape' codec unescapes the captured
            # repr-style text before stripping quotes.
            debug_info = ''.join(
                self.http_log.content).replace(
                '\\r',
                '').decode('string_escape').replace(
                '\'',
                '')

        # Remove empty lines
        debug_info = "\n".join(
            [ll.rstrip() for ll in debug_info.splitlines() if ll.strip()])
        self.builtin.log(debug_info, 'DEBUG')
def _json_pretty_print(self, content):
"""
Pretty print a JSON object
``content`` JSON object to pretty print
"""
temp = json.loads(content)
return json.dumps(
temp,
sort_keys=True,
indent=4,
separators=(
',',
': '))
def _utf8_urlencode(self, data):
    # Text passes through as utf-8 bytes; dicts get their text values
    # utf-8 encoded and are then urlencoded; any other type is returned
    # untouched (e.g. already-encoded bytes, None, generators).
    if self._is_string_type(data):
        return data.encode('utf-8')

    if not isinstance(data, dict):
        return data

    utf8_data = {}
    for k, v in data.items():
        if self._is_string_type(v):
            v = v.encode('utf-8')
        utf8_data[k] = v
    return urlencode(utf8_data)
def _format_data_according_to_header(self, session, data, headers):
    # Serialise `data` to match the effective Content-Type (session
    # defaults merged with per-request headers).  Data that already looks
    # like JSON text is left alone.
    headers = self._merge_headers(session, headers)

    if data is not None and headers is not None and 'Content-Type' in headers and not self._is_json(data):
        if headers['Content-Type'].find("application/json") != -1:
            # Generators cannot be json-dumped; requests streams them as-is.
            if not isinstance(data, types.GeneratorType):
                data = json.dumps(data)
        elif headers['Content-Type'].find("application/x-www-form-urlencoded") != -1:
            data = self._utf8_urlencode(data)
    else:
        # No usable Content-Type: fall back to form-urlencoding.
        data = self._utf8_urlencode(data)

    return data
def _format_data_to_log_string_according_to_header(self, data, headers):
    # Produce a loggable representation of the request body: textual for
    # json/urlencoded payloads, a "<content-type>" placeholder otherwise.
    dataStr = "<empty>"

    if data is not None and headers is not None and 'Content-Type' in headers:
        if (headers['Content-Type'].find("application/json") != -1) or \
                (headers['Content-Type'].find("application/x-www-form-urlencoded") != -1):
            if isinstance(data, bytes):
                dataStr = data.decode('utf-8')
            else:
                dataStr = data
        else:
            dataStr = "<" + headers['Content-Type'] + ">"

    return dataStr
@staticmethod
def _merge_headers(session, headers):
if headers is None:
headers = {}
else:
headers = headers.copy()
headers.update(session.headers)
return headers
@staticmethod
def _is_json(data):
try:
json.loads(data)
except (TypeError, ValueError):
return False
return True
@staticmethod
def _is_string_type(data):
    """Return True for text types: ``str`` on Python 3, ``unicode`` on Python 2."""
    if PY3:
        return isinstance(data, str)
    return isinstance(data, unicode)
|
bulkan/robotframework-requests
|
src/RequestsLibrary/RequestsKeywords.py
|
RequestsKeywords.options
|
python
|
def options(
self,
alias,
uri,
headers=None,
allow_redirects=None,
timeout=None):
logger.warn("Deprecation Warning: Use Options Request in the future")
session = self._cache.switch(alias)
redir = True if allow_redirects is None else allow_redirects
response = self._options_request(session, uri, headers, redir, timeout)
return response
|
**Deprecated- See Options Request now**
Send an OPTIONS request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the OPTIONS request to
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``headers`` a dictionary of headers to use with the request
|
train
|
https://github.com/bulkan/robotframework-requests/blob/11baa3277f1cb728712e26d996200703c15254a8/src/RequestsLibrary/RequestsKeywords.py#L1017-L1042
|
[
"def _options_request(\n self,\n session,\n uri,\n headers,\n allow_redirects,\n timeout):\n self._capture_output()\n\n resp = session.options(self._get_url(session, uri),\n headers=headers,\n cookies=self.cookies,\n allow_redirects=allow_redirects,\n timeout=self._get_timeout(timeout),\n verify=self.verify)\n\n self._print_debug()\n\n # Store the last session object\n session.last_resp = resp\n\n return resp\n"
] |
class RequestsKeywords(object):
"""``RequestsLibrary`` is a [http://code.google.com/p/robotframework/|Robot Framework] test library that uses the [https://github.com/kennethreitz/requests|Requests] HTTP client.
Here is an example testcase
| ***** Settings ***** | | | | |
| Library | Collections | | | |
| Library | RequestsLibrary | | | |
| ***** Test Cases ***** | | | | |
| Get Requests | | | | |
| | Create Session | github | http://api.github.com | |
| | Create Session | google | http://www.google.com | |
| | ${resp}= | Get Request | google | / |
| | Should Be Equal As Strings | ${resp.status_code} | 200 | |
| | ${resp}= | Get Request | github | /users/bulkan |
| | Should Be Equal As Strings | ${resp.status_code} | 200 | |
| | Dictionary Should Contain Value | ${resp.json()} | Bulkan Savun Evcimen | |
"""
ROBOT_LIBRARY_SCOPE = 'Global'
def __init__(self):
    # Named cache of requests.Session objects, keyed by alias.
    self._cache = robot.utils.ConnectionCache('No sessions created')
    self.builtin = BuiltIn()
    # httplib debug verbosity; raised per-session in _create_session.
    self.debug = 0
def _create_session(
        self,
        alias,
        url,
        headers,
        cookies,
        auth,
        timeout,
        max_retries,
        backoff_factor,
        proxies,
        verify,
        debug,
        disable_warnings):
    """ Create Session: create a HTTP session to a server

    ``url`` Base url of the server

    ``alias`` Robot Framework alias to identify the session

    ``headers`` Dictionary of default headers

    ``cookies`` Dictionary of cookies

    ``auth`` Authentication object accepted by requests (basic, digest,
    NTLM or custom), or None

    ``timeout`` Connection timeout

    ``max_retries`` The maximum number of retries each connection should attempt.

    ``backoff_factor`` The pause between for each retry

    ``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication

    ``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.

    ``debug`` Enable http verbosity option more information
    https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel

    ``disable_warnings`` Disable requests warning useful when you have large number of testcases
    """

    self.builtin.log('Creating session: %s' % alias, 'DEBUG')
    s = session = requests.Session()
    s.headers.update(headers)
    s.auth = auth if auth else s.auth
    s.proxies = proxies if proxies else s.proxies

    try:
        max_retries = int(max_retries)
    except ValueError as err:
        raise ValueError("Error converting max_retries parameter: %s" % err)

    if max_retries > 0:
        http = requests.adapters.HTTPAdapter(max_retries=Retry(total=max_retries, backoff_factor=backoff_factor))
        https = requests.adapters.HTTPAdapter(max_retries=Retry(total=max_retries, backoff_factor=backoff_factor))

        # Replace the session's original adapters
        s.mount('http://', http)
        s.mount('https://', https)

    # Disable requests warnings, useful when you have large number of testcase
    # you will observe drastical changes in Robot log.html and output.xml files size
    if disable_warnings:
        logging.basicConfig()  # you need to initialize logging, otherwise you will not see anything from requests
        logging.getLogger().setLevel(logging.ERROR)
        requests_log = logging.getLogger("requests")
        requests_log.setLevel(logging.ERROR)
        requests_log.propagate = True
        if not verify:
            requests.packages.urllib3.disable_warnings()

    # verify can be a Boolean or a String
    if isinstance(verify, bool):
        s.verify = verify
    # NOTE(review): `unicode` is a Python 2 name; presumably aliased to
    # str elsewhere in this module for Python 3 — confirm, otherwise a
    # non-str, non-bool `verify` raises NameError on Python 3.
    elif isinstance(verify, str) or isinstance(verify, unicode):
        if verify.lower() == 'true' or verify.lower() == 'false':
            s.verify = self.builtin.convert_to_boolean(verify)
        else:
            # String for CA_BUNDLE, not a Boolean String
            s.verify = verify
    else:
        # not a Boolean nor a String
        s.verify = verify

    # cant pass these into the Session anymore
    self.timeout = float(timeout) if timeout is not None else None
    self.cookies = cookies
    # self.verify is only kept when verify is falsy/a path; True means
    # "use the session default" in later requests.
    self.verify = verify if self.builtin.convert_to_boolean(verify) != True else None

    # The base url is stored on the session itself; _get_url reads it.
    s.url = url

    # Enable http verbosity
    if int(debug) >= 1:
        self.debug = int(debug)
        httplib.HTTPConnection.debuglevel = self.debug

    self._cache.register(session, alias=alias)
    return session
def create_session(self, alias, url, headers={}, cookies={},
                   auth=None, timeout=None, proxies=None,
                   verify=False, debug=0, max_retries=3, backoff_factor=0.10, disable_warnings=0):
    """ Create Session: create a HTTP session to a server

    ``url`` Base url of the server

    ``alias`` Robot Framework alias to identify the session

    ``headers`` Dictionary of default headers

    ``cookies`` Dictionary of cookies

    ``auth`` List of username & password for HTTP Basic Auth

    ``timeout`` Connection timeout

    ``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication

    ``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.
             Defaults to False.

    ``debug`` Enable http verbosity option more information
            https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel

    ``max_retries`` The maximum number of retries each connection should attempt.

    ``backoff_factor`` The pause between for each retry

    ``disable_warnings`` Disable requests warning useful when you have large number of testcases
    """
    # NOTE(review): mutable default arguments ({} for headers/cookies)
    # are shared across calls; safe only while they are never mutated.
    auth = requests.auth.HTTPBasicAuth(*auth) if auth else None

    logger.info('Creating Session using : alias=%s, url=%s, headers=%s, \
    cookies=%s, auth=%s, timeout=%s, proxies=%s, verify=%s, \
    debug=%s ' % (alias, url, headers, cookies, auth, timeout,
                  proxies, verify, debug))

    return self._create_session(
        alias,
        url,
        headers,
        cookies,
        auth,
        timeout,
        max_retries,
        backoff_factor,
        proxies,
        verify,
        debug,
        disable_warnings)
def create_custom_session(
        self,
        alias,
        url,
        auth,
        headers={},
        cookies={},
        timeout=None,
        proxies=None,
        verify=False,
        debug=0,
        max_retries=3,
        backoff_factor=0.10,
        disable_warnings=0):
    """ Create Session: create a HTTP session to a server

    ``url`` Base url of the server

    ``alias`` Robot Framework alias to identify the session

    ``headers`` Dictionary of default headers

    ``cookies`` Dictionary of cookies

    ``auth`` A Custom Authentication object to be passed on to the requests library.
            http://docs.python-requests.org/en/master/user/advanced/#custom-authentication

    ``timeout`` Connection timeout

    ``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication

    ``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.
             Defaults to False.

    ``debug`` Enable http verbosity option more information
            https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel

    ``max_retries`` The maximum number of retries each connection should attempt.

    ``backoff_factor`` The pause between for each retry

    ``disable_warnings`` Disable requests warning useful when you have large number of testcases
    """
    # Unlike create_session, `auth` is passed through unchanged so any
    # requests-compatible auth object can be used.
    logger.info('Creating Custom Authenticated Session using : alias=%s, url=%s, headers=%s, \
    cookies=%s, auth=%s, timeout=%s, proxies=%s, verify=%s, \
    debug=%s ' % (alias, url, headers, cookies, auth, timeout,
                  proxies, verify, debug))

    return self._create_session(
        alias,
        url,
        headers,
        cookies,
        auth,
        timeout,
        max_retries,
        backoff_factor,
        proxies,
        verify,
        debug,
        disable_warnings)
def create_ntlm_session(
        self,
        alias,
        url,
        auth,
        headers={},
        cookies={},
        timeout=None,
        proxies=None,
        verify=False,
        debug=0,
        max_retries=3,
        backoff_factor=0.10,
        disable_warnings=0):
    """ Create Session: create a HTTP session to a server

    ``url`` Base url of the server

    ``alias`` Robot Framework alias to identify the session

    ``headers`` Dictionary of default headers

    ``cookies`` Dictionary of cookies

    ``auth`` ['DOMAIN', 'username', 'password'] for NTLM Authentication

    ``timeout`` Connection timeout

    ``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication

    ``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.
             Defaults to False.

    ``debug`` Enable http verbosity option more information
            https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel

    ``max_retries`` The maximum number of retries each connection should attempt.

    ``backoff_factor`` The pause between for each retry

    ``disable_warnings`` Disable requests warning useful when you have large number of testcases
    """
    # HttpNtlmAuth is an optional import; it is falsy when requests_ntlm
    # is not installed.
    if not HttpNtlmAuth:
        raise AssertionError('Requests NTLM module not loaded')
    elif len(auth) != 3:
        raise AssertionError('Incorrect number of authentication arguments'
                             ' - expected 3, got {}'.format(len(auth)))
    else:
        # NTLM expects "DOMAIN\\username" as the login.
        ntlm_auth = HttpNtlmAuth('{}\\{}'.format(auth[0], auth[1]),
                                 auth[2])
        logger.info('Creating NTLM Session using : alias=%s, url=%s, \
        headers=%s, cookies=%s, ntlm_auth=%s, timeout=%s, \
        proxies=%s, verify=%s, debug=%s '
                    % (alias, url, headers, cookies, ntlm_auth,
                       timeout, proxies, verify, debug))

        return self._create_session(
            alias,
            url,
            headers,
            cookies,
            ntlm_auth,
            timeout,
            max_retries,
            backoff_factor,
            proxies,
            verify,
            debug,
            disable_warnings)
def create_digest_session(self, alias, url, auth, headers={}, cookies={},
                          timeout=None, proxies=None, verify=False,
                          debug=0, max_retries=3,backoff_factor=0.10, disable_warnings=0):
    """ Create Session: create a HTTP session to a server

    ``url`` Base url of the server

    ``alias`` Robot Framework alias to identify the session

    ``headers`` Dictionary of default headers

    ``cookies`` Dictionary of cookies

    ``auth`` ['username', 'password'] for HTTP Digest Authentication

    ``timeout`` Connection timeout

    ``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication

    ``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.
             Defaults to False.

    ``debug`` Enable http verbosity option more information
            https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel

    ``max_retries`` The maximum number of retries each connection should attempt.

    ``backoff_factor`` The pause between for each retry

    ``disable_warnings`` Disable requests warning useful when you have large number of testcases
    """
    digest_auth = requests.auth.HTTPDigestAuth(*auth) if auth else None

    return self._create_session(
        alias,
        url,
        headers,
        cookies,
        digest_auth,
        timeout,
        max_retries,
        backoff_factor,
        proxies,
        verify,
        debug,
        disable_warnings)
def create_client_cert_session(self, alias, url, headers={}, cookies={},
                               client_certs=None, timeout=None, proxies=None,
                               verify=False, debug=0, max_retries=3, backoff_factor=0.10, disable_warnings=0):
    """ Create Session: create a HTTP session to a server

    ``url`` Base url of the server

    ``alias`` Robot Framework alias to identify the session

    ``headers`` Dictionary of default headers

    ``cookies`` Dictionary of cookies

    ``client_certs`` ['client certificate', 'client key'] PEM files containing the client key and certificate

    ``timeout`` Connection timeout

    ``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication

    ``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.
             Defaults to False.

    ``debug`` Enable http verbosity option more information
            https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel

    ``max_retries`` The maximum number of retries each connection should attempt.

    ``backoff_factor`` The pause between for each retry

    ``disable_warnings`` Disable requests warning useful when you have large number of testcases
    """
    logger.info('Creating Session using : alias=%s, url=%s, headers=%s, \
    cookies=%s, client_certs=%s, timeout=%s, proxies=%s, verify=%s, \
    debug=%s ' % (alias, url, headers, cookies, client_certs, timeout,
                  proxies, verify, debug))

    session = self._create_session(
        alias,
        url,
        headers,
        cookies,
        None,
        timeout,
        max_retries,
        backoff_factor,
        proxies,
        verify,
        debug,
        disable_warnings)

    # requests expects the client certificate as a (cert, key) tuple.
    session.cert = tuple(client_certs)
    return session
def delete_all_sessions(self):
    """ Removes all the session objects """
    logger.info('Delete All Sessions')
    # Drops every cached Session; aliases become invalid afterwards.
    self._cache.empty_cache()
def update_session(self, alias, headers=None, cookies=None):
    """Update Session Headers: update a HTTP Session's headers and cookies.

    ``alias`` Robot Framework alias to identify the session

    ``headers`` Dictionary of headers merge into session

    ``cookies`` Dictionary of cookies merge into session
    """
    session = self._cache.switch(alias)
    # merge_setting / merge_cookies come from requests.sessions and merge
    # the new values into the session's existing ones.
    session.headers = merge_setting(headers, session.headers)
    session.cookies = merge_cookies(session.cookies, cookies)
def to_json(self, content, pretty_print=False):
    """Convert a string to a JSON object.

    ``content`` String content to convert into JSON

    ``pretty_print`` If truthy, return a pretty-printed JSON *string*
    (via _json_pretty_print) instead of the parsed object
    """
    if PY3 and isinstance(content, bytes):
        content = content.decode(encoding='utf-8')
    result = self._json_pretty_print(content) if pretty_print else json.loads(content)
    logger.info('To JSON using : content=%s ' % (content))
    logger.info('To JSON using : pretty_print=%s ' % (pretty_print))
    return result
def get_request(
        self,
        alias,
        uri,
        headers=None,
        json=None,
        params=None,
        allow_redirects=None,
        timeout=None):
    """ Send a GET request on the session object found using the
    given `alias`

    ``alias`` that will be used to identify the Session object in the cache

    ``uri`` to send the GET request to

    ``params`` url parameters to append to the uri

    ``headers`` a dictionary of headers to use with the request

    ``json`` json data to send in the body of the :class:`Request`.

    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.

    ``timeout`` connection timeout
    """
    session = self._cache.switch(alias)
    # GET follows redirects by default.
    redir = True if allow_redirects is None else allow_redirects

    response = self._get_request(
        session, uri, params, headers, json, redir, timeout)

    logger.info(
        'Get Request using : alias=%s, uri=%s, headers=%s json=%s' %
        (alias, uri, headers, json))

    return response
def get(
        self,
        alias,
        uri,
        params=None,
        headers=None,
        allow_redirects=None,
        timeout=None):
    """ **Deprecated- See Get Request now**

    Send a GET request on the session object found using the
    given `alias`

    ``alias`` that will be used to identify the Session object in the cache

    ``uri`` to send the GET request to

    ``headers`` a dictionary of headers to use with the request

    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.

    ``timeout`` connection timeout
    """
    logger.warn("Deprecation Warning: Use Get Request in the future")
    session = self._cache.switch(alias)
    redir = True if allow_redirects is None else allow_redirects

    # BUG FIX: the arguments were previously passed in the wrong order —
    # `redir` landed in _get_request's `json` slot, `timeout` in
    # `allow_redirects`, and the global ``json`` module in `timeout`,
    # which made _get_timeout call float() on a module and crash.
    # _get_request's signature is
    # (session, uri, params, headers, json, allow_redirects, timeout);
    # this deprecated keyword sends no json body.
    response = self._get_request(
        session, uri, params, headers, None, redir, timeout)

    return response
def post_request(
        self,
        alias,
        uri,
        data=None,
        json=None,
        params=None,
        headers=None,
        files=None,
        allow_redirects=None,
        timeout=None):
    """ Send a POST request on the session object found using the
    given `alias`

    ``alias`` that will be used to identify the Session object in the cache

    ``uri`` to send the POST request to

    ``data`` a dictionary of key-value pairs that will be urlencoded
           and sent as POST data
           or binary data that is sent as the raw body content
           or passed as such for multipart form data if ``files`` is also
           defined

    ``json`` a value that will be json encoded
           and sent as POST data if files or data is not specified

    ``params`` url parameters to append to the uri

    ``headers`` a dictionary of headers to use with the request

    ``files`` a dictionary of file names containing file data to POST to the server

    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.

    ``timeout`` connection timeout
    """
    session = self._cache.switch(alias)
    # With multipart uploads (`files`), data must stay untouched so
    # requests can build the multipart body itself.
    if not files:
        data = self._format_data_according_to_header(session, data, headers)
    redir = True if allow_redirects is None else allow_redirects

    response = self._body_request(
        "post",
        session,
        uri,
        data,
        json,
        params,
        files,
        headers,
        redir,
        timeout)
    dataStr = self._format_data_to_log_string_according_to_header(data, headers)
    logger.info('Post Request using : alias=%s, uri=%s, data=%s, headers=%s, files=%s, allow_redirects=%s '
                % (alias, uri, dataStr, headers, files, redir))
    return response
def post(
        self,
        alias,
        uri,
        data={},
        headers=None,
        files=None,
        allow_redirects=None,
        timeout=None):
    """ **Deprecated- See Post Request now**

    Send a POST request on the session object found using the
    given `alias`

    ``alias`` that will be used to identify the Session object in the cache

    ``uri`` to send the GET request to

    ``data`` a dictionary of key-value pairs that will be urlencoded
           and sent as POST data
           or binary data that is sent as the raw body content

    ``headers`` a dictionary of headers to use with the request

    ``files`` a dictionary of file names containing file data to POST to the server

    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.

    ``timeout`` connection timeout
    """
    logger.warn("Deprecation Warning: Use Post Request in the future")
    session = self._cache.switch(alias)
    # Deprecated path always urlencodes, ignoring the Content-Type header.
    data = self._utf8_urlencode(data)
    redir = True if allow_redirects is None else allow_redirects

    response = self._body_request(
        "post",
        session,
        uri,
        data,
        None,
        None,
        files,
        headers,
        redir,
        timeout)

    return response
def patch_request(
        self,
        alias,
        uri,
        data=None,
        json=None,
        params=None,
        headers=None,
        files=None,
        allow_redirects=None,
        timeout=None):
    """ Send a PATCH request on the session object found using the
    given `alias`

    ``alias`` that will be used to identify the Session object in the cache

    ``uri`` to send the PATCH request to

    ``data`` a dictionary of key-value pairs that will be urlencoded
           and sent as PATCH data
           or binary data that is sent as the raw body content

    ``json`` a value that will be json encoded
           and sent as PATCH data if data is not specified

    ``headers`` a dictionary of headers to use with the request

    ``files`` a dictionary of file names containing file data to PATCH to the server

    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.

    ``params`` url parameters to append to the uri

    ``timeout`` connection timeout
    """
    session = self._cache.switch(alias)
    data = self._format_data_according_to_header(session, data, headers)
    redir = True if allow_redirects is None else allow_redirects

    response = self._body_request(
        "patch",
        session,
        uri,
        data,
        json,
        params,
        files,
        headers,
        redir,
        timeout)

    # Decode only for logging; the request body has already been sent.
    if isinstance(data, bytes):
        data = data.decode('utf-8')
    logger.info('Patch Request using : alias=%s, uri=%s, data=%s, \
    headers=%s, files=%s, allow_redirects=%s '
                % (alias, uri, data, headers, files, redir))

    return response
def patch(
        self,
        alias,
        uri,
        data={},
        headers=None,
        files={},
        allow_redirects=None,
        timeout=None):
    """ **Deprecated- See Patch Request now**

    Send a PATCH request on the session object found using the
    given `alias`

    ``alias`` that will be used to identify the Session object in the cache

    ``uri`` to send the PATCH request to

    ``data`` a dictionary of key-value pairs that will be urlencoded
           and sent as PATCH data
           or binary data that is sent as the raw body content

    ``headers`` a dictionary of headers to use with the request

    ``files`` a dictionary of file names containing file data to PATCH to the server

    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.

    ``timeout`` connection timeout
    """
    logger.warn("Deprecation Warning: Use Patch Request in the future")
    session = self._cache.switch(alias)
    # Deprecated path always urlencodes, ignoring the Content-Type header.
    data = self._utf8_urlencode(data)
    redir = True if allow_redirects is None else allow_redirects

    response = self._body_request(
        "patch",
        session,
        uri,
        data,
        None,
        None,
        files,
        headers,
        redir,
        timeout)

    return response
def put_request(
        self,
        alias,
        uri,
        data=None,
        json=None,
        params=None,
        files=None,
        headers=None,
        allow_redirects=None,
        timeout=None):
    """ Send a PUT request on the session object found using the
    given `alias`

    ``alias`` that will be used to identify the Session object in the cache

    ``uri`` to send the PUT request to

    ``data`` a dictionary of key-value pairs that will be urlencoded
           and sent as PUT data
           or binary data that is sent as the raw body content

    ``json`` a value that will be json encoded
           and sent as PUT data if data is not specified

    ``headers`` a dictionary of headers to use with the request

    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.

    ``params`` url parameters to append to the uri

    ``timeout`` connection timeout
    """
    session = self._cache.switch(alias)
    data = self._format_data_according_to_header(session, data, headers)
    redir = True if allow_redirects is None else allow_redirects

    response = self._body_request(
        "put",
        session,
        uri,
        data,
        json,
        params,
        files,
        headers,
        redir,
        timeout)

    # Decode only for logging; the request body has already been sent.
    if isinstance(data, bytes):
        data = data.decode('utf-8')
    logger.info('Put Request using : alias=%s, uri=%s, data=%s, \
    headers=%s, allow_redirects=%s ' % (alias, uri, data, headers, redir))

    return response
def put(
        self,
        alias,
        uri,
        data=None,
        headers=None,
        allow_redirects=None,
        timeout=None):
    """ **Deprecated- See Put Request now**

    Send a PUT request on the session object found using the
    given `alias`

    ``alias`` that will be used to identify the Session object in the cache

    ``uri`` to send the PUT request to

    ``headers`` a dictionary of headers to use with the request

    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.

    ``timeout`` connection timeout
    """
    logger.warn("Deprecation Warning: Use Put Request in the future")
    session = self._cache.switch(alias)
    # Deprecated path always urlencodes, ignoring the Content-Type header.
    data = self._utf8_urlencode(data)
    redir = True if allow_redirects is None else allow_redirects

    response = self._body_request(
        "put",
        session,
        uri,
        data,
        None,
        None,
        None,
        headers,
        redir,
        timeout)

    return response
def delete_request(
        self,
        alias,
        uri,
        data=None,
        json=None,
        params=None,
        headers=None,
        allow_redirects=None,
        timeout=None):
    """ Send a DELETE request on the session object found using the
    given `alias`

    ``alias`` that will be used to identify the Session object in the cache

    ``uri`` to send the DELETE request to

    ``json`` a value that will be json encoded
           and sent as request data if data is not specified

    ``headers`` a dictionary of headers to use with the request

    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.

    ``timeout`` connection timeout
    """
    session = self._cache.switch(alias)
    data = self._format_data_according_to_header(session, data, headers)
    redir = True if allow_redirects is None else allow_redirects

    response = self._delete_request(
        session, uri, data, json, params, headers, redir, timeout)

    # Decode only for logging; the request body has already been sent.
    if isinstance(data, bytes):
        data = data.decode('utf-8')
    logger.info('Delete Request using : alias=%s, uri=%s, data=%s, \
    headers=%s, allow_redirects=%s ' % (alias, uri, data, headers, redir))

    return response
def delete(
        self,
        alias,
        uri,
        data=(),
        headers=None,
        allow_redirects=None,
        timeout=None):
    """ * * *   Deprecated- See Delete Request now   * * *

    Send a DELETE request on the session object found using the
    given `alias`

    ``alias`` that will be used to identify the Session object in the cache

    ``uri`` to send the DELETE request to

    ``headers`` a dictionary of headers to use with the request

    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.

    ``timeout`` connection timeout
    """
    logger.warn("Deprecation Warning: Use Delete Request in the future")
    session = self._cache.switch(alias)
    data = self._utf8_urlencode(data)
    redir = True if allow_redirects is None else allow_redirects

    # BUG FIX: the global ``json`` *module object* was previously passed
    # as _delete_request's `json` argument, so requests tried to
    # serialise a module as the request body.  This deprecated keyword
    # only supports urlencoded `data`, so no json payload is sent.
    response = self._delete_request(
        session, uri, data, None, None, headers, redir, timeout)

    return response
def head_request(
        self,
        alias,
        uri,
        headers=None,
        allow_redirects=None,
        timeout=None):
    """ Send a HEAD request on the session object found using the
    given `alias`

    ``alias`` that will be used to identify the Session object in the cache

    ``uri`` to send the HEAD request to

    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.

    ``headers`` a dictionary of headers to use with the request
    """
    session = self._cache.switch(alias)
    # Unlike the other verbs, HEAD defaults to NOT following redirects.
    redir = False if allow_redirects is None else allow_redirects
    response = self._head_request(session, uri, headers, redir, timeout)
    logger.info('Head Request using : alias=%s, uri=%s, headers=%s, \
    allow_redirects=%s ' % (alias, uri, headers, redir))

    return response
def head(
        self,
        alias,
        uri,
        headers=None,
        allow_redirects=None,
        timeout=None):
    """ **Deprecated- See Head Request now**

    Send a HEAD request on the session object found using the
    given `alias`

    ``alias`` that will be used to identify the Session object in the cache

    ``uri`` to send the HEAD request to

    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.

    ``headers`` a dictionary of headers to use with the request
    """
    logger.warn("Deprecation Warning: Use Head Request in the future")
    session = self._cache.switch(alias)
    # HEAD does not follow redirects unless explicitly requested.
    redir = False if allow_redirects is None else allow_redirects
    response = self._head_request(session, uri, headers, redir, timeout)

    return response
def options_request(
        self,
        alias,
        uri,
        headers=None,
        allow_redirects=None,
        timeout=None):
    """ Send an OPTIONS request on the session object found using the
    given `alias`

    ``alias`` that will be used to identify the Session object in the cache

    ``uri`` to send the OPTIONS request to

    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.

    ``headers`` a dictionary of headers to use with the request
    """
    session = self._cache.switch(alias)
    # OPTIONS follows redirects by default.
    redir = True if allow_redirects is None else allow_redirects
    response = self._options_request(session, uri, headers, redir, timeout)
    logger.info(
        'Options Request using : alias=%s, uri=%s, headers=%s, allow_redirects=%s ' %
        (alias, uri, headers, redir))

    return response
def _get_request(self, session, uri, params, headers, json, allow_redirects, timeout):
    """Issue a GET on *session* and remember the response.

    Captures httplib debug output around the call when debugging is on,
    and stores the response as ``session.last_resp``.
    """
    self._capture_output()
    full_url = self._get_url(session, uri)
    response = session.get(
        full_url,
        headers=headers,
        json=json,
        params=self._utf8_urlencode(params),
        allow_redirects=allow_redirects,
        timeout=self._get_timeout(timeout),
        cookies=self.cookies,
        verify=self.verify)
    self._print_debug()
    # Keep the most recent response reachable from the session object.
    session.last_resp = response
    return response
def _body_request(self, method_name, session, uri, data, json, params, files,
                  headers, allow_redirects, timeout):
    """Issue a body-carrying request (post/put/patch) on *session*.

    ``method_name`` names the requests.Session method to invoke. The
    response is stored as ``session.last_resp`` and its text is logged
    at DEBUG level.
    """
    self._capture_output()
    requester = getattr(session, method_name)
    response = requester(
        self._get_url(session, uri),
        data=data,
        json=json,
        params=self._utf8_urlencode(params),
        files=files,
        headers=headers,
        allow_redirects=allow_redirects,
        timeout=self._get_timeout(timeout),
        cookies=self.cookies,
        verify=self.verify)
    self._print_debug()
    # Keep the most recent response reachable from the session object.
    session.last_resp = response
    self.builtin.log(method_name + ' response: ' + response.text, 'DEBUG')
    return response
def _delete_request(self, session, uri, data, json, params, headers,
                    allow_redirects, timeout):
    """Issue a DELETE on *session*; stores the response as ``session.last_resp``."""
    self._capture_output()
    response = session.delete(
        self._get_url(session, uri),
        data=data,
        json=json,
        params=self._utf8_urlencode(params),
        headers=headers,
        allow_redirects=allow_redirects,
        timeout=self._get_timeout(timeout),
        cookies=self.cookies,
        verify=self.verify)
    self._print_debug()
    # Keep the most recent response reachable from the session object.
    session.last_resp = response
    return response
def _head_request(self, session, uri, headers, allow_redirects, timeout):
    """Issue a HEAD on *session*; stores the response as ``session.last_resp``."""
    self._capture_output()
    response = session.head(
        self._get_url(session, uri),
        headers=headers,
        allow_redirects=allow_redirects,
        timeout=self._get_timeout(timeout),
        cookies=self.cookies,
        verify=self.verify)
    self._print_debug()
    # Keep the most recent response reachable from the session object.
    session.last_resp = response
    return response
def _options_request(self, session, uri, headers, allow_redirects, timeout):
    """Issue an OPTIONS on *session*; stores the response as ``session.last_resp``."""
    self._capture_output()
    response = session.options(
        self._get_url(session, uri),
        headers=headers,
        cookies=self.cookies,
        allow_redirects=allow_redirects,
        timeout=self._get_timeout(timeout),
        verify=self.verify)
    self._print_debug()
    # Keep the most recent response reachable from the session object.
    session.last_resp = response
    return response
def _get_url(self, session, uri):
"""
Helper method to get the full url
"""
url = session.url
if uri:
slash = '' if uri.startswith('/') else '/'
url = "%s%s%s" % (session.url, slash, uri)
return url
def _get_timeout(self, timeout):
return float(timeout) if timeout is not None else self.timeout
def _capture_output(self):
    """Redirect stdout into ``self.http_log`` when HTTP debugging is enabled.

    httplib writes its debug trace to stdout, so capturing stdout is how
    the trace is collected; _print_debug restores it and logs the capture.
    """
    if self.debug < 1:
        return
    self.http_log = WritableObject()
    sys.stdout = self.http_log
def _print_debug(self):
# Counterpart to _capture_output: restore the real stdout and emit the
# captured httplib debug text to the Robot Framework log at DEBUG level.
if self.debug >= 1:
sys.stdout = sys.__stdout__ # Restore stdout
# The captured chunks carry repr()-style noise: strip escaped CRs and
# single quotes. The py2 branch additionally undoes string-escape
# encoding before stripping quotes.
if PY3:
debug_info = ''.join(
self.http_log.content).replace(
'\\r',
'').replace(
'\'',
'')
else:
debug_info = ''.join(
self.http_log.content).replace(
'\\r',
'').decode('string_escape').replace(
'\'',
'')
# Remove empty lines
debug_info = "\n".join(
[ll.rstrip() for ll in debug_info.splitlines() if ll.strip()])
self.builtin.log(debug_info, 'DEBUG')
def _json_pretty_print(self, content):
"""
Pretty print a JSON object
``content`` JSON object to pretty print
"""
temp = json.loads(content)
return json.dumps(
temp,
sort_keys=True,
indent=4,
separators=(
',',
': '))
def _utf8_urlencode(self, data):
    """Prepare *data* for transmission.

    Text strings are UTF-8 encoded; dicts have their text values UTF-8
    encoded and are then urlencoded; anything else is passed through.
    """
    if self._is_string_type(data):
        return data.encode('utf-8')
    if not isinstance(data, dict):
        return data
    encoded = {
        key: (value.encode('utf-8') if self._is_string_type(value) else value)
        for key, value in data.items()
    }
    return urlencode(encoded)
def _format_data_according_to_header(self, session, data, headers):
    """Serialize *data* based on the merged session/request Content-Type.

    JSON content types get ``json.dumps`` (generators are left intact for
    streaming); form-encoded and unknown/absent content types fall back to
    ``_utf8_urlencode``. Data that already parses as JSON is left as text.
    """
    merged = self._merge_headers(session, headers)
    has_usable_ctype = (
        data is not None
        and merged is not None
        and 'Content-Type' in merged
        and not self._is_json(data)
    )
    if has_usable_ctype:
        ctype = merged['Content-Type']
        if "application/json" in ctype:
            if not isinstance(data, types.GeneratorType):
                data = json.dumps(data)
        elif "application/x-www-form-urlencoded" in ctype:
            data = self._utf8_urlencode(data)
    else:
        data = self._utf8_urlencode(data)
    return data
def _format_data_to_log_string_according_to_header(self, data, headers):
dataStr = "<empty>"
if data is not None and headers is not None and 'Content-Type' in headers:
if (headers['Content-Type'].find("application/json") != -1) or \
(headers['Content-Type'].find("application/x-www-form-urlencoded") != -1):
if isinstance(data, bytes):
dataStr = data.decode('utf-8')
else:
dataStr = data
else:
dataStr = "<" + headers['Content-Type'] + ">"
return dataStr
@staticmethod
def _merge_headers(session, headers):
if headers is None:
headers = {}
else:
headers = headers.copy()
headers.update(session.headers)
return headers
@staticmethod
def _is_json(data):
try:
json.loads(data)
except (TypeError, ValueError):
return False
return True
@staticmethod
def _is_string_type(data):
    """Return True for text strings: ``str`` on Python 3, ``unicode`` on Python 2."""
    text_type = str if PY3 else unicode
    return isinstance(data, text_type)
|
bulkan/robotframework-requests
|
src/RequestsLibrary/RequestsKeywords.py
|
RequestsKeywords._get_url
|
python
|
def _get_url(self, session, uri):
url = session.url
if uri:
slash = '' if uri.startswith('/') else '/'
url = "%s%s%s" % (session.url, slash, uri)
return url
|
Helper method to get the full url
|
train
|
https://github.com/bulkan/robotframework-requests/blob/11baa3277f1cb728712e26d996200703c15254a8/src/RequestsLibrary/RequestsKeywords.py#L1174-L1182
| null |
class RequestsKeywords(object):
"""``RequestsLibrary`` is a [http://code.google.com/p/robotframework/|Robot Framework] test library that uses the [https://github.com/kennethreitz/requests|Requests] HTTP client.
Here is an example testcase
| ***** Settings ***** | | | | |
| Library | Collections | | | |
| Library | RequestsLibrary | | | |
| ***** Test Cases ***** | | | | |
| Get Requests | | | | |
| | Create Session | github | http://api.github.com | |
| | Create Session | google | http://www.google.com | |
| | ${resp}= | Get Request | google | / |
| | Should Be Equal As Strings | ${resp.status_code} | 200 | |
| | ${resp}= | Get Request | github | /users/bulkan |
| | Should Be Equal As Strings | ${resp.status_code} | 200 | |
| | Dictionary Should Contain Value | ${resp.json()} | Bulkan Savun Evcimen | |
"""
ROBOT_LIBRARY_SCOPE = 'Global'
def __init__(self):
self._cache = robot.utils.ConnectionCache('No sessions created')
self.builtin = BuiltIn()
self.debug = 0
def _create_session(
self,
alias,
url,
headers,
cookies,
auth,
timeout,
max_retries,
backoff_factor,
proxies,
verify,
debug,
disable_warnings):
""" Create Session: create a HTTP session to a server
``url`` Base url of the server
``alias`` Robot Framework alias to identify the session
``headers`` Dictionary of default headers
``cookies`` Dictionary of cookies
``auth`` List of username & password for HTTP Basic Auth
``timeout`` Connection timeout
``max_retries`` The maximum number of retries each connection should attempt.
``backoff_factor`` The pause between for each retry
``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication
``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.
``debug`` Enable http verbosity option more information
https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel
``disable_warnings`` Disable requests warning useful when you have large number of testcases
"""
self.builtin.log('Creating session: %s' % alias, 'DEBUG')
s = session = requests.Session()
s.headers.update(headers)
s.auth = auth if auth else s.auth
s.proxies = proxies if proxies else s.proxies
try:
max_retries = int(max_retries)
except ValueError as err:
raise ValueError("Error converting max_retries parameter: %s" % err)
if max_retries > 0:
http = requests.adapters.HTTPAdapter(max_retries=Retry(total=max_retries, backoff_factor=backoff_factor))
https = requests.adapters.HTTPAdapter(max_retries=Retry(total=max_retries, backoff_factor=backoff_factor))
# Replace the session's original adapters
s.mount('http://', http)
s.mount('https://', https)
# Disable requests warnings, useful when you have large number of testcase
# you will observe drastical changes in Robot log.html and output.xml files size
if disable_warnings:
logging.basicConfig() # you need to initialize logging, otherwise you will not see anything from requests
logging.getLogger().setLevel(logging.ERROR)
requests_log = logging.getLogger("requests")
requests_log.setLevel(logging.ERROR)
requests_log.propagate = True
if not verify:
requests.packages.urllib3.disable_warnings()
# verify can be a Boolean or a String
if isinstance(verify, bool):
s.verify = verify
elif isinstance(verify, str) or isinstance(verify, unicode):
if verify.lower() == 'true' or verify.lower() == 'false':
s.verify = self.builtin.convert_to_boolean(verify)
else:
# String for CA_BUNDLE, not a Boolean String
s.verify = verify
else:
# not a Boolean nor a String
s.verify = verify
# cant pass these into the Session anymore
self.timeout = float(timeout) if timeout is not None else None
self.cookies = cookies
self.verify = verify if self.builtin.convert_to_boolean(verify) != True else None
s.url = url
# Enable http verbosity
if int(debug) >= 1:
self.debug = int(debug)
httplib.HTTPConnection.debuglevel = self.debug
self._cache.register(session, alias=alias)
return session
def create_session(self, alias, url, headers={}, cookies={},
auth=None, timeout=None, proxies=None,
verify=False, debug=0, max_retries=3, backoff_factor=0.10, disable_warnings=0):
""" Create Session: create a HTTP session to a server
``url`` Base url of the server
``alias`` Robot Framework alias to identify the session
``headers`` Dictionary of default headers
``cookies`` Dictionary of cookies
``auth`` List of username & password for HTTP Basic Auth
``timeout`` Connection timeout
``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication
``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.
Defaults to False.
``debug`` Enable http verbosity option more information
https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel
``max_retries`` The maximum number of retries each connection should attempt.
``backoff_factor`` The pause between for each retry
``disable_warnings`` Disable requests warning useful when you have large number of testcases
"""
auth = requests.auth.HTTPBasicAuth(*auth) if auth else None
logger.info('Creating Session using : alias=%s, url=%s, headers=%s, \
cookies=%s, auth=%s, timeout=%s, proxies=%s, verify=%s, \
debug=%s ' % (alias, url, headers, cookies, auth, timeout,
proxies, verify, debug))
return self._create_session(
alias,
url,
headers,
cookies,
auth,
timeout,
max_retries,
backoff_factor,
proxies,
verify,
debug,
disable_warnings)
def create_custom_session(
self,
alias,
url,
auth,
headers={},
cookies={},
timeout=None,
proxies=None,
verify=False,
debug=0,
max_retries=3,
backoff_factor=0.10,
disable_warnings=0):
""" Create Session: create a HTTP session to a server
``url`` Base url of the server
``alias`` Robot Framework alias to identify the session
``headers`` Dictionary of default headers
``cookies`` Dictionary of cookies
``auth`` A Custom Authentication object to be passed on to the reqests library.
http://docs.python-requests.org/en/master/user/advanced/#custom-authentication
``timeout`` Connection timeout
``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication
``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.
Defaults to False.
``debug`` Enable http verbosity option more information
https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel
``max_retries`` The maximum number of retries each connection should attempt.
``backoff_factor`` The pause between for each retry
``disable_warnings`` Disable requests warning useful when you have large number of testcases
"""
logger.info('Creating Custom Authenticated Session using : alias=%s, url=%s, headers=%s, \
cookies=%s, auth=%s, timeout=%s, proxies=%s, verify=%s, \
debug=%s ' % (alias, url, headers, cookies, auth, timeout,
proxies, verify, debug))
return self._create_session(
alias,
url,
headers,
cookies,
auth,
timeout,
max_retries,
backoff_factor,
proxies,
verify,
debug,
disable_warnings)
def create_ntlm_session(
self,
alias,
url,
auth,
headers={},
cookies={},
timeout=None,
proxies=None,
verify=False,
debug=0,
max_retries=3,
backoff_factor=0.10,
disable_warnings=0):
""" Create Session: create a HTTP session to a server
``url`` Base url of the server
``alias`` Robot Framework alias to identify the session
``headers`` Dictionary of default headers
``cookies`` Dictionary of cookies
``auth`` ['DOMAIN', 'username', 'password'] for NTLM Authentication
``timeout`` Connection timeout
``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication
``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.
Defaults to False.
``debug`` Enable http verbosity option more information
https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel
``max_retries`` The maximum number of retries each connection should attempt.
``backoff_factor`` The pause between for each retry
``disable_warnings`` Disable requests warning useful when you have large number of testcases
"""
if not HttpNtlmAuth:
raise AssertionError('Requests NTLM module not loaded')
elif len(auth) != 3:
raise AssertionError('Incorrect number of authentication arguments'
' - expected 3, got {}'.format(len(auth)))
else:
ntlm_auth = HttpNtlmAuth('{}\\{}'.format(auth[0], auth[1]),
auth[2])
logger.info('Creating NTLM Session using : alias=%s, url=%s, \
headers=%s, cookies=%s, ntlm_auth=%s, timeout=%s, \
proxies=%s, verify=%s, debug=%s '
% (alias, url, headers, cookies, ntlm_auth,
timeout, proxies, verify, debug))
return self._create_session(
alias,
url,
headers,
cookies,
ntlm_auth,
timeout,
max_retries,
backoff_factor,
proxies,
verify,
debug,
disable_warnings)
def create_digest_session(self, alias, url, auth, headers={}, cookies={},
timeout=None, proxies=None, verify=False,
debug=0, max_retries=3,backoff_factor=0.10, disable_warnings=0):
""" Create Session: create a HTTP session to a server
``url`` Base url of the server
``alias`` Robot Framework alias to identify the session
``headers`` Dictionary of default headers
``cookies`` Dictionary of cookies
``auth`` ['DOMAIN', 'username', 'password'] for NTLM Authentication
``timeout`` Connection timeout
``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication
``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.
Defaults to False.
``debug`` Enable http verbosity option more information
https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel
``max_retries`` The maximum number of retries each connection should attempt.
``backoff_factor`` The pause between for each retry
``disable_warnings`` Disable requests warning useful when you have large number of testcases
"""
digest_auth = requests.auth.HTTPDigestAuth(*auth) if auth else None
return self._create_session(
alias,
url,
headers,
cookies,
digest_auth,
timeout,
max_retries,
backoff_factor,
proxies,
verify,
debug,
disable_warnings)
def create_client_cert_session(self, alias, url, headers={}, cookies={},
client_certs=None, timeout=None, proxies=None,
verify=False, debug=0, max_retries=3, backoff_factor=0.10, disable_warnings=0):
""" Create Session: create a HTTP session to a server
``url`` Base url of the server
``alias`` Robot Framework alias to identify the session
``headers`` Dictionary of default headers
``cookies`` Dictionary of cookies
``client_certs`` ['client certificate', 'client key'] PEM files containing the client key and certificate
``timeout`` Connection timeout
``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication
``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.
Defaults to False.
``debug`` Enable http verbosity option more information
https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel
``max_retries`` The maximum number of retries each connection should attempt.
``backoff_factor`` The pause between for each retry
``disable_warnings`` Disable requests warning useful when you have large number of testcases
"""
logger.info('Creating Session using : alias=%s, url=%s, headers=%s, \
cookies=%s, client_certs=%s, timeout=%s, proxies=%s, verify=%s, \
debug=%s ' % (alias, url, headers, cookies, client_certs, timeout,
proxies, verify, debug))
session = self._create_session(
alias,
url,
headers,
cookies,
None,
timeout,
max_retries,
backoff_factor,
proxies,
verify,
debug,
disable_warnings)
session.cert = tuple(client_certs)
return session
def delete_all_sessions(self):
""" Removes all the session objects """
logger.info('Delete All Sessions')
self._cache.empty_cache()
def update_session(self, alias, headers=None, cookies=None):
"""Update Session Headers: update a HTTP Session Headers
``alias`` Robot Framework alias to identify the session
``headers`` Dictionary of headers merge into session
"""
session = self._cache.switch(alias)
session.headers = merge_setting(headers, session.headers)
session.cookies = merge_cookies(session.cookies, cookies)
def to_json(self, content, pretty_print=False):
""" Convert a string to a JSON object
``content`` String content to convert into JSON
``pretty_print`` If defined, will output JSON is pretty print format
"""
if PY3:
if isinstance(content, bytes):
content = content.decode(encoding='utf-8')
if pretty_print:
json_ = self._json_pretty_print(content)
else:
json_ = json.loads(content)
logger.info('To JSON using : content=%s ' % (content))
logger.info('To JSON using : pretty_print=%s ' % (pretty_print))
return json_
def get_request(
self,
alias,
uri,
headers=None,
json=None,
params=None,
allow_redirects=None,
timeout=None):
""" Send a GET request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the GET request to
``params`` url parameters to append to the uri
``headers`` a dictionary of headers to use with the request
``json`` json data to send in the body of the :class:`Request`.
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``timeout`` connection timeout
"""
session = self._cache.switch(alias)
redir = True if allow_redirects is None else allow_redirects
response = self._get_request(
session, uri, params, headers, json, redir, timeout)
logger.info(
'Get Request using : alias=%s, uri=%s, headers=%s json=%s' %
(alias, uri, headers, json))
return response
def get(
        self,
        alias,
        uri,
        params=None,
        headers=None,
        allow_redirects=None,
        timeout=None):
    """ **Deprecated- See Get Request now**

    Send a GET request on the session object found using the
    given `alias`

    ``alias`` that will be used to identify the Session object in the cache

    ``uri`` to send the GET request to

    ``headers`` a dictionary of headers to use with the request

    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.

    ``timeout`` connection timeout
    """
    logger.warn("Deprecation Warning: Use Get Request in the future")
    session = self._cache.switch(alias)
    redir = True if allow_redirects is None else allow_redirects
    # BUG FIX: arguments were previously passed as
    # (session, uri, params, headers, redir, timeout, json), but
    # _get_request's signature is
    # (session, uri, params, headers, json, allow_redirects, timeout) —
    # so redir was bound to the json body, timeout to allow_redirects,
    # and the json *module* to timeout. This deprecated keyword has no
    # json body, so pass None explicitly.
    response = self._get_request(
        session, uri, params, headers, None, redir, timeout)
    return response
def post_request(
self,
alias,
uri,
data=None,
json=None,
params=None,
headers=None,
files=None,
allow_redirects=None,
timeout=None):
""" Send a POST request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the POST request to
``data`` a dictionary of key-value pairs that will be urlencoded
and sent as POST data
or binary data that is sent as the raw body content
or passed as such for multipart form data if ``files`` is also
defined
``json`` a value that will be json encoded
and sent as POST data if files or data is not specified
``params`` url parameters to append to the uri
``headers`` a dictionary of headers to use with the request
``files`` a dictionary of file names containing file data to POST to the server
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``timeout`` connection timeout
"""
session = self._cache.switch(alias)
if not files:
data = self._format_data_according_to_header(session, data, headers)
redir = True if allow_redirects is None else allow_redirects
response = self._body_request(
"post",
session,
uri,
data,
json,
params,
files,
headers,
redir,
timeout)
dataStr = self._format_data_to_log_string_according_to_header(data, headers)
logger.info('Post Request using : alias=%s, uri=%s, data=%s, headers=%s, files=%s, allow_redirects=%s '
% (alias, uri, dataStr, headers, files, redir))
return response
def post(
self,
alias,
uri,
data={},
headers=None,
files=None,
allow_redirects=None,
timeout=None):
""" **Deprecated- See Post Request now**
Send a POST request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the GET request to
``data`` a dictionary of key-value pairs that will be urlencoded
and sent as POST data
or binary data that is sent as the raw body content
``headers`` a dictionary of headers to use with the request
``files`` a dictionary of file names containing file data to POST to the server
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``timeout`` connection timeout
"""
logger.warn("Deprecation Warning: Use Post Request in the future")
session = self._cache.switch(alias)
data = self._utf8_urlencode(data)
redir = True if allow_redirects is None else allow_redirects
response = self._body_request(
"post",
session,
uri,
data,
None,
None,
files,
headers,
redir,
timeout)
return response
def patch_request(
self,
alias,
uri,
data=None,
json=None,
params=None,
headers=None,
files=None,
allow_redirects=None,
timeout=None):
""" Send a PATCH request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the PATCH request to
``data`` a dictionary of key-value pairs that will be urlencoded
and sent as PATCH data
or binary data that is sent as the raw body content
``json`` a value that will be json encoded
and sent as PATCH data if data is not specified
``headers`` a dictionary of headers to use with the request
``files`` a dictionary of file names containing file data to PATCH to the server
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``params`` url parameters to append to the uri
``timeout`` connection timeout
"""
session = self._cache.switch(alias)
data = self._format_data_according_to_header(session, data, headers)
redir = True if allow_redirects is None else allow_redirects
response = self._body_request(
"patch",
session,
uri,
data,
json,
params,
files,
headers,
redir,
timeout)
if isinstance(data, bytes):
data = data.decode('utf-8')
logger.info('Patch Request using : alias=%s, uri=%s, data=%s, \
headers=%s, files=%s, allow_redirects=%s '
% (alias, uri, data, headers, files, redir))
return response
def patch(
self,
alias,
uri,
data={},
headers=None,
files={},
allow_redirects=None,
timeout=None):
""" **Deprecated- See Patch Request now**
Send a PATCH request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the PATCH request to
``data`` a dictionary of key-value pairs that will be urlencoded
and sent as PATCH data
or binary data that is sent as the raw body content
``headers`` a dictionary of headers to use with the request
``files`` a dictionary of file names containing file data to PATCH to the server
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``timeout`` connection timeout
"""
logger.warn("Deprecation Warning: Use Patch Request in the future")
session = self._cache.switch(alias)
data = self._utf8_urlencode(data)
redir = True if allow_redirects is None else allow_redirects
response = self._body_request(
"patch",
session,
uri,
data,
None,
None,
files,
headers,
redir,
timeout)
return response
def put_request(
self,
alias,
uri,
data=None,
json=None,
params=None,
files=None,
headers=None,
allow_redirects=None,
timeout=None):
""" Send a PUT request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the PUT request to
``data`` a dictionary of key-value pairs that will be urlencoded
and sent as PUT data
or binary data that is sent as the raw body content
``json`` a value that will be json encoded
and sent as PUT data if data is not specified
``headers`` a dictionary of headers to use with the request
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``params`` url parameters to append to the uri
``timeout`` connection timeout
"""
session = self._cache.switch(alias)
data = self._format_data_according_to_header(session, data, headers)
redir = True if allow_redirects is None else allow_redirects
response = self._body_request(
"put",
session,
uri,
data,
json,
params,
files,
headers,
redir,
timeout)
if isinstance(data, bytes):
data = data.decode('utf-8')
logger.info('Put Request using : alias=%s, uri=%s, data=%s, \
headers=%s, allow_redirects=%s ' % (alias, uri, data, headers, redir))
return response
def put(
self,
alias,
uri,
data=None,
headers=None,
allow_redirects=None,
timeout=None):
""" **Deprecated- See Put Request now**
Send a PUT request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the PUT request to
``headers`` a dictionary of headers to use with the request
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``timeout`` connection timeout
"""
logger.warn("Deprecation Warning: Use Put Request in the future")
session = self._cache.switch(alias)
data = self._utf8_urlencode(data)
redir = True if allow_redirects is None else allow_redirects
response = self._body_request(
"put",
session,
uri,
data,
None,
None,
None,
headers,
redir,
timeout)
return response
def delete_request(
self,
alias,
uri,
data=None,
json=None,
params=None,
headers=None,
allow_redirects=None,
timeout=None):
""" Send a DELETE request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the DELETE request to
``json`` a value that will be json encoded
and sent as request data if data is not specified
``headers`` a dictionary of headers to use with the request
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``timeout`` connection timeout
"""
session = self._cache.switch(alias)
data = self._format_data_according_to_header(session, data, headers)
redir = True if allow_redirects is None else allow_redirects
response = self._delete_request(
session, uri, data, json, params, headers, redir, timeout)
if isinstance(data, bytes):
data = data.decode('utf-8')
logger.info('Delete Request using : alias=%s, uri=%s, data=%s, \
headers=%s, allow_redirects=%s ' % (alias, uri, data, headers, redir))
return response
def delete(
        self,
        alias,
        uri,
        data=(),
        headers=None,
        allow_redirects=None,
        timeout=None):
    """ * * * Deprecated- See Delete Request now * * *

    Send a DELETE request on the session object found using the
    given `alias`

    ``alias`` that will be used to identify the Session object in the cache

    ``uri`` to send the DELETE request to

    ``headers`` a dictionary of headers to use with the request

    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.

    ``timeout`` connection timeout
    """
    logger.warn("Deprecation Warning: Use Delete Request in the future")
    session = self._cache.switch(alias)
    data = self._utf8_urlencode(data)
    redir = True if allow_redirects is None else allow_redirects
    # BUG FIX: the ``json`` *module* was previously passed as the request's
    # json body argument to _delete_request (signature: session, uri, data,
    # json, params, headers, allow_redirects, timeout). This deprecated
    # keyword has no json body, so pass None explicitly.
    response = self._delete_request(
        session, uri, data, None, None, headers, redir, timeout)
    return response
def head_request(
self,
alias,
uri,
headers=None,
allow_redirects=None,
timeout=None):
""" Send a HEAD request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the HEAD request to
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``headers`` a dictionary of headers to use with the request
"""
session = self._cache.switch(alias)
redir = False if allow_redirects is None else allow_redirects
response = self._head_request(session, uri, headers, redir, timeout)
logger.info('Head Request using : alias=%s, uri=%s, headers=%s, \
allow_redirects=%s ' % (alias, uri, headers, redir))
return response
def head(
self,
alias,
uri,
headers=None,
allow_redirects=None,
timeout=None):
""" **Deprecated- See Head Request now**
Send a HEAD request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the HEAD request to
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``headers`` a dictionary of headers to use with the request
"""
logger.warn("Deprecation Warning: Use Head Request in the future")
session = self._cache.switch(alias)
redir = False if allow_redirects is None else allow_redirects
response = self._head_request(session, uri, headers, redir, timeout)
return response
def options_request(
self,
alias,
uri,
headers=None,
allow_redirects=None,
timeout=None):
""" Send an OPTIONS request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the OPTIONS request to
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``headers`` a dictionary of headers to use with the request
"""
session = self._cache.switch(alias)
redir = True if allow_redirects is None else allow_redirects
response = self._options_request(session, uri, headers, redir, timeout)
logger.info(
'Options Request using : alias=%s, uri=%s, headers=%s, allow_redirects=%s ' %
(alias, uri, headers, redir))
return response
def options(
self,
alias,
uri,
headers=None,
allow_redirects=None,
timeout=None):
""" **Deprecated- See Options Request now**
Send an OPTIONS request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the OPTIONS request to
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``headers`` a dictionary of headers to use with the request
"""
logger.warn("Deprecation Warning: Use Options Request in the future")
session = self._cache.switch(alias)
redir = True if allow_redirects is None else allow_redirects
response = self._options_request(session, uri, headers, redir, timeout)
return response
def _get_request(
        self,
        session,
        uri,
        params,
        headers,
        json,
        allow_redirects,
        timeout):
    """Execute a GET on *session* and return the requests Response.

    Swaps stdout for a capture buffer while the request runs so httplib's
    verbose output (when debug is enabled) can be re-logged afterwards.
    """
    self._capture_output()
    resp = session.get(self._get_url(session, uri),
                       headers=headers,
                       json=json,
                       params=self._utf8_urlencode(params),
                       allow_redirects=allow_redirects,
                       timeout=self._get_timeout(timeout),
                       cookies=self.cookies,
                       verify=self.verify)
    self._print_debug()
    # Store the last session object
    session.last_resp = resp
    return resp

def _body_request(
        self,
        method_name,
        session,
        uri,
        data,
        json,
        params,
        files,
        headers,
        allow_redirects,
        timeout):
    """Execute a body-carrying verb (``post``/``put``/``patch``) on *session*.

    ``method_name`` names the requests.Session method to call; the Response
    is returned and also stored on ``session.last_resp``.
    """
    self._capture_output()
    # Resolve e.g. session.post / session.put / session.patch dynamically.
    method = getattr(session, method_name)
    resp = method(self._get_url(session, uri),
                  data=data,
                  json=json,
                  params=self._utf8_urlencode(params),
                  files=files,
                  headers=headers,
                  allow_redirects=allow_redirects,
                  timeout=self._get_timeout(timeout),
                  cookies=self.cookies,
                  verify=self.verify)
    self._print_debug()
    # Store the last session object
    session.last_resp = resp
    self.builtin.log(method_name + ' response: ' + resp.text, 'DEBUG')
    return resp

def _delete_request(
        self,
        session,
        uri,
        data,
        json,
        params,
        headers,
        allow_redirects,
        timeout):
    """Execute a DELETE on *session* and return the requests Response."""
    self._capture_output()
    resp = session.delete(self._get_url(session, uri),
                          data=data,
                          json=json,
                          params=self._utf8_urlencode(params),
                          headers=headers,
                          allow_redirects=allow_redirects,
                          timeout=self._get_timeout(timeout),
                          cookies=self.cookies,
                          verify=self.verify)
    self._print_debug()
    # Store the last session object
    session.last_resp = resp
    return resp

def _head_request(self, session, uri, headers, allow_redirects, timeout):
    """Execute a HEAD on *session* and return the requests Response."""
    self._capture_output()
    resp = session.head(self._get_url(session, uri),
                        headers=headers,
                        allow_redirects=allow_redirects,
                        timeout=self._get_timeout(timeout),
                        cookies=self.cookies,
                        verify=self.verify)
    self._print_debug()
    # Store the last session object
    session.last_resp = resp
    return resp

def _options_request(
        self,
        session,
        uri,
        headers,
        allow_redirects,
        timeout):
    """Execute an OPTIONS on *session* and return the requests Response."""
    self._capture_output()
    resp = session.options(self._get_url(session, uri),
                           headers=headers,
                           cookies=self.cookies,
                           allow_redirects=allow_redirects,
                           timeout=self._get_timeout(timeout),
                           verify=self.verify)
    self._print_debug()
    # Store the last session object
    session.last_resp = resp
    return resp
def _get_timeout(self, timeout):
return float(timeout) if timeout is not None else self.timeout
def _capture_output(self):
    # When verbose HTTP debugging is enabled, redirect stdout into an
    # in-memory buffer so httplib's debug chatter can be collected and
    # re-logged later by _print_debug().
    if self.debug >= 1:
        self.http_log = WritableObject()
        sys.stdout = self.http_log

def _print_debug(self):
    # Restore stdout and forward the captured httplib debug output to the
    # Robot Framework log.  Must be called after _capture_output().
    if self.debug >= 1:
        sys.stdout = sys.__stdout__  # Restore stdout
        if PY3:
            # Strip literal "\r" sequences and quote characters from the
            # captured repr-style output.
            debug_info = ''.join(
                self.http_log.content).replace(
                '\\r',
                '').replace(
                '\'',
                '')
        else:
            # Python 2: undo string-escape encoding before cleaning up.
            debug_info = ''.join(
                self.http_log.content).replace(
                '\\r',
                '').decode('string_escape').replace(
                '\'',
                '')
        # Remove empty lines
        debug_info = "\n".join(
            [ll.rstrip() for ll in debug_info.splitlines() if ll.strip()])
        self.builtin.log(debug_info, 'DEBUG')
def _json_pretty_print(self, content):
"""
Pretty print a JSON object
``content`` JSON object to pretty print
"""
temp = json.loads(content)
return json.dumps(
temp,
sort_keys=True,
indent=4,
separators=(
',',
': '))
def _utf8_urlencode(self, data):
if self._is_string_type(data):
return data.encode('utf-8')
if not isinstance(data, dict):
return data
utf8_data = {}
for k, v in data.items():
if self._is_string_type(v):
v = v.encode('utf-8')
utf8_data[k] = v
return urlencode(utf8_data)
def _format_data_according_to_header(self, session, data, headers):
    """Serialize *data* to match the request's effective Content-Type.

    Headers are merged with the session defaults first.  JSON content types
    get ``json.dumps`` (unless *data* is already JSON or a generator, which
    requests streams as-is); form content types get urlencoding; with no
    Content-Type at all the data is urlencoded as a fallback.
    """
    headers = self._merge_headers(session, headers)
    if data is not None and headers is not None and 'Content-Type' in headers and not self._is_json(data):
        if headers['Content-Type'].find("application/json") != -1:
            # Generators are left untouched; requests chunk-encodes them.
            if not isinstance(data, types.GeneratorType):
                data = json.dumps(data)
        elif headers['Content-Type'].find("application/x-www-form-urlencoded") != -1:
            data = self._utf8_urlencode(data)
    else:
        data = self._utf8_urlencode(data)
    return data
def _format_data_to_log_string_according_to_header(self, data, headers):
dataStr = "<empty>"
if data is not None and headers is not None and 'Content-Type' in headers:
if (headers['Content-Type'].find("application/json") != -1) or \
(headers['Content-Type'].find("application/x-www-form-urlencoded") != -1):
if isinstance(data, bytes):
dataStr = data.decode('utf-8')
else:
dataStr = data
else:
dataStr = "<" + headers['Content-Type'] + ">"
return dataStr
@staticmethod
def _merge_headers(session, headers):
if headers is None:
headers = {}
else:
headers = headers.copy()
headers.update(session.headers)
return headers
@staticmethod
def _is_json(data):
try:
json.loads(data)
except (TypeError, ValueError):
return False
return True
@staticmethod
def _is_string_type(data):
    """Return True for text objects: str on Python 3, unicode on Python 2."""
    if PY3:
        return isinstance(data, str)
    return isinstance(data, unicode)
|
bulkan/robotframework-requests
|
src/RequestsLibrary/RequestsKeywords.py
|
RequestsKeywords._json_pretty_print
|
python
|
def _json_pretty_print(self, content):
temp = json.loads(content)
return json.dumps(
temp,
sort_keys=True,
indent=4,
separators=(
',',
': '))
|
Pretty print a JSON object
``content`` JSON object to pretty print
|
train
|
https://github.com/bulkan/robotframework-requests/blob/11baa3277f1cb728712e26d996200703c15254a8/src/RequestsLibrary/RequestsKeywords.py#L1215-L1228
| null |
class RequestsKeywords(object):
"""``RequestsLibrary`` is a [http://code.google.com/p/robotframework/|Robot Framework] test library that uses the [https://github.com/kennethreitz/requests|Requests] HTTP client.
Here is an example testcase
| ***** Settings ***** | | | | |
| Library | Collections | | | |
| Library | RequestsLibrary | | | |
| ***** Test Cases ***** | | | | |
| Get Requests | | | | |
| | Create Session | github | http://api.github.com | |
| | Create Session | google | http://www.google.com | |
| | ${resp}= | Get Request | google | / |
| | Should Be Equal As Strings | ${resp.status_code} | 200 | |
| | ${resp}= | Get Request | github | /users/bulkan |
| | Should Be Equal As Strings | ${resp.status_code} | 200 | |
| | Dictionary Should Contain Value | ${resp.json()} | Bulkan Savun Evcimen | |
"""
ROBOT_LIBRARY_SCOPE = 'Global'
def __init__(self):
    # One shared cache maps Robot Framework aliases to requests.Session
    # objects; the library is Global-scoped, so sessions persist across tests.
    self._cache = robot.utils.ConnectionCache('No sessions created')
    self.builtin = BuiltIn()
    # httplib verbosity level; raised via Create Session's ``debug`` option.
    self.debug = 0
def _create_session(
        self,
        alias,
        url,
        headers,
        cookies,
        auth,
        timeout,
        max_retries,
        backoff_factor,
        proxies,
        verify,
        debug,
        disable_warnings):
    """Create and cache a requests.Session for *alias*.

    ``url`` Base url of the server
    ``alias`` Robot Framework alias to identify the session
    ``headers`` Dictionary of default headers
    ``cookies`` Dictionary of cookies
    ``auth`` Authentication object (or None) attached to the session
    ``timeout`` Connection timeout
    ``max_retries`` The maximum number of retries each connection should attempt
    ``backoff_factor`` The pause between retries
    ``proxies`` Dictionary of proxy urls for HTTP and HTTPS communication
    ``verify`` SSL verification: a boolean, a boolean-like string, or a
        CA_BUNDLE path
    ``debug`` httplib verbosity level
    ``disable_warnings`` quiet the ``requests`` loggers and urllib3 warnings
    """
    self.builtin.log('Creating session: %s' % alias, 'DEBUG')
    s = session = requests.Session()
    s.headers.update(headers)
    s.auth = auth if auth else s.auth
    s.proxies = proxies if proxies else s.proxies
    try:
        max_retries = int(max_retries)
    except ValueError as err:
        raise ValueError("Error converting max_retries parameter: %s" % err)
    if max_retries > 0:
        http = requests.adapters.HTTPAdapter(max_retries=Retry(total=max_retries, backoff_factor=backoff_factor))
        https = requests.adapters.HTTPAdapter(max_retries=Retry(total=max_retries, backoff_factor=backoff_factor))
        # Replace the session's original adapters
        s.mount('http://', http)
        s.mount('https://', https)
    # Disable requests warnings, useful when you have large number of testcase
    # you will observe drastical changes in Robot log.html and output.xml files size
    if disable_warnings:
        logging.basicConfig()  # you need to initialize logging, otherwise you will not see anything from requests
        logging.getLogger().setLevel(logging.ERROR)
        requests_log = logging.getLogger("requests")
        requests_log.setLevel(logging.ERROR)
        requests_log.propagate = True
        if not verify:
            requests.packages.urllib3.disable_warnings()
    # verify can be a Boolean or a String
    if isinstance(verify, bool):
        s.verify = verify
    elif isinstance(verify, str) or isinstance(verify, unicode):
        if verify.lower() == 'true' or verify.lower() == 'false':
            s.verify = self.builtin.convert_to_boolean(verify)
        else:
            # String for CA_BUNDLE, not a Boolean String
            s.verify = verify
    else:
        # not a Boolean nor a String
        s.verify = verify
    # cant pass these into the Session anymore
    self.timeout = float(timeout) if timeout is not None else None
    self.cookies = cookies
    # NOTE(review): self.verify is cleared (None) whenever verify is truthy;
    # the per-request calls then fall back to the session-level s.verify.
    self.verify = verify if self.builtin.convert_to_boolean(verify) != True else None
    s.url = url
    # Enable http verbosity
    if int(debug) >= 1:
        self.debug = int(debug)
        httplib.HTTPConnection.debuglevel = self.debug
    self._cache.register(session, alias=alias)
    return session
def create_session(self, alias, url, headers={}, cookies={},
                   auth=None, timeout=None, proxies=None,
                   verify=False, debug=0, max_retries=3, backoff_factor=0.10, disable_warnings=0):
    """ Create Session: create a HTTP session to a server

    ``url`` Base url of the server

    ``alias`` Robot Framework alias to identify the session

    ``headers`` Dictionary of default headers

    ``cookies`` Dictionary of cookies

    ``auth`` List of username & password for HTTP Basic Auth

    ``timeout`` Connection timeout

    ``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication

    ``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.
             Defaults to False.

    ``debug`` Enable http verbosity option more information
            https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel

    ``max_retries`` The maximum number of retries each connection should attempt.

    ``backoff_factor`` The pause between for each retry

    ``disable_warnings`` Disable requests warning useful when you have large number of testcases
    """
    # NOTE(review): headers={} / cookies={} are shared mutable defaults;
    # harmless only while they are never mutated in this call chain.
    auth = requests.auth.HTTPBasicAuth(*auth) if auth else None
    logger.info('Creating Session using : alias=%s, url=%s, headers=%s, \
                cookies=%s, auth=%s, timeout=%s, proxies=%s, verify=%s, \
                debug=%s ' % (alias, url, headers, cookies, auth, timeout,
                              proxies, verify, debug))
    return self._create_session(
        alias,
        url,
        headers,
        cookies,
        auth,
        timeout,
        max_retries,
        backoff_factor,
        proxies,
        verify,
        debug,
        disable_warnings)
def create_custom_session(
        self,
        alias,
        url,
        auth,
        headers={},
        cookies={},
        timeout=None,
        proxies=None,
        verify=False,
        debug=0,
        max_retries=3,
        backoff_factor=0.10,
        disable_warnings=0):
    """ Create Session: create a HTTP session to a server

    ``url`` Base url of the server

    ``alias`` Robot Framework alias to identify the session

    ``headers`` Dictionary of default headers

    ``cookies`` Dictionary of cookies

    ``auth`` A Custom Authentication object to be passed on to the requests library.
            http://docs.python-requests.org/en/master/user/advanced/#custom-authentication

    ``timeout`` Connection timeout

    ``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication

    ``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.
             Defaults to False.

    ``debug`` Enable http verbosity option more information
            https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel

    ``max_retries`` The maximum number of retries each connection should attempt.

    ``backoff_factor`` The pause between for each retry

    ``disable_warnings`` Disable requests warning useful when you have large number of testcases
    """
    # The auth object is passed straight through to requests; no wrapping.
    logger.info('Creating Custom Authenticated Session using : alias=%s, url=%s, headers=%s, \
                cookies=%s, auth=%s, timeout=%s, proxies=%s, verify=%s, \
                debug=%s ' % (alias, url, headers, cookies, auth, timeout,
                              proxies, verify, debug))
    return self._create_session(
        alias,
        url,
        headers,
        cookies,
        auth,
        timeout,
        max_retries,
        backoff_factor,
        proxies,
        verify,
        debug,
        disable_warnings)
def create_ntlm_session(
        self,
        alias,
        url,
        auth,
        headers={},
        cookies={},
        timeout=None,
        proxies=None,
        verify=False,
        debug=0,
        max_retries=3,
        backoff_factor=0.10,
        disable_warnings=0):
    """ Create Session: create a HTTP session to a server

    ``url`` Base url of the server

    ``alias`` Robot Framework alias to identify the session

    ``headers`` Dictionary of default headers

    ``cookies`` Dictionary of cookies

    ``auth`` ['DOMAIN', 'username', 'password'] for NTLM Authentication

    ``timeout`` Connection timeout

    ``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication

    ``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.
             Defaults to False.

    ``debug`` Enable http verbosity option more information
            https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel

    ``max_retries`` The maximum number of retries each connection should attempt.

    ``backoff_factor`` The pause between for each retry

    ``disable_warnings`` Disable requests warning useful when you have large number of testcases

    Raises AssertionError when requests_ntlm is unavailable or *auth* does
    not have exactly three elements.
    """
    if not HttpNtlmAuth:
        raise AssertionError('Requests NTLM module not loaded')
    elif len(auth) != 3:
        raise AssertionError('Incorrect number of authentication arguments'
                             ' - expected 3, got {}'.format(len(auth)))
    else:
        # NTLM identities are "DOMAIN\\username".
        ntlm_auth = HttpNtlmAuth('{}\\{}'.format(auth[0], auth[1]),
                                 auth[2])
        logger.info('Creating NTLM Session using : alias=%s, url=%s, \
                    headers=%s, cookies=%s, ntlm_auth=%s, timeout=%s, \
                    proxies=%s, verify=%s, debug=%s '
                    % (alias, url, headers, cookies, ntlm_auth,
                       timeout, proxies, verify, debug))
        return self._create_session(
            alias,
            url,
            headers,
            cookies,
            ntlm_auth,
            timeout,
            max_retries,
            backoff_factor,
            proxies,
            verify,
            debug,
            disable_warnings)
def create_digest_session(self, alias, url, auth, headers={}, cookies={},
                          timeout=None, proxies=None, verify=False,
                          debug=0, max_retries=3, backoff_factor=0.10, disable_warnings=0):
    """ Create Session: create a HTTP session to a server

    ``url`` Base url of the server

    ``alias`` Robot Framework alias to identify the session

    ``headers`` Dictionary of default headers

    ``cookies`` Dictionary of cookies

    ``auth`` ['username', 'password'] for HTTP Digest Authentication

    ``timeout`` Connection timeout

    ``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication

    ``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.
             Defaults to False.

    ``debug`` Enable http verbosity option more information
            https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel

    ``max_retries`` The maximum number of retries each connection should attempt.

    ``backoff_factor`` The pause between for each retry

    ``disable_warnings`` Disable requests warning useful when you have large number of testcases
    """
    digest_auth = requests.auth.HTTPDigestAuth(*auth) if auth else None
    return self._create_session(
        alias,
        url,
        headers,
        cookies,
        digest_auth,
        timeout,
        max_retries,
        backoff_factor,
        proxies,
        verify,
        debug,
        disable_warnings)
def create_client_cert_session(self, alias, url, headers={}, cookies={},
                               client_certs=None, timeout=None, proxies=None,
                               verify=False, debug=0, max_retries=3, backoff_factor=0.10, disable_warnings=0):
    """ Create Session: create a HTTP session to a server

    ``url`` Base url of the server

    ``alias`` Robot Framework alias to identify the session

    ``headers`` Dictionary of default headers

    ``cookies`` Dictionary of cookies

    ``client_certs`` ['client certificate', 'client key'] PEM files containing the client key and certificate

    ``timeout`` Connection timeout

    ``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication

    ``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.
             Defaults to False.

    ``debug`` Enable http verbosity option more information
            https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel

    ``max_retries`` The maximum number of retries each connection should attempt.

    ``backoff_factor`` The pause between for each retry

    ``disable_warnings`` Disable requests warning useful when you have large number of testcases
    """
    logger.info('Creating Session using : alias=%s, url=%s, headers=%s, \
                cookies=%s, client_certs=%s, timeout=%s, proxies=%s, verify=%s, \
                debug=%s ' % (alias, url, headers, cookies, client_certs, timeout,
                              proxies, verify, debug))
    # No auth object; client authentication is done via the TLS certificate,
    # attached to the session after creation.
    session = self._create_session(
        alias,
        url,
        headers,
        cookies,
        None,
        timeout,
        max_retries,
        backoff_factor,
        proxies,
        verify,
        debug,
        disable_warnings)
    session.cert = tuple(client_certs)
    return session
def delete_all_sessions(self):
    """ Removes all the session objects """
    # Every cached alias becomes invalid after this call.
    logger.info('Delete All Sessions')
    self._cache.empty_cache()

def update_session(self, alias, headers=None, cookies=None):
    """Update Session Headers: update a HTTP Session Headers

    ``alias`` Robot Framework alias to identify the session

    ``headers`` Dictionary of headers merge into session
    """
    session = self._cache.switch(alias)
    # merge_setting / merge_cookies come from requests.sessions; presumably
    # the passed-in values take precedence over existing ones — verify
    # against the requests version in use.
    session.headers = merge_setting(headers, session.headers)
    session.cookies = merge_cookies(session.cookies, cookies)
def to_json(self, content, pretty_print=False):
    """Convert a string to a JSON object.

    ``content`` string (or, on Python 3, UTF-8 bytes) to parse

    ``pretty_print`` when truthy, return a pretty-printed JSON string
    instead of the parsed object
    """
    if PY3 and isinstance(content, bytes):
        content = content.decode(encoding='utf-8')
    result = self._json_pretty_print(content) if pretty_print else json.loads(content)
    logger.info('To JSON using : content=%s ' % (content))
    logger.info('To JSON using : pretty_print=%s ' % (pretty_print))
    return result
def get_request(
        self,
        alias,
        uri,
        headers=None,
        json=None,
        params=None,
        allow_redirects=None,
        timeout=None):
    """ Send a GET request on the session object found using the
    given `alias`

    ``alias`` that will be used to identify the Session object in the cache

    ``uri`` to send the GET request to

    ``params`` url parameters to append to the uri

    ``headers`` a dictionary of headers to use with the request

    ``json`` json data to send in the body of the :class:`Request`.

    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.

    ``timeout`` connection timeout
    """
    session = self._cache.switch(alias)
    # GET follows redirects by default.
    redir = True if allow_redirects is None else allow_redirects
    response = self._get_request(
        session, uri, params, headers, json, redir, timeout)
    logger.info(
        'Get Request using : alias=%s, uri=%s, headers=%s json=%s' %
        (alias, uri, headers, json))
    return response
def get(
        self,
        alias,
        uri,
        params=None,
        headers=None,
        allow_redirects=None,
        timeout=None):
    """ **Deprecated- See Get Request now**

    Send a GET request on the session object found using the
    given `alias`

    ``alias`` that will be used to identify the Session object in the cache

    ``uri`` to send the GET request to

    ``headers`` a dictionary of headers to use with the request

    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.

    ``timeout`` connection timeout
    """
    logger.warn("Deprecation Warning: Use Get Request in the future")
    session = self._cache.switch(alias)
    redir = True if allow_redirects is None else allow_redirects
    # BUG FIX: arguments were previously passed positionally in the wrong
    # order — _get_request expects (params, headers, json, allow_redirects,
    # timeout), but this keyword passed (params, headers, redir, timeout,
    # json), sending the json *module* as the timeout.  This deprecated
    # keyword has no json payload, so pass None for it.
    response = self._get_request(
        session, uri, params, headers, None, redir, timeout)
    return response
def post_request(
        self,
        alias,
        uri,
        data=None,
        json=None,
        params=None,
        headers=None,
        files=None,
        allow_redirects=None,
        timeout=None):
    """ Send a POST request on the session object found using the
    given `alias`

    ``alias`` that will be used to identify the Session object in the cache

    ``uri`` to send the POST request to

    ``data`` a dictionary of key-value pairs that will be urlencoded
           and sent as POST data
           or binary data that is sent as the raw body content
           or passed as such for multipart form data if ``files`` is also
           defined

    ``json`` a value that will be json encoded
           and sent as POST data if files or data is not specified

    ``params`` url parameters to append to the uri

    ``headers`` a dictionary of headers to use with the request

    ``files`` a dictionary of file names containing file data to POST to the server

    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.

    ``timeout`` connection timeout
    """
    session = self._cache.switch(alias)
    # For multipart uploads the body must stay raw so requests can build
    # the multipart envelope itself.
    if not files:
        data = self._format_data_according_to_header(session, data, headers)
    redir = True if allow_redirects is None else allow_redirects
    response = self._body_request(
        "post",
        session,
        uri,
        data,
        json,
        params,
        files,
        headers,
        redir,
        timeout)
    dataStr = self._format_data_to_log_string_according_to_header(data, headers)
    logger.info('Post Request using : alias=%s, uri=%s, data=%s, headers=%s, files=%s, allow_redirects=%s '
                % (alias, uri, dataStr, headers, files, redir))
    return response

def post(
        self,
        alias,
        uri,
        data={},
        headers=None,
        files=None,
        allow_redirects=None,
        timeout=None):
    """ **Deprecated- See Post Request now**

    Send a POST request on the session object found using the
    given `alias`

    ``alias`` that will be used to identify the Session object in the cache

    ``uri`` to send the GET request to

    ``data`` a dictionary of key-value pairs that will be urlencoded
           and sent as POST data
           or binary data that is sent as the raw body content

    ``headers`` a dictionary of headers to use with the request

    ``files`` a dictionary of file names containing file data to POST to the server

    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.

    ``timeout`` connection timeout
    """
    # NOTE(review): data={} is a shared mutable default; harmless here
    # because it is only read, never mutated.
    logger.warn("Deprecation Warning: Use Post Request in the future")
    session = self._cache.switch(alias)
    data = self._utf8_urlencode(data)
    redir = True if allow_redirects is None else allow_redirects
    response = self._body_request(
        "post",
        session,
        uri,
        data,
        None,
        None,
        files,
        headers,
        redir,
        timeout)
    return response
def patch_request(
        self,
        alias,
        uri,
        data=None,
        json=None,
        params=None,
        headers=None,
        files=None,
        allow_redirects=None,
        timeout=None):
    """ Send a PATCH request on the session object found using the
    given `alias`

    ``alias`` that will be used to identify the Session object in the cache

    ``uri`` to send the PATCH request to

    ``data`` a dictionary of key-value pairs that will be urlencoded
           and sent as PATCH data
           or binary data that is sent as the raw body content

    ``json`` a value that will be json encoded
           and sent as PATCH data if data is not specified

    ``headers`` a dictionary of headers to use with the request

    ``files`` a dictionary of file names containing file data to PATCH to the server

    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.

    ``params`` url parameters to append to the uri

    ``timeout`` connection timeout
    """
    session = self._cache.switch(alias)
    data = self._format_data_according_to_header(session, data, headers)
    redir = True if allow_redirects is None else allow_redirects
    response = self._body_request(
        "patch",
        session,
        uri,
        data,
        json,
        params,
        files,
        headers,
        redir,
        timeout)
    # The decode only affects the log line below; the request is already sent.
    if isinstance(data, bytes):
        data = data.decode('utf-8')
    logger.info('Patch Request using : alias=%s, uri=%s, data=%s, \
                headers=%s, files=%s, allow_redirects=%s '
                % (alias, uri, data, headers, files, redir))
    return response

def patch(
        self,
        alias,
        uri,
        data={},
        headers=None,
        files={},
        allow_redirects=None,
        timeout=None):
    """ **Deprecated- See Patch Request now**

    Send a PATCH request on the session object found using the
    given `alias`

    ``alias`` that will be used to identify the Session object in the cache

    ``uri`` to send the PATCH request to

    ``data`` a dictionary of key-value pairs that will be urlencoded
           and sent as PATCH data
           or binary data that is sent as the raw body content

    ``headers`` a dictionary of headers to use with the request

    ``files`` a dictionary of file names containing file data to PATCH to the server

    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.

    ``timeout`` connection timeout
    """
    # NOTE(review): data={} / files={} are shared mutable defaults; harmless
    # here because they are only read, never mutated.
    logger.warn("Deprecation Warning: Use Patch Request in the future")
    session = self._cache.switch(alias)
    data = self._utf8_urlencode(data)
    redir = True if allow_redirects is None else allow_redirects
    response = self._body_request(
        "patch",
        session,
        uri,
        data,
        None,
        None,
        files,
        headers,
        redir,
        timeout)
    return response
def put_request(
        self,
        alias,
        uri,
        data=None,
        json=None,
        params=None,
        files=None,
        headers=None,
        allow_redirects=None,
        timeout=None):
    """ Send a PUT request on the session object found using the
    given `alias`

    ``alias`` that will be used to identify the Session object in the cache

    ``uri`` to send the PUT request to

    ``data`` a dictionary of key-value pairs that will be urlencoded
           and sent as PUT data
           or binary data that is sent as the raw body content

    ``json`` a value that will be json encoded
           and sent as PUT data if data is not specified

    ``headers`` a dictionary of headers to use with the request

    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.

    ``params`` url parameters to append to the uri

    ``timeout`` connection timeout
    """
    session = self._cache.switch(alias)
    data = self._format_data_according_to_header(session, data, headers)
    redir = True if allow_redirects is None else allow_redirects
    response = self._body_request(
        "put",
        session,
        uri,
        data,
        json,
        params,
        files,
        headers,
        redir,
        timeout)
    # The decode only affects the log line below; the request is already sent.
    if isinstance(data, bytes):
        data = data.decode('utf-8')
    logger.info('Put Request using : alias=%s, uri=%s, data=%s, \
                headers=%s, allow_redirects=%s ' % (alias, uri, data, headers, redir))
    return response

def put(
        self,
        alias,
        uri,
        data=None,
        headers=None,
        allow_redirects=None,
        timeout=None):
    """ **Deprecated- See Put Request now**

    Send a PUT request on the session object found using the
    given `alias`

    ``alias`` that will be used to identify the Session object in the cache

    ``uri`` to send the PUT request to

    ``headers`` a dictionary of headers to use with the request

    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.

    ``timeout`` connection timeout
    """
    logger.warn("Deprecation Warning: Use Put Request in the future")
    session = self._cache.switch(alias)
    data = self._utf8_urlencode(data)
    redir = True if allow_redirects is None else allow_redirects
    response = self._body_request(
        "put",
        session,
        uri,
        data,
        None,
        None,
        None,
        headers,
        redir,
        timeout)
    return response
def delete_request(
        self,
        alias,
        uri,
        data=None,
        json=None,
        params=None,
        headers=None,
        allow_redirects=None,
        timeout=None):
    """ Send a DELETE request on the session object found using the
    given `alias`

    ``alias`` that will be used to identify the Session object in the cache

    ``uri`` to send the DELETE request to

    ``json`` a value that will be json encoded
           and sent as request data if data is not specified

    ``headers`` a dictionary of headers to use with the request

    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.

    ``timeout`` connection timeout
    """
    session = self._cache.switch(alias)
    data = self._format_data_according_to_header(session, data, headers)
    redir = True if allow_redirects is None else allow_redirects
    response = self._delete_request(
        session, uri, data, json, params, headers, redir, timeout)
    # The decode only affects the log line below; the request is already sent.
    if isinstance(data, bytes):
        data = data.decode('utf-8')
    logger.info('Delete Request using : alias=%s, uri=%s, data=%s, \
                headers=%s, allow_redirects=%s ' % (alias, uri, data, headers, redir))
    return response
def delete(
        self,
        alias,
        uri,
        data=(),
        headers=None,
        allow_redirects=None,
        timeout=None):
    """ **Deprecated- See Delete Request now**

    Send a DELETE request on the session object found using the
    given `alias`

    ``alias`` that will be used to identify the Session object in the cache

    ``uri`` to send the DELETE request to

    ``headers`` a dictionary of headers to use with the request

    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.

    ``timeout`` connection timeout
    """
    logger.warn("Deprecation Warning: Use Delete Request in the future")
    session = self._cache.switch(alias)
    data = self._utf8_urlencode(data)
    redir = True if allow_redirects is None else allow_redirects
    # BUG FIX: the json *module* object was previously passed as the
    # request's json body, which requests would fail to serialize.  This
    # deprecated keyword has no json payload, so pass None.
    response = self._delete_request(
        session, uri, data, None, None, headers, redir, timeout)
    return response
def head_request(
        self,
        alias,
        uri,
        headers=None,
        allow_redirects=None,
        timeout=None):
    """ Send a HEAD request on the session object found using the
    given `alias`

    ``alias`` that will be used to identify the Session object in the cache

    ``uri`` to send the HEAD request to

    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.

    ``headers`` a dictionary of headers to use with the request
    """
    session = self._cache.switch(alias)
    # Unlike the other verbs, HEAD does NOT follow redirects by default.
    redir = False if allow_redirects is None else allow_redirects
    response = self._head_request(session, uri, headers, redir, timeout)
    logger.info('Head Request using : alias=%s, uri=%s, headers=%s, \
                allow_redirects=%s ' % (alias, uri, headers, redir))
    return response

def head(
        self,
        alias,
        uri,
        headers=None,
        allow_redirects=None,
        timeout=None):
    """ **Deprecated- See Head Request now**

    Send a HEAD request on the session object found using the
    given `alias`

    ``alias`` that will be used to identify the Session object in the cache

    ``uri`` to send the HEAD request to

    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.

    ``headers`` a dictionary of headers to use with the request
    """
    logger.warn("Deprecation Warning: Use Head Request in the future")
    session = self._cache.switch(alias)
    # HEAD does not follow redirects by default.
    redir = False if allow_redirects is None else allow_redirects
    response = self._head_request(session, uri, headers, redir, timeout)
    return response
def options_request(
        self,
        alias,
        uri,
        headers=None,
        allow_redirects=None,
        timeout=None):
    """ Send an OPTIONS request on the session object found using the
    given `alias`

    ``alias`` that will be used to identify the Session object in the cache

    ``uri`` to send the OPTIONS request to

    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.

    ``headers`` a dictionary of headers to use with the request
    """
    session = self._cache.switch(alias)
    # OPTIONS follows redirects by default.
    redir = True if allow_redirects is None else allow_redirects
    response = self._options_request(session, uri, headers, redir, timeout)
    logger.info(
        'Options Request using : alias=%s, uri=%s, headers=%s, allow_redirects=%s ' %
        (alias, uri, headers, redir))
    return response

def options(
        self,
        alias,
        uri,
        headers=None,
        allow_redirects=None,
        timeout=None):
    """ **Deprecated- See Options Request now**

    Send an OPTIONS request on the session object found using the
    given `alias`

    ``alias`` that will be used to identify the Session object in the cache

    ``uri`` to send the OPTIONS request to

    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.

    ``headers`` a dictionary of headers to use with the request
    """
    logger.warn("Deprecation Warning: Use Options Request in the future")
    session = self._cache.switch(alias)
    redir = True if allow_redirects is None else allow_redirects
    response = self._options_request(session, uri, headers, redir, timeout)
    return response
def _get_request(
self,
session,
uri,
params,
headers,
json,
allow_redirects,
timeout):
self._capture_output()
resp = session.get(self._get_url(session, uri),
headers=headers,
json=json,
params=self._utf8_urlencode(params),
allow_redirects=allow_redirects,
timeout=self._get_timeout(timeout),
cookies=self.cookies,
verify=self.verify)
self._print_debug()
# Store the last session object
session.last_resp = resp
return resp
def _body_request(
self,
method_name,
session,
uri,
data,
json,
params,
files,
headers,
allow_redirects,
timeout):
self._capture_output()
method = getattr(session, method_name)
resp = method(self._get_url(session, uri),
data=data,
json=json,
params=self._utf8_urlencode(params),
files=files,
headers=headers,
allow_redirects=allow_redirects,
timeout=self._get_timeout(timeout),
cookies=self.cookies,
verify=self.verify)
self._print_debug()
# Store the last session object
session.last_resp = resp
self.builtin.log(method_name + ' response: ' + resp.text, 'DEBUG')
return resp
def _delete_request(
self,
session,
uri,
data,
json,
params,
headers,
allow_redirects,
timeout):
self._capture_output()
resp = session.delete(self._get_url(session, uri),
data=data,
json=json,
params=self._utf8_urlencode(params),
headers=headers,
allow_redirects=allow_redirects,
timeout=self._get_timeout(timeout),
cookies=self.cookies,
verify=self.verify)
self._print_debug()
# Store the last session object
session.last_resp = resp
return resp
def _head_request(self, session, uri, headers, allow_redirects, timeout):
self._capture_output()
resp = session.head(self._get_url(session, uri),
headers=headers,
allow_redirects=allow_redirects,
timeout=self._get_timeout(timeout),
cookies=self.cookies,
verify=self.verify)
self._print_debug()
# Store the last session object
session.last_resp = resp
return resp
def _options_request(
        self,
        session,
        uri,
        headers,
        allow_redirects,
        timeout):
    """Perform an OPTIONS request on ``session`` and return the response.

    The response is stored on ``session.last_resp`` for later keywords.
    """
    self._capture_output()
    response = session.options(
        self._get_url(session, uri),
        headers=headers,
        cookies=self.cookies,
        allow_redirects=allow_redirects,
        timeout=self._get_timeout(timeout),
        verify=self.verify)
    self._print_debug()
    # Keep the most recent response around on the session object.
    session.last_resp = response
    return response
def _get_url(self, session, uri):
"""
Helper method to get the full url
"""
url = session.url
if uri:
slash = '' if uri.startswith('/') else '/'
url = "%s%s%s" % (session.url, slash, uri)
return url
def _get_timeout(self, timeout):
return float(timeout) if timeout is not None else self.timeout
def _capture_output(self):
    """Start capturing stdout into an in-memory buffer when debugging.

    Only active when ``self.debug >= 1``.  Redirects the *global*
    ``sys.stdout`` into a fresh ``WritableObject`` so that the HTTP
    library's console output can be collected; ``_print_debug`` restores
    stdout and logs the captured text.
    """
    if self.debug >= 1:
        # A new buffer per request so earlier output is not replayed.
        self.http_log = WritableObject()
        sys.stdout = self.http_log
def _print_debug(self):
    """Restore stdout and log the HTTP output captured by ``_capture_output``.

    Only active when ``self.debug >= 1``.  Strips literal ``\\r`` sequences
    and quote characters from the captured text; on Python 2 the text is
    additionally run through ``string_escape`` decoding.  Empty lines are
    removed before logging at DEBUG level.
    """
    if self.debug >= 1:
        sys.stdout = sys.__stdout__  # Restore stdout
        if PY3:
            debug_info = ''.join(
                self.http_log.content).replace(
                '\\r',
                '').replace(
                '\'',
                '')
        else:
            # Python 2 path: captured reprs need their escapes expanded.
            debug_info = ''.join(
                self.http_log.content).replace(
                '\\r',
                '').decode('string_escape').replace(
                '\'',
                '')

        # Remove empty lines
        debug_info = "\n".join(
            [ll.rstrip() for ll in debug_info.splitlines() if ll.strip()])
        self.builtin.log(debug_info, 'DEBUG')
def _utf8_urlencode(self, data):
    """UTF-8 encode ``data`` for use as a request payload.

    Strings are returned UTF-8 encoded as bytes; dictionaries have their
    string values UTF-8 encoded and are then urlencoded into a query
    string; anything else is passed through unchanged.
    """
    if self._is_string_type(data):
        return data.encode('utf-8')
    if not isinstance(data, dict):
        return data
    encoded = {}
    for key, value in data.items():
        if self._is_string_type(value):
            value = value.encode('utf-8')
        encoded[key] = value
    return urlencode(encoded)
def _format_data_according_to_header(self, session, data, headers):
    """Serialize ``data`` to match the request's Content-Type header.

    ``None`` payloads, requests without a Content-Type, and strings that
    already parse as JSON pass through untouched.  JSON content types get
    ``json.dumps`` (generators are left lazy so they can stream); every
    other content type gets a urlencoded payload.
    """
    headers = self._merge_headers(session, headers)
    if data is None or headers is None or 'Content-Type' not in headers \
            or self._is_json(data):
        return data
    content_type = headers['Content-Type']
    if content_type.find("application/json") != -1:
        # Generators must stay lazy; requests streams them directly.
        if not isinstance(data, types.GeneratorType):
            data = json.dumps(data)
    else:
        # Form-encoded and any other content type both get urlencoded.
        data = self._utf8_urlencode(data)
    return data
def _format_data_to_log_string_according_to_header(self, data, headers):
dataStr = "<empty>"
if data is not None and headers is not None and 'Content-Type' in headers:
if (headers['Content-Type'].find("application/json") != -1) or \
(headers['Content-Type'].find("application/x-www-form-urlencoded") != -1):
if isinstance(data, bytes):
dataStr = data.decode('utf-8')
else:
dataStr = data
else:
dataStr = "<" + headers['Content-Type'] + ">"
return dataStr
@staticmethod
def _merge_headers(session, headers):
if headers is None:
headers = {}
else:
headers = headers.copy()
headers.update(session.headers)
return headers
@staticmethod
def _is_json(data):
try:
json.loads(data)
except (TypeError, ValueError):
return False
return True
@staticmethod
def _is_string_type(data):
    """Return True when ``data`` is a text string on the running Python.

    ``str`` on Python 3; ``unicode`` on Python 2 (where that builtin
    exists).
    """
    if PY3:
        return isinstance(data, str)
    return isinstance(data, unicode)
|
utiasSTARS/pykitti
|
pykitti/tracking.py
|
to_array_list
|
python
|
def to_array_list(df, length=None, by_id=True):
    """Convert ``df`` into an array of per-index-entry 2-D arrays.

    The dataframe index is assumed to be 0-based; index entries that are
    missing yield empty arrays.  When ``by_id`` is set, rows within each
    entry are sorted by their ``id`` column and the id itself is dropped
    from the values.

    :param df: source dataframe; the index selects the output slot.
    :param length: number of output slots; defaults to ``max(index) + 1``.
    :param by_id: sort rows by the ``id`` column and strip it.
    :return: ``np.asarray`` over one array per index entry.
    """
    if by_id:
        assert 'id' in df.columns

        # With `id` as the only column there is nothing left after
        # stripping it, so fall back to plain per-index extraction.
        if len(df.columns) == 1:
            by_id = False

    indices = df.index.unique()
    if length is None:
        length = max(indices) + 1

    out = [np.empty(0) for _ in range(length)]
    for i in indices:
        entry = df.loc[i]
        if by_id:
            if isinstance(entry, pd.Series):
                # A single row comes back as a Series; drop the leading id.
                entry = entry[1:]
            else:
                entry = entry.copy().set_index('id').sort_index()

        out[i] = entry.values.reshape((-1, entry.shape[-1]))
    return np.asarray(out)
|
Converts a dataframe to a list of arrays, with one array for every unique index entry.
Index is assumed to be 0-based contiguous. If there is a missing index entry, an empty
numpy array is returned for it.
Elements in the arrays are sorted by their id.
:param df:
:param length:
:return:
|
train
|
https://github.com/utiasSTARS/pykitti/blob/d3e1bb81676e831886726cc5ed79ce1f049aef2c/pykitti/tracking.py#L181-L212
| null |
"""Provides 'tracking', which loads and parses tracking benchmark data."""
import datetime as dt
import glob
import os
from collections import namedtuple
import pandas as pd
import numpy as np
import pykitti.utils as utils
import cv2
try:
xrange
except NameError:
xrange = range
__author__ = "Sidney zhang"
__email__ = "sidney@sidazhang.com"
class tracking:
    """Load and parse tracking benchmark data into a usable format."""

    def __init__(self, base_path, sequence, **kwargs):
        """Set the path.

        :param base_path: root directory of the tracking benchmark data.
        :param sequence: sequence name/number used in the directory layout.
        :param kwargs: optional ``frames`` (iterable of frame indices to
            keep) and ``imtype`` (image file extension, default 'png').
        """
        self.base_path = base_path
        self.sequence = sequence
        self.frames = kwargs.get('frames', None)

        # Default image file extension is 'png'
        self.imtype = kwargs.get('imtype', 'png')

        # Find all the data files
        self._get_file_lists()
        print('files', len(self.cam2_files))

        # Pre-load data that isn't returned as a generator
        # self._load_calib()

    def __len__(self):
        """Return the number of frames loaded."""
        # NOTE(review): `self.timestamps` is never assigned anywhere in this
        # class, so len() on an instance raises AttributeError — confirm
        # whether timestamp loading was meant to be ported over.
        return len(self.timestamps)

    @property
    def cam2(self):
        """Generator to read image files for cam2 (RGB left)."""
        return utils.yield_images(self.cam2_files, mode='RGB')

    def get_cam2(self, idx):
        """Read image file for cam2 (RGB left) at the specified index."""
        return utils.load_image(self.cam2_files[idx], mode='RGB')

    @property
    def cam3(self):
        """Generator to read image files for cam0 (RGB right)."""
        return utils.yield_images(self.cam3_files, mode='RGB')

    def get_cam3(self, idx):
        """Read image file for cam3 (RGB right) at the specified index."""
        return utils.load_image(self.cam3_files[idx], mode='RGB')

    @property
    def gray(self):
        """Generator to read monochrome stereo pairs from file.
        """
        # NOTE(review): relies on `self.cam0`/`self.cam1`, which are not
        # defined on this class — verify against the grayscale loaders.
        return zip(self.cam0, self.cam1)

    def get_gray(self, idx):
        """Read monochrome stereo pair at the specified index."""
        # NOTE(review): `get_cam0`/`get_cam1` are not defined here either.
        return (self.get_cam0(idx), self.get_cam1(idx))

    @property
    def rgb(self):
        """Generator to read RGB stereo pairs from file.
        """
        return zip(self.cam2, self.cam3)

    def get_rgb(self, idx):
        """Read RGB stereo pair at the specified index."""
        return (self.get_cam2(idx), self.get_cam3(idx))

    @property
    def velo(self):
        """Generator to read velodyne [x,y,z,reflectance] scan data from binary files."""
        # Return a generator yielding Velodyne scans.
        # Each scan is a Nx4 array of [x,y,z,reflectance]
        return utils.yield_velo_scans(self.velo_files)

    def get_velo(self, idx):
        """Read velodyne [x,y,z,reflectance] scan at the specified index."""
        return utils.load_velo_scan(self.velo_files[idx])

    def _get_file_lists(self):
        """Find and list data files for each sensor."""
        self.cam2_files = sorted(glob.glob(
            os.path.join(self.base_path,
                         'image_02',
                         self.sequence,
                         '*.{}'.format(self.imtype))))
        self.cam3_files = sorted(glob.glob(
            os.path.join(self.base_path,
                         'image_03',
                         self.sequence,
                         '*.{}'.format(self.imtype))))
        self.velo_files = sorted(glob.glob(
            os.path.join(self.base_path,
                         'velodyne',
                         self.sequence,
                         '*.bin')))

        # Subselect the chosen range of frames, if any
        if self.frames is not None:
            # NOTE(review): `self.cam0_files`/`self.cam1_files` are never
            # populated for the tracking benchmark, so passing `frames`
            # raises AttributeError here — confirm these two subselects
            # belong (they look copied from the odometry loader).
            self.cam0_files = utils.subselect_files(
                self.cam0_files, self.frames)
            self.cam1_files = utils.subselect_files(
                self.cam1_files, self.frames)
            self.cam2_files = utils.subselect_files(
                self.cam2_files, self.frames)
            self.cam3_files = utils.subselect_files(
                self.cam3_files, self.frames)
            self.velo_files = utils.subselect_files(
                self.velo_files, self.frames)

    def _load_calib(self):
        """Load and compute intrinsic and extrinsic calibration parameters."""
        # We'll build the calibration parameters as a dictionary, then
        # convert it to a namedtuple to prevent it from being modified later
        data = {}

        # Load the calibration file
        # NOTE(review): `self.sequence_path` is never set by this class, and
        # joining '<sequence_path>.txt' with 'calib.txt' looks suspicious —
        # compare with odometry._load_calib, which uses
        # os.path.join(self.sequence_path, 'calib.txt').
        calib_filepath = os.path.join(self.sequence_path + '.txt', 'calib.txt')
        filedata = utils.read_calib_file(calib_filepath)

        # Create 3x4 projection matrices
        P_rect_00 = np.reshape(filedata['P0'], (3, 4))
        P_rect_10 = np.reshape(filedata['P1'], (3, 4))
        P_rect_20 = np.reshape(filedata['P2'], (3, 4))
        P_rect_30 = np.reshape(filedata['P3'], (3, 4))

        data['P_rect_00'] = P_rect_00
        data['P_rect_10'] = P_rect_10
        data['P_rect_20'] = P_rect_20
        data['P_rect_30'] = P_rect_30

        # Compute the rectified extrinsics from cam0 to camN
        T1 = np.eye(4)
        T1[0, 3] = P_rect_10[0, 3] / P_rect_10[0, 0]
        T2 = np.eye(4)
        T2[0, 3] = P_rect_20[0, 3] / P_rect_20[0, 0]
        T3 = np.eye(4)
        T3[0, 3] = P_rect_30[0, 3] / P_rect_30[0, 0]

        # Compute the velodyne to rectified camera coordinate transforms
        data['T_cam0_velo'] = np.reshape(filedata['Tr'], (3, 4))
        data['T_cam0_velo'] = np.vstack([data['T_cam0_velo'], [0, 0, 0, 1]])
        data['T_cam1_velo'] = T1.dot(data['T_cam0_velo'])
        data['T_cam2_velo'] = T2.dot(data['T_cam0_velo'])
        data['T_cam3_velo'] = T3.dot(data['T_cam0_velo'])

        # Compute the camera intrinsics
        data['K_cam0'] = P_rect_00[0:3, 0:3]
        data['K_cam1'] = P_rect_10[0:3, 0:3]
        data['K_cam2'] = P_rect_20[0:3, 0:3]
        data['K_cam3'] = P_rect_30[0:3, 0:3]

        # Compute the stereo baselines in meters by projecting the origin of
        # each camera frame into the velodyne frame and computing the distances
        # between them
        p_cam = np.array([0, 0, 0, 1])
        p_velo0 = np.linalg.inv(data['T_cam0_velo']).dot(p_cam)
        p_velo1 = np.linalg.inv(data['T_cam1_velo']).dot(p_cam)
        p_velo2 = np.linalg.inv(data['T_cam2_velo']).dot(p_cam)
        p_velo3 = np.linalg.inv(data['T_cam3_velo']).dot(p_cam)

        data['b_gray'] = np.linalg.norm(p_velo1 - p_velo0)  # gray baseline
        data['b_rgb'] = np.linalg.norm(p_velo3 - p_velo2)  # rgb baseline

        self.calib = namedtuple('CalibData', data.keys())(*data.values())
# TODO: Acknowledge this is from HART
class KittiTrackingLabels(object):
"""Kitt Tracking Label parser. It can limit the maximum number of objects per track,
filter out objects with class "DontCare", or retain only those objects present
in a given frame.
"""
columns = 'id class truncated occluded alpha x1 y1 x2 y2 xd yd zd x y z roty score'.split()
classes = 'Car Van Truck Pedestrian Person_sitting Cyclist Tram Misc DontCare'.split()
def __init__(self, path_or_df, bbox_with_size=True, remove_dontcare=True, split_on_reappear=True):
if isinstance(path_or_df, pd.DataFrame):
self._df = path_or_df
else:
if not os.path.exists(path_or_df):
raise ValueError('File {} doesn\'t exist'.format(path_or_df))
self._df = pd.read_csv(path_or_df, sep=' ', header=None,
index_col=0, skip_blank_lines=True)
# Detection files have 1 more column than label files
# label file has 16 columns
# detection file has 17 columns (the last column is score)
# Here it checks the number of columns the df has and sets the
# column names on the df accordingly
self._df.columns = self.columns[:len(self._df.columns)]
self.bbox_with_size = bbox_with_size
if remove_dontcare:
self._df = self._df[self._df['class'] != 'DontCare']
for c in self._df.columns:
self._convert_type(c, np.float32, np.float64)
self._convert_type(c, np.int32, np.int64)
# TODO: Add occlusion filtering back in
truncated_threshold=(0, 2.)
occluded_threshold=(0, 3.)
# if not nest.is_sequence(occluded_threshold):
# occluded_threshold = (0, occluded_threshold)
#
# if not nest.is_sequence(truncated_threshold):
# truncated_threshold = (0, truncated_threshold)
# TODO: Add occlusion filteringinback in
# self._df = self._df[self._df['occluded'] >= occluded_threshold[0]]
# self._df = self._df[self._df['occluded'] <= occluded_threshold[1]]
#
# self._df = self._df[self._df['truncated'] >= truncated_threshold[0]]
# self._df = self._df[self._df['truncated'] <= truncated_threshold[1]]
# make 0-based contiguous ids
ids = self._df.id.unique()
offset = max(ids) + 1
id_map = {id: new_id for id, new_id in zip(ids, np.arange(offset, len(ids) + offset))}
self._df.replace({'id': id_map}, inplace=True)
self._df.id -= offset
self.ids = list(self._df.id.unique())
self.max_objects = len(self.ids)
self.index = self._df.index.unique()
if split_on_reappear:
added_ids = self._split_on_reappear(self._df, self.presence, self.ids[-1])
self.ids.extend(added_ids)
self.max_objects += len(added_ids)
def _convert_type(self, column, dest_type, only_from_type=None):
cond = only_from_type is None or self._df[column].dtype == only_from_type
if cond:
self._df[column] = self._df[column].astype(dest_type)
@property
def bbox(self):
bbox = self._df[['id', 'x1', 'y1', 'x2', 'y2']].copy()
# TODO: Fix this to become x, y, w, h
if self.bbox_with_size:
bbox['y2'] -= bbox['y1']
bbox['x2'] -= bbox['x1']
"""Converts a dataframe to a list of arrays
:param df:
:param length:
:return:
"""
return to_array_list(bbox)
@property
def presence(self):
return self._presence(self._df, self.index, self.max_objects)
@property
def num_objects(self):
ns = self._df.id.groupby(self._df.index).count()
absent = list(set(range(len(self))) - set(self.index))
other = pd.DataFrame([0] * len(absent), absent)
ns = ns.append(other)
ns.sort_index(inplace=True)
return ns.as_matrix().squeeze()
@property
def cls(self):
return to_array_list(self._df[['id', 'class']])
@property
def occlusion(self):
return to_array_list(self._df[['id', 'occluded']])
@property
def id(self):
return to_array_list(self._df['id'])
def __len__(self):
return self.index[-1] - self.index[0] + 1
@classmethod
def _presence(cls, df, index, n_objects):
p = np.zeros((index[-1] + 1, n_objects), dtype=bool)
for i, row in df.iterrows():
p[i, row.id] = True
return p
@classmethod
def _split_on_reappear(cls, df, p, id_offset):
"""Assign a new identity to an objects that appears after disappearing previously.
Works on `df` in-place.
:param df: data frame
:param p: presence
:param id_offset: offset added to new ids
:return:
"""
next_id = id_offset + 1
added_ids = []
nt = p.sum(0)
start = np.argmax(p, 0)
end = np.argmax(np.cumsum(p, 0), 0)
diff = end - start + 1
is_contiguous = np.equal(nt, diff)
for id, contiguous in enumerate(is_contiguous):
if not contiguous:
to_change = df[df.id == id]
index = to_change.index
diff = index[1:] - index[:-1]
where = np.where(np.greater(diff, 1))[0]
for w in where:
to_change.loc[w + 1:, 'id'] = next_id
added_ids.append(next_id)
next_id += 1
df[df.id == id] = to_change
return added_ids
|
utiasSTARS/pykitti
|
pykitti/tracking.py
|
KittiTrackingLabels.bbox
|
python
|
def bbox(self):
    """Per-frame bounding boxes (with the object id), via ``to_array_list``.

    Boxes are corner coordinates (x1, y1, x2, y2); when ``bbox_with_size``
    is set the last two columns are converted to width and height.
    """
    boxes = self._df[['id', 'x1', 'y1', 'x2', 'y2']].copy()
    if self.bbox_with_size:
        # TODO: Fix this to become x, y, w, h
        boxes['x2'] -= boxes['x1']
        boxes['y2'] -= boxes['y1']
    return to_array_list(boxes)
|
Return per-object bounding boxes as a list of arrays, one entry per frame.
When ``bbox_with_size`` is set, the x2/y2 columns hold width and height
instead of the far-corner coordinates.
:return:
|
train
|
https://github.com/utiasSTARS/pykitti/blob/d3e1bb81676e831886726cc5ed79ce1f049aef2c/pykitti/tracking.py#L291-L304
|
[
"def to_array_list(df, length=None, by_id=True):\n \"\"\"Converts a dataframe to a list of arrays, with one array for every unique index entry.\n Index is assumed to be 0-based contiguous. If there is a missing index entry, an empty\n numpy array is returned for it.\n Elements in the arrays are sorted by their id.\n :param df:\n :param length:\n :return:\n \"\"\"\n\n if by_id:\n assert 'id' in df.columns\n\n # if `id` is the only column, don't sort it (and don't remove it)\n if len(df.columns) == 1:\n by_id = False\n\n idx = df.index.unique()\n if length is None:\n length = max(idx) + 1\n\n l = [np.empty(0) for _ in xrange(length)]\n for i in idx:\n a = df.loc[i]\n if by_id:\n if isinstance(a, pd.Series):\n a = a[1:]\n else:\n a = a.copy().set_index('id').sort_index()\n\n l[i] = a.values.reshape((-1, a.shape[-1]))\n return np.asarray(l)\n"
] |
class KittiTrackingLabels(object):
"""Kitt Tracking Label parser. It can limit the maximum number of objects per track,
filter out objects with class "DontCare", or retain only those objects present
in a given frame.
"""
columns = 'id class truncated occluded alpha x1 y1 x2 y2 xd yd zd x y z roty score'.split()
classes = 'Car Van Truck Pedestrian Person_sitting Cyclist Tram Misc DontCare'.split()
def __init__(self, path_or_df, bbox_with_size=True, remove_dontcare=True, split_on_reappear=True):
if isinstance(path_or_df, pd.DataFrame):
self._df = path_or_df
else:
if not os.path.exists(path_or_df):
raise ValueError('File {} doesn\'t exist'.format(path_or_df))
self._df = pd.read_csv(path_or_df, sep=' ', header=None,
index_col=0, skip_blank_lines=True)
# Detection files have 1 more column than label files
# label file has 16 columns
# detection file has 17 columns (the last column is score)
# Here it checks the number of columns the df has and sets the
# column names on the df accordingly
self._df.columns = self.columns[:len(self._df.columns)]
self.bbox_with_size = bbox_with_size
if remove_dontcare:
self._df = self._df[self._df['class'] != 'DontCare']
for c in self._df.columns:
self._convert_type(c, np.float32, np.float64)
self._convert_type(c, np.int32, np.int64)
# TODO: Add occlusion filtering back in
truncated_threshold=(0, 2.)
occluded_threshold=(0, 3.)
# if not nest.is_sequence(occluded_threshold):
# occluded_threshold = (0, occluded_threshold)
#
# if not nest.is_sequence(truncated_threshold):
# truncated_threshold = (0, truncated_threshold)
# TODO: Add occlusion filteringinback in
# self._df = self._df[self._df['occluded'] >= occluded_threshold[0]]
# self._df = self._df[self._df['occluded'] <= occluded_threshold[1]]
#
# self._df = self._df[self._df['truncated'] >= truncated_threshold[0]]
# self._df = self._df[self._df['truncated'] <= truncated_threshold[1]]
# make 0-based contiguous ids
ids = self._df.id.unique()
offset = max(ids) + 1
id_map = {id: new_id for id, new_id in zip(ids, np.arange(offset, len(ids) + offset))}
self._df.replace({'id': id_map}, inplace=True)
self._df.id -= offset
self.ids = list(self._df.id.unique())
self.max_objects = len(self.ids)
self.index = self._df.index.unique()
if split_on_reappear:
added_ids = self._split_on_reappear(self._df, self.presence, self.ids[-1])
self.ids.extend(added_ids)
self.max_objects += len(added_ids)
def _convert_type(self, column, dest_type, only_from_type=None):
cond = only_from_type is None or self._df[column].dtype == only_from_type
if cond:
self._df[column] = self._df[column].astype(dest_type)
@property
@property
def presence(self):
return self._presence(self._df, self.index, self.max_objects)
@property
def num_objects(self):
ns = self._df.id.groupby(self._df.index).count()
absent = list(set(range(len(self))) - set(self.index))
other = pd.DataFrame([0] * len(absent), absent)
ns = ns.append(other)
ns.sort_index(inplace=True)
return ns.as_matrix().squeeze()
@property
def cls(self):
return to_array_list(self._df[['id', 'class']])
@property
def occlusion(self):
return to_array_list(self._df[['id', 'occluded']])
@property
def id(self):
return to_array_list(self._df['id'])
def __len__(self):
return self.index[-1] - self.index[0] + 1
@classmethod
def _presence(cls, df, index, n_objects):
p = np.zeros((index[-1] + 1, n_objects), dtype=bool)
for i, row in df.iterrows():
p[i, row.id] = True
return p
@classmethod
def _split_on_reappear(cls, df, p, id_offset):
"""Assign a new identity to an objects that appears after disappearing previously.
Works on `df` in-place.
:param df: data frame
:param p: presence
:param id_offset: offset added to new ids
:return:
"""
next_id = id_offset + 1
added_ids = []
nt = p.sum(0)
start = np.argmax(p, 0)
end = np.argmax(np.cumsum(p, 0), 0)
diff = end - start + 1
is_contiguous = np.equal(nt, diff)
for id, contiguous in enumerate(is_contiguous):
if not contiguous:
to_change = df[df.id == id]
index = to_change.index
diff = index[1:] - index[:-1]
where = np.where(np.greater(diff, 1))[0]
for w in where:
to_change.loc[w + 1:, 'id'] = next_id
added_ids.append(next_id)
next_id += 1
df[df.id == id] = to_change
return added_ids
|
utiasSTARS/pykitti
|
pykitti/tracking.py
|
KittiTrackingLabels._split_on_reappear
|
python
|
def _split_on_reappear(cls, df, p, id_offset):
    """Assign a new identity to objects that reappear after being absent.

    Works on ``df`` in-place.

    :param cls: enclosing class (written as a classmethod body).
    :param df: label data frame with an ``id`` column, indexed by frame.
    :param p: boolean (frame, object) presence matrix.
    :param id_offset: new ids are allocated starting at ``id_offset + 1``.
    :return: list of ids that were newly created.
    """
    next_id = id_offset + 1
    added_ids = []
    # A track is contiguous iff the number of frames it appears in equals
    # last_frame - first_frame + 1.
    nt = p.sum(0)
    start = np.argmax(p, 0)
    end = np.argmax(np.cumsum(p, 0), 0)
    diff = end - start + 1
    is_contiguous = np.equal(nt, diff)
    for id, contiguous in enumerate(is_contiguous):
        if not contiguous:
            to_change = df[df.id == id]
            index = to_change.index
            # Index gaps larger than 1 mark a disappearance.
            diff = index[1:] - index[:-1]
            where = np.where(np.greater(diff, 1))[0]
            for w in where:
                # NOTE(review): `w` is a positional gap index but `.loc`
                # slices by label — confirm this targets the intended rows.
                to_change.loc[w + 1:, 'id'] = next_id
                added_ids.append(next_id)
                next_id += 1
            # Write the (copied) slice back into the source frame.
            df[df.id == id] = to_change
    return added_ids
|
Assign a new identity to an object that reappears after previously disappearing.
Works on `df` in-place.
:param df: data frame
:param p: presence
:param id_offset: offset added to new ids
:return:
|
train
|
https://github.com/utiasSTARS/pykitti/blob/d3e1bb81676e831886726cc5ed79ce1f049aef2c/pykitti/tracking.py#L342-L372
| null |
class KittiTrackingLabels(object):
"""Kitt Tracking Label parser. It can limit the maximum number of objects per track,
filter out objects with class "DontCare", or retain only those objects present
in a given frame.
"""
columns = 'id class truncated occluded alpha x1 y1 x2 y2 xd yd zd x y z roty score'.split()
classes = 'Car Van Truck Pedestrian Person_sitting Cyclist Tram Misc DontCare'.split()
def __init__(self, path_or_df, bbox_with_size=True, remove_dontcare=True, split_on_reappear=True):
if isinstance(path_or_df, pd.DataFrame):
self._df = path_or_df
else:
if not os.path.exists(path_or_df):
raise ValueError('File {} doesn\'t exist'.format(path_or_df))
self._df = pd.read_csv(path_or_df, sep=' ', header=None,
index_col=0, skip_blank_lines=True)
# Detection files have 1 more column than label files
# label file has 16 columns
# detection file has 17 columns (the last column is score)
# Here it checks the number of columns the df has and sets the
# column names on the df accordingly
self._df.columns = self.columns[:len(self._df.columns)]
self.bbox_with_size = bbox_with_size
if remove_dontcare:
self._df = self._df[self._df['class'] != 'DontCare']
for c in self._df.columns:
self._convert_type(c, np.float32, np.float64)
self._convert_type(c, np.int32, np.int64)
# TODO: Add occlusion filtering back in
truncated_threshold=(0, 2.)
occluded_threshold=(0, 3.)
# if not nest.is_sequence(occluded_threshold):
# occluded_threshold = (0, occluded_threshold)
#
# if not nest.is_sequence(truncated_threshold):
# truncated_threshold = (0, truncated_threshold)
# TODO: Add occlusion filteringinback in
# self._df = self._df[self._df['occluded'] >= occluded_threshold[0]]
# self._df = self._df[self._df['occluded'] <= occluded_threshold[1]]
#
# self._df = self._df[self._df['truncated'] >= truncated_threshold[0]]
# self._df = self._df[self._df['truncated'] <= truncated_threshold[1]]
# make 0-based contiguous ids
ids = self._df.id.unique()
offset = max(ids) + 1
id_map = {id: new_id for id, new_id in zip(ids, np.arange(offset, len(ids) + offset))}
self._df.replace({'id': id_map}, inplace=True)
self._df.id -= offset
self.ids = list(self._df.id.unique())
self.max_objects = len(self.ids)
self.index = self._df.index.unique()
if split_on_reappear:
added_ids = self._split_on_reappear(self._df, self.presence, self.ids[-1])
self.ids.extend(added_ids)
self.max_objects += len(added_ids)
def _convert_type(self, column, dest_type, only_from_type=None):
cond = only_from_type is None or self._df[column].dtype == only_from_type
if cond:
self._df[column] = self._df[column].astype(dest_type)
@property
def bbox(self):
bbox = self._df[['id', 'x1', 'y1', 'x2', 'y2']].copy()
# TODO: Fix this to become x, y, w, h
if self.bbox_with_size:
bbox['y2'] -= bbox['y1']
bbox['x2'] -= bbox['x1']
"""Converts a dataframe to a list of arrays
:param df:
:param length:
:return:
"""
return to_array_list(bbox)
@property
def presence(self):
return self._presence(self._df, self.index, self.max_objects)
@property
def num_objects(self):
ns = self._df.id.groupby(self._df.index).count()
absent = list(set(range(len(self))) - set(self.index))
other = pd.DataFrame([0] * len(absent), absent)
ns = ns.append(other)
ns.sort_index(inplace=True)
return ns.as_matrix().squeeze()
@property
def cls(self):
return to_array_list(self._df[['id', 'class']])
@property
def occlusion(self):
return to_array_list(self._df[['id', 'occluded']])
@property
def id(self):
return to_array_list(self._df['id'])
def __len__(self):
return self.index[-1] - self.index[0] + 1
@classmethod
def _presence(cls, df, index, n_objects):
p = np.zeros((index[-1] + 1, n_objects), dtype=bool)
for i, row in df.iterrows():
p[i, row.id] = True
return p
@classmethod
|
utiasSTARS/pykitti
|
pykitti/odometry.py
|
odometry._get_file_lists
|
python
|
def _get_file_lists(self):
    """Find and list data files for each sensor.

    Populates the sorted per-camera image path lists and the velodyne scan
    list for this sequence, then narrows every list to ``self.frames`` when
    a frame subset was requested.
    """
    def _sorted_glob(subdir, pattern):
        # One directory per sensor under the sequence folder.
        return sorted(glob.glob(
            os.path.join(self.sequence_path, subdir, pattern)))

    image_pattern = '*.{}'.format(self.imtype)
    self.cam0_files = _sorted_glob('image_0', image_pattern)
    self.cam1_files = _sorted_glob('image_1', image_pattern)
    self.cam2_files = _sorted_glob('image_2', image_pattern)
    self.cam3_files = _sorted_glob('image_3', image_pattern)
    self.velo_files = _sorted_glob('velodyne', '*.bin')

    # Subselect the chosen range of frames, if any
    if self.frames is not None:
        self.cam0_files = utils.subselect_files(self.cam0_files, self.frames)
        self.cam1_files = utils.subselect_files(self.cam1_files, self.frames)
        self.cam2_files = utils.subselect_files(self.cam2_files, self.frames)
        self.cam3_files = utils.subselect_files(self.cam3_files, self.frames)
        self.velo_files = utils.subselect_files(self.velo_files, self.frames)
|
Find and list data files for each sensor.
|
train
|
https://github.com/utiasSTARS/pykitti/blob/d3e1bb81676e831886726cc5ed79ce1f049aef2c/pykitti/odometry.py#L114-L143
|
[
"def subselect_files(files, indices):\n try:\n files = [files[i] for i in indices]\n except:\n pass\n return files\n"
] |
class odometry:
    """Load and parse odometry benchmark data into a usable format."""

    def __init__(self, base_path, sequence, **kwargs):
        """Set the path.

        :param base_path: root directory holding 'sequences' and 'poses'.
        :param sequence: sequence name/number used in the directory layout.
        :param kwargs: optional ``frames`` (iterable of frame indices to
            keep) and ``imtype`` (image file extension, default 'png').
        """
        self.sequence = sequence
        self.sequence_path = os.path.join(base_path, 'sequences', sequence)
        self.pose_path = os.path.join(base_path, 'poses')
        self.frames = kwargs.get('frames', None)

        # Default image file extension is 'png'
        self.imtype = kwargs.get('imtype', 'png')

        # Find all the data files
        # NOTE(review): `_get_file_lists` is not part of this excerpt; it is
        # defined alongside this class in the original module.
        self._get_file_lists()

        # Pre-load data that isn't returned as a generator
        self._load_calib()
        self._load_timestamps()
        self._load_poses()

    def __len__(self):
        """Return the number of frames loaded."""
        return len(self.timestamps)

    @property
    def cam0(self):
        """Generator to read image files for cam0 (monochrome left)."""
        return utils.yield_images(self.cam0_files, mode='L')

    def get_cam0(self, idx):
        """Read image file for cam0 (monochrome left) at the specified index."""
        return utils.load_image(self.cam0_files[idx], mode='L')

    @property
    def cam1(self):
        """Generator to read image files for cam1 (monochrome right)."""
        return utils.yield_images(self.cam1_files, mode='L')

    def get_cam1(self, idx):
        """Read image file for cam1 (monochrome right) at the specified index."""
        return utils.load_image(self.cam1_files[idx], mode='L')

    @property
    def cam2(self):
        """Generator to read image files for cam2 (RGB left)."""
        return utils.yield_images(self.cam2_files, mode='RGB')

    def get_cam2(self, idx):
        """Read image file for cam2 (RGB left) at the specified index."""
        return utils.load_image(self.cam2_files[idx], mode='RGB')

    @property
    def cam3(self):
        """Generator to read image files for cam0 (RGB right)."""
        return utils.yield_images(self.cam3_files, mode='RGB')

    def get_cam3(self, idx):
        """Read image file for cam3 (RGB right) at the specified index."""
        return utils.load_image(self.cam3_files[idx], mode='RGB')

    @property
    def gray(self):
        """Generator to read monochrome stereo pairs from file.
        """
        return zip(self.cam0, self.cam1)

    def get_gray(self, idx):
        """Read monochrome stereo pair at the specified index."""
        return (self.get_cam0(idx), self.get_cam1(idx))

    @property
    def rgb(self):
        """Generator to read RGB stereo pairs from file.
        """
        return zip(self.cam2, self.cam3)

    def get_rgb(self, idx):
        """Read RGB stereo pair at the specified index."""
        return (self.get_cam2(idx), self.get_cam3(idx))

    @property
    def velo(self):
        """Generator to read velodyne [x,y,z,reflectance] scan data from binary files."""
        # Return a generator yielding Velodyne scans.
        # Each scan is a Nx4 array of [x,y,z,reflectance]
        return utils.yield_velo_scans(self.velo_files)

    def get_velo(self, idx):
        """Read velodyne [x,y,z,reflectance] scan at the specified index."""
        return utils.load_velo_scan(self.velo_files[idx])

    def _load_calib(self):
        """Load and compute intrinsic and extrinsic calibration parameters."""
        # We'll build the calibration parameters as a dictionary, then
        # convert it to a namedtuple to prevent it from being modified later
        data = {}

        # Load the calibration file
        calib_filepath = os.path.join(self.sequence_path, 'calib.txt')
        filedata = utils.read_calib_file(calib_filepath)

        # Create 3x4 projection matrices
        P_rect_00 = np.reshape(filedata['P0'], (3, 4))
        P_rect_10 = np.reshape(filedata['P1'], (3, 4))
        P_rect_20 = np.reshape(filedata['P2'], (3, 4))
        P_rect_30 = np.reshape(filedata['P3'], (3, 4))

        data['P_rect_00'] = P_rect_00
        data['P_rect_10'] = P_rect_10
        data['P_rect_20'] = P_rect_20
        data['P_rect_30'] = P_rect_30

        # Compute the rectified extrinsics from cam0 to camN
        T1 = np.eye(4)
        T1[0, 3] = P_rect_10[0, 3] / P_rect_10[0, 0]
        T2 = np.eye(4)
        T2[0, 3] = P_rect_20[0, 3] / P_rect_20[0, 0]
        T3 = np.eye(4)
        T3[0, 3] = P_rect_30[0, 3] / P_rect_30[0, 0]

        # Compute the velodyne to rectified camera coordinate transforms
        data['T_cam0_velo'] = np.reshape(filedata['Tr'], (3, 4))
        data['T_cam0_velo'] = np.vstack([data['T_cam0_velo'], [0, 0, 0, 1]])
        data['T_cam1_velo'] = T1.dot(data['T_cam0_velo'])
        data['T_cam2_velo'] = T2.dot(data['T_cam0_velo'])
        data['T_cam3_velo'] = T3.dot(data['T_cam0_velo'])

        # Compute the camera intrinsics
        data['K_cam0'] = P_rect_00[0:3, 0:3]
        data['K_cam1'] = P_rect_10[0:3, 0:3]
        data['K_cam2'] = P_rect_20[0:3, 0:3]
        data['K_cam3'] = P_rect_30[0:3, 0:3]

        # Compute the stereo baselines in meters by projecting the origin of
        # each camera frame into the velodyne frame and computing the distances
        # between them
        p_cam = np.array([0, 0, 0, 1])
        p_velo0 = np.linalg.inv(data['T_cam0_velo']).dot(p_cam)
        p_velo1 = np.linalg.inv(data['T_cam1_velo']).dot(p_cam)
        p_velo2 = np.linalg.inv(data['T_cam2_velo']).dot(p_cam)
        p_velo3 = np.linalg.inv(data['T_cam3_velo']).dot(p_cam)

        data['b_gray'] = np.linalg.norm(p_velo1 - p_velo0)  # gray baseline
        data['b_rgb'] = np.linalg.norm(p_velo3 - p_velo2)  # rgb baseline

        self.calib = namedtuple('CalibData', data.keys())(*data.values())

    def _load_timestamps(self):
        """Load timestamps from file."""
        timestamp_file = os.path.join(self.sequence_path, 'times.txt')

        # Read and parse the timestamps
        self.timestamps = []
        with open(timestamp_file, 'r') as f:
            for line in f.readlines():
                # times.txt holds one elapsed-seconds value per frame.
                t = dt.timedelta(seconds=float(line))
                self.timestamps.append(t)

        # Subselect the chosen range of frames, if any
        if self.frames is not None:
            self.timestamps = [self.timestamps[i] for i in self.frames]

    def _load_poses(self):
        """Load ground truth poses (T_w_cam0) from file."""
        pose_file = os.path.join(self.pose_path, self.sequence + '.txt')

        # Read and parse the poses
        poses = []
        try:
            with open(pose_file, 'r') as f:
                lines = f.readlines()
                if self.frames is not None:
                    lines = [lines[i] for i in self.frames]

                for line in lines:
                    # Each line is a flattened 3x4 pose; pad to 4x4 homogeneous.
                    # NOTE(review): np.fromstring with sep is deprecated in
                    # modern numpy — confirm before upgrading numpy.
                    T_w_cam0 = np.fromstring(line, dtype=float, sep=' ')
                    T_w_cam0 = T_w_cam0.reshape(3, 4)
                    T_w_cam0 = np.vstack((T_w_cam0, [0, 0, 0, 1]))
                    poses.append(T_w_cam0)

        except FileNotFoundError:
            # NOTE(review): FileNotFoundError is Python-3-only; this file
            # otherwise keeps a Python 2 shim (xrange) — confirm which
            # interpreters must stay supported.
            print('Ground truth poses are not available for sequence ' +
                  self.sequence + '.')

        self.poses = poses
|
utiasSTARS/pykitti
|
pykitti/odometry.py
|
odometry._load_calib
|
python
|
def _load_calib(self):
# We'll build the calibration parameters as a dictionary, then
# convert it to a namedtuple to prevent it from being modified later
data = {}
# Load the calibration file
calib_filepath = os.path.join(self.sequence_path, 'calib.txt')
filedata = utils.read_calib_file(calib_filepath)
# Create 3x4 projection matrices
P_rect_00 = np.reshape(filedata['P0'], (3, 4))
P_rect_10 = np.reshape(filedata['P1'], (3, 4))
P_rect_20 = np.reshape(filedata['P2'], (3, 4))
P_rect_30 = np.reshape(filedata['P3'], (3, 4))
data['P_rect_00'] = P_rect_00
data['P_rect_10'] = P_rect_10
data['P_rect_20'] = P_rect_20
data['P_rect_30'] = P_rect_30
# Compute the rectified extrinsics from cam0 to camN
T1 = np.eye(4)
T1[0, 3] = P_rect_10[0, 3] / P_rect_10[0, 0]
T2 = np.eye(4)
T2[0, 3] = P_rect_20[0, 3] / P_rect_20[0, 0]
T3 = np.eye(4)
T3[0, 3] = P_rect_30[0, 3] / P_rect_30[0, 0]
# Compute the velodyne to rectified camera coordinate transforms
data['T_cam0_velo'] = np.reshape(filedata['Tr'], (3, 4))
data['T_cam0_velo'] = np.vstack([data['T_cam0_velo'], [0, 0, 0, 1]])
data['T_cam1_velo'] = T1.dot(data['T_cam0_velo'])
data['T_cam2_velo'] = T2.dot(data['T_cam0_velo'])
data['T_cam3_velo'] = T3.dot(data['T_cam0_velo'])
# Compute the camera intrinsics
data['K_cam0'] = P_rect_00[0:3, 0:3]
data['K_cam1'] = P_rect_10[0:3, 0:3]
data['K_cam2'] = P_rect_20[0:3, 0:3]
data['K_cam3'] = P_rect_30[0:3, 0:3]
# Compute the stereo baselines in meters by projecting the origin of
# each camera frame into the velodyne frame and computing the distances
# between them
p_cam = np.array([0, 0, 0, 1])
p_velo0 = np.linalg.inv(data['T_cam0_velo']).dot(p_cam)
p_velo1 = np.linalg.inv(data['T_cam1_velo']).dot(p_cam)
p_velo2 = np.linalg.inv(data['T_cam2_velo']).dot(p_cam)
p_velo3 = np.linalg.inv(data['T_cam3_velo']).dot(p_cam)
data['b_gray'] = np.linalg.norm(p_velo1 - p_velo0) # gray baseline
data['b_rgb'] = np.linalg.norm(p_velo3 - p_velo2) # rgb baseline
self.calib = namedtuple('CalibData', data.keys())(*data.values())
|
Load and compute intrinsic and extrinsic calibration parameters.
|
train
|
https://github.com/utiasSTARS/pykitti/blob/d3e1bb81676e831886726cc5ed79ce1f049aef2c/pykitti/odometry.py#L145-L199
|
[
"def read_calib_file(filepath):\n \"\"\"Read in a calibration file and parse into a dictionary.\"\"\"\n data = {}\n\n with open(filepath, 'r') as f:\n for line in f.readlines():\n key, value = line.split(':', 1)\n # The only non-float values in these files are dates, which\n # we don't care about anyway\n try:\n data[key] = np.array([float(x) for x in value.split()])\n except ValueError:\n pass\n\n return data\n"
] |
class odometry:
"""Load and parse odometry benchmark data into a usable format."""
def __init__(self, base_path, sequence, **kwargs):
"""Set the path."""
self.sequence = sequence
self.sequence_path = os.path.join(base_path, 'sequences', sequence)
self.pose_path = os.path.join(base_path, 'poses')
self.frames = kwargs.get('frames', None)
# Default image file extension is 'png'
self.imtype = kwargs.get('imtype', 'png')
# Find all the data files
self._get_file_lists()
# Pre-load data that isn't returned as a generator
self._load_calib()
self._load_timestamps()
self._load_poses()
def __len__(self):
"""Return the number of frames loaded."""
return len(self.timestamps)
@property
def cam0(self):
"""Generator to read image files for cam0 (monochrome left)."""
return utils.yield_images(self.cam0_files, mode='L')
def get_cam0(self, idx):
"""Read image file for cam0 (monochrome left) at the specified index."""
return utils.load_image(self.cam0_files[idx], mode='L')
@property
def cam1(self):
"""Generator to read image files for cam1 (monochrome right)."""
return utils.yield_images(self.cam1_files, mode='L')
def get_cam1(self, idx):
"""Read image file for cam1 (monochrome right) at the specified index."""
return utils.load_image(self.cam1_files[idx], mode='L')
@property
def cam2(self):
"""Generator to read image files for cam2 (RGB left)."""
return utils.yield_images(self.cam2_files, mode='RGB')
def get_cam2(self, idx):
"""Read image file for cam2 (RGB left) at the specified index."""
return utils.load_image(self.cam2_files[idx], mode='RGB')
@property
def cam3(self):
"""Generator to read image files for cam0 (RGB right)."""
return utils.yield_images(self.cam3_files, mode='RGB')
def get_cam3(self, idx):
"""Read image file for cam3 (RGB right) at the specified index."""
return utils.load_image(self.cam3_files[idx], mode='RGB')
@property
def gray(self):
"""Generator to read monochrome stereo pairs from file.
"""
return zip(self.cam0, self.cam1)
def get_gray(self, idx):
"""Read monochrome stereo pair at the specified index."""
return (self.get_cam0(idx), self.get_cam1(idx))
@property
def rgb(self):
"""Generator to read RGB stereo pairs from file.
"""
return zip(self.cam2, self.cam3)
def get_rgb(self, idx):
"""Read RGB stereo pair at the specified index."""
return (self.get_cam2(idx), self.get_cam3(idx))
@property
def velo(self):
"""Generator to read velodyne [x,y,z,reflectance] scan data from binary files."""
# Return a generator yielding Velodyne scans.
# Each scan is a Nx4 array of [x,y,z,reflectance]
return utils.yield_velo_scans(self.velo_files)
def get_velo(self, idx):
"""Read velodyne [x,y,z,reflectance] scan at the specified index."""
return utils.load_velo_scan(self.velo_files[idx])
def _get_file_lists(self):
"""Find and list data files for each sensor."""
self.cam0_files = sorted(glob.glob(
os.path.join(self.sequence_path, 'image_0',
'*.{}'.format(self.imtype))))
self.cam1_files = sorted(glob.glob(
os.path.join(self.sequence_path, 'image_1',
'*.{}'.format(self.imtype))))
self.cam2_files = sorted(glob.glob(
os.path.join(self.sequence_path, 'image_2',
'*.{}'.format(self.imtype))))
self.cam3_files = sorted(glob.glob(
os.path.join(self.sequence_path, 'image_3',
'*.{}'.format(self.imtype))))
self.velo_files = sorted(glob.glob(
os.path.join(self.sequence_path, 'velodyne',
'*.bin')))
# Subselect the chosen range of frames, if any
if self.frames is not None:
self.cam0_files = utils.subselect_files(
self.cam0_files, self.frames)
self.cam1_files = utils.subselect_files(
self.cam1_files, self.frames)
self.cam2_files = utils.subselect_files(
self.cam2_files, self.frames)
self.cam3_files = utils.subselect_files(
self.cam3_files, self.frames)
self.velo_files = utils.subselect_files(
self.velo_files, self.frames)
def _load_timestamps(self):
"""Load timestamps from file."""
timestamp_file = os.path.join(self.sequence_path, 'times.txt')
# Read and parse the timestamps
self.timestamps = []
with open(timestamp_file, 'r') as f:
for line in f.readlines():
t = dt.timedelta(seconds=float(line))
self.timestamps.append(t)
# Subselect the chosen range of frames, if any
if self.frames is not None:
self.timestamps = [self.timestamps[i] for i in self.frames]
def _load_poses(self):
"""Load ground truth poses (T_w_cam0) from file."""
pose_file = os.path.join(self.pose_path, self.sequence + '.txt')
# Read and parse the poses
poses = []
try:
with open(pose_file, 'r') as f:
lines = f.readlines()
if self.frames is not None:
lines = [lines[i] for i in self.frames]
for line in lines:
T_w_cam0 = np.fromstring(line, dtype=float, sep=' ')
T_w_cam0 = T_w_cam0.reshape(3, 4)
T_w_cam0 = np.vstack((T_w_cam0, [0, 0, 0, 1]))
poses.append(T_w_cam0)
except FileNotFoundError:
print('Ground truth poses are not available for sequence ' +
self.sequence + '.')
self.poses = poses
|
utiasSTARS/pykitti
|
pykitti/odometry.py
|
odometry._load_timestamps
|
python
|
def _load_timestamps(self):
timestamp_file = os.path.join(self.sequence_path, 'times.txt')
# Read and parse the timestamps
self.timestamps = []
with open(timestamp_file, 'r') as f:
for line in f.readlines():
t = dt.timedelta(seconds=float(line))
self.timestamps.append(t)
# Subselect the chosen range of frames, if any
if self.frames is not None:
self.timestamps = [self.timestamps[i] for i in self.frames]
|
Load timestamps from file.
|
train
|
https://github.com/utiasSTARS/pykitti/blob/d3e1bb81676e831886726cc5ed79ce1f049aef2c/pykitti/odometry.py#L201-L214
| null |
class odometry:
"""Load and parse odometry benchmark data into a usable format."""
def __init__(self, base_path, sequence, **kwargs):
"""Set the path."""
self.sequence = sequence
self.sequence_path = os.path.join(base_path, 'sequences', sequence)
self.pose_path = os.path.join(base_path, 'poses')
self.frames = kwargs.get('frames', None)
# Default image file extension is 'png'
self.imtype = kwargs.get('imtype', 'png')
# Find all the data files
self._get_file_lists()
# Pre-load data that isn't returned as a generator
self._load_calib()
self._load_timestamps()
self._load_poses()
def __len__(self):
"""Return the number of frames loaded."""
return len(self.timestamps)
@property
def cam0(self):
"""Generator to read image files for cam0 (monochrome left)."""
return utils.yield_images(self.cam0_files, mode='L')
def get_cam0(self, idx):
"""Read image file for cam0 (monochrome left) at the specified index."""
return utils.load_image(self.cam0_files[idx], mode='L')
@property
def cam1(self):
"""Generator to read image files for cam1 (monochrome right)."""
return utils.yield_images(self.cam1_files, mode='L')
def get_cam1(self, idx):
"""Read image file for cam1 (monochrome right) at the specified index."""
return utils.load_image(self.cam1_files[idx], mode='L')
@property
def cam2(self):
"""Generator to read image files for cam2 (RGB left)."""
return utils.yield_images(self.cam2_files, mode='RGB')
def get_cam2(self, idx):
"""Read image file for cam2 (RGB left) at the specified index."""
return utils.load_image(self.cam2_files[idx], mode='RGB')
@property
def cam3(self):
"""Generator to read image files for cam0 (RGB right)."""
return utils.yield_images(self.cam3_files, mode='RGB')
def get_cam3(self, idx):
"""Read image file for cam3 (RGB right) at the specified index."""
return utils.load_image(self.cam3_files[idx], mode='RGB')
@property
def gray(self):
"""Generator to read monochrome stereo pairs from file.
"""
return zip(self.cam0, self.cam1)
def get_gray(self, idx):
"""Read monochrome stereo pair at the specified index."""
return (self.get_cam0(idx), self.get_cam1(idx))
@property
def rgb(self):
"""Generator to read RGB stereo pairs from file.
"""
return zip(self.cam2, self.cam3)
def get_rgb(self, idx):
"""Read RGB stereo pair at the specified index."""
return (self.get_cam2(idx), self.get_cam3(idx))
@property
def velo(self):
"""Generator to read velodyne [x,y,z,reflectance] scan data from binary files."""
# Return a generator yielding Velodyne scans.
# Each scan is a Nx4 array of [x,y,z,reflectance]
return utils.yield_velo_scans(self.velo_files)
def get_velo(self, idx):
"""Read velodyne [x,y,z,reflectance] scan at the specified index."""
return utils.load_velo_scan(self.velo_files[idx])
def _get_file_lists(self):
"""Find and list data files for each sensor."""
self.cam0_files = sorted(glob.glob(
os.path.join(self.sequence_path, 'image_0',
'*.{}'.format(self.imtype))))
self.cam1_files = sorted(glob.glob(
os.path.join(self.sequence_path, 'image_1',
'*.{}'.format(self.imtype))))
self.cam2_files = sorted(glob.glob(
os.path.join(self.sequence_path, 'image_2',
'*.{}'.format(self.imtype))))
self.cam3_files = sorted(glob.glob(
os.path.join(self.sequence_path, 'image_3',
'*.{}'.format(self.imtype))))
self.velo_files = sorted(glob.glob(
os.path.join(self.sequence_path, 'velodyne',
'*.bin')))
# Subselect the chosen range of frames, if any
if self.frames is not None:
self.cam0_files = utils.subselect_files(
self.cam0_files, self.frames)
self.cam1_files = utils.subselect_files(
self.cam1_files, self.frames)
self.cam2_files = utils.subselect_files(
self.cam2_files, self.frames)
self.cam3_files = utils.subselect_files(
self.cam3_files, self.frames)
self.velo_files = utils.subselect_files(
self.velo_files, self.frames)
def _load_calib(self):
"""Load and compute intrinsic and extrinsic calibration parameters."""
# We'll build the calibration parameters as a dictionary, then
# convert it to a namedtuple to prevent it from being modified later
data = {}
# Load the calibration file
calib_filepath = os.path.join(self.sequence_path, 'calib.txt')
filedata = utils.read_calib_file(calib_filepath)
# Create 3x4 projection matrices
P_rect_00 = np.reshape(filedata['P0'], (3, 4))
P_rect_10 = np.reshape(filedata['P1'], (3, 4))
P_rect_20 = np.reshape(filedata['P2'], (3, 4))
P_rect_30 = np.reshape(filedata['P3'], (3, 4))
data['P_rect_00'] = P_rect_00
data['P_rect_10'] = P_rect_10
data['P_rect_20'] = P_rect_20
data['P_rect_30'] = P_rect_30
# Compute the rectified extrinsics from cam0 to camN
T1 = np.eye(4)
T1[0, 3] = P_rect_10[0, 3] / P_rect_10[0, 0]
T2 = np.eye(4)
T2[0, 3] = P_rect_20[0, 3] / P_rect_20[0, 0]
T3 = np.eye(4)
T3[0, 3] = P_rect_30[0, 3] / P_rect_30[0, 0]
# Compute the velodyne to rectified camera coordinate transforms
data['T_cam0_velo'] = np.reshape(filedata['Tr'], (3, 4))
data['T_cam0_velo'] = np.vstack([data['T_cam0_velo'], [0, 0, 0, 1]])
data['T_cam1_velo'] = T1.dot(data['T_cam0_velo'])
data['T_cam2_velo'] = T2.dot(data['T_cam0_velo'])
data['T_cam3_velo'] = T3.dot(data['T_cam0_velo'])
# Compute the camera intrinsics
data['K_cam0'] = P_rect_00[0:3, 0:3]
data['K_cam1'] = P_rect_10[0:3, 0:3]
data['K_cam2'] = P_rect_20[0:3, 0:3]
data['K_cam3'] = P_rect_30[0:3, 0:3]
# Compute the stereo baselines in meters by projecting the origin of
# each camera frame into the velodyne frame and computing the distances
# between them
p_cam = np.array([0, 0, 0, 1])
p_velo0 = np.linalg.inv(data['T_cam0_velo']).dot(p_cam)
p_velo1 = np.linalg.inv(data['T_cam1_velo']).dot(p_cam)
p_velo2 = np.linalg.inv(data['T_cam2_velo']).dot(p_cam)
p_velo3 = np.linalg.inv(data['T_cam3_velo']).dot(p_cam)
data['b_gray'] = np.linalg.norm(p_velo1 - p_velo0) # gray baseline
data['b_rgb'] = np.linalg.norm(p_velo3 - p_velo2) # rgb baseline
self.calib = namedtuple('CalibData', data.keys())(*data.values())
def _load_poses(self):
"""Load ground truth poses (T_w_cam0) from file."""
pose_file = os.path.join(self.pose_path, self.sequence + '.txt')
# Read and parse the poses
poses = []
try:
with open(pose_file, 'r') as f:
lines = f.readlines()
if self.frames is not None:
lines = [lines[i] for i in self.frames]
for line in lines:
T_w_cam0 = np.fromstring(line, dtype=float, sep=' ')
T_w_cam0 = T_w_cam0.reshape(3, 4)
T_w_cam0 = np.vstack((T_w_cam0, [0, 0, 0, 1]))
poses.append(T_w_cam0)
except FileNotFoundError:
print('Ground truth poses are not available for sequence ' +
self.sequence + '.')
self.poses = poses
|
utiasSTARS/pykitti
|
pykitti/odometry.py
|
odometry._load_poses
|
python
|
def _load_poses(self):
pose_file = os.path.join(self.pose_path, self.sequence + '.txt')
# Read and parse the poses
poses = []
try:
with open(pose_file, 'r') as f:
lines = f.readlines()
if self.frames is not None:
lines = [lines[i] for i in self.frames]
for line in lines:
T_w_cam0 = np.fromstring(line, dtype=float, sep=' ')
T_w_cam0 = T_w_cam0.reshape(3, 4)
T_w_cam0 = np.vstack((T_w_cam0, [0, 0, 0, 1]))
poses.append(T_w_cam0)
except FileNotFoundError:
print('Ground truth poses are not available for sequence ' +
self.sequence + '.')
self.poses = poses
|
Load ground truth poses (T_w_cam0) from file.
|
train
|
https://github.com/utiasSTARS/pykitti/blob/d3e1bb81676e831886726cc5ed79ce1f049aef2c/pykitti/odometry.py#L216-L238
| null |
class odometry:
"""Load and parse odometry benchmark data into a usable format."""
def __init__(self, base_path, sequence, **kwargs):
"""Set the path."""
self.sequence = sequence
self.sequence_path = os.path.join(base_path, 'sequences', sequence)
self.pose_path = os.path.join(base_path, 'poses')
self.frames = kwargs.get('frames', None)
# Default image file extension is 'png'
self.imtype = kwargs.get('imtype', 'png')
# Find all the data files
self._get_file_lists()
# Pre-load data that isn't returned as a generator
self._load_calib()
self._load_timestamps()
self._load_poses()
def __len__(self):
"""Return the number of frames loaded."""
return len(self.timestamps)
@property
def cam0(self):
"""Generator to read image files for cam0 (monochrome left)."""
return utils.yield_images(self.cam0_files, mode='L')
def get_cam0(self, idx):
"""Read image file for cam0 (monochrome left) at the specified index."""
return utils.load_image(self.cam0_files[idx], mode='L')
@property
def cam1(self):
"""Generator to read image files for cam1 (monochrome right)."""
return utils.yield_images(self.cam1_files, mode='L')
def get_cam1(self, idx):
"""Read image file for cam1 (monochrome right) at the specified index."""
return utils.load_image(self.cam1_files[idx], mode='L')
@property
def cam2(self):
"""Generator to read image files for cam2 (RGB left)."""
return utils.yield_images(self.cam2_files, mode='RGB')
def get_cam2(self, idx):
"""Read image file for cam2 (RGB left) at the specified index."""
return utils.load_image(self.cam2_files[idx], mode='RGB')
@property
def cam3(self):
"""Generator to read image files for cam0 (RGB right)."""
return utils.yield_images(self.cam3_files, mode='RGB')
def get_cam3(self, idx):
"""Read image file for cam3 (RGB right) at the specified index."""
return utils.load_image(self.cam3_files[idx], mode='RGB')
@property
def gray(self):
"""Generator to read monochrome stereo pairs from file.
"""
return zip(self.cam0, self.cam1)
def get_gray(self, idx):
"""Read monochrome stereo pair at the specified index."""
return (self.get_cam0(idx), self.get_cam1(idx))
@property
def rgb(self):
"""Generator to read RGB stereo pairs from file.
"""
return zip(self.cam2, self.cam3)
def get_rgb(self, idx):
"""Read RGB stereo pair at the specified index."""
return (self.get_cam2(idx), self.get_cam3(idx))
@property
def velo(self):
"""Generator to read velodyne [x,y,z,reflectance] scan data from binary files."""
# Return a generator yielding Velodyne scans.
# Each scan is a Nx4 array of [x,y,z,reflectance]
return utils.yield_velo_scans(self.velo_files)
def get_velo(self, idx):
"""Read velodyne [x,y,z,reflectance] scan at the specified index."""
return utils.load_velo_scan(self.velo_files[idx])
def _get_file_lists(self):
"""Find and list data files for each sensor."""
self.cam0_files = sorted(glob.glob(
os.path.join(self.sequence_path, 'image_0',
'*.{}'.format(self.imtype))))
self.cam1_files = sorted(glob.glob(
os.path.join(self.sequence_path, 'image_1',
'*.{}'.format(self.imtype))))
self.cam2_files = sorted(glob.glob(
os.path.join(self.sequence_path, 'image_2',
'*.{}'.format(self.imtype))))
self.cam3_files = sorted(glob.glob(
os.path.join(self.sequence_path, 'image_3',
'*.{}'.format(self.imtype))))
self.velo_files = sorted(glob.glob(
os.path.join(self.sequence_path, 'velodyne',
'*.bin')))
# Subselect the chosen range of frames, if any
if self.frames is not None:
self.cam0_files = utils.subselect_files(
self.cam0_files, self.frames)
self.cam1_files = utils.subselect_files(
self.cam1_files, self.frames)
self.cam2_files = utils.subselect_files(
self.cam2_files, self.frames)
self.cam3_files = utils.subselect_files(
self.cam3_files, self.frames)
self.velo_files = utils.subselect_files(
self.velo_files, self.frames)
def _load_calib(self):
"""Load and compute intrinsic and extrinsic calibration parameters."""
# We'll build the calibration parameters as a dictionary, then
# convert it to a namedtuple to prevent it from being modified later
data = {}
# Load the calibration file
calib_filepath = os.path.join(self.sequence_path, 'calib.txt')
filedata = utils.read_calib_file(calib_filepath)
# Create 3x4 projection matrices
P_rect_00 = np.reshape(filedata['P0'], (3, 4))
P_rect_10 = np.reshape(filedata['P1'], (3, 4))
P_rect_20 = np.reshape(filedata['P2'], (3, 4))
P_rect_30 = np.reshape(filedata['P3'], (3, 4))
data['P_rect_00'] = P_rect_00
data['P_rect_10'] = P_rect_10
data['P_rect_20'] = P_rect_20
data['P_rect_30'] = P_rect_30
# Compute the rectified extrinsics from cam0 to camN
T1 = np.eye(4)
T1[0, 3] = P_rect_10[0, 3] / P_rect_10[0, 0]
T2 = np.eye(4)
T2[0, 3] = P_rect_20[0, 3] / P_rect_20[0, 0]
T3 = np.eye(4)
T3[0, 3] = P_rect_30[0, 3] / P_rect_30[0, 0]
# Compute the velodyne to rectified camera coordinate transforms
data['T_cam0_velo'] = np.reshape(filedata['Tr'], (3, 4))
data['T_cam0_velo'] = np.vstack([data['T_cam0_velo'], [0, 0, 0, 1]])
data['T_cam1_velo'] = T1.dot(data['T_cam0_velo'])
data['T_cam2_velo'] = T2.dot(data['T_cam0_velo'])
data['T_cam3_velo'] = T3.dot(data['T_cam0_velo'])
# Compute the camera intrinsics
data['K_cam0'] = P_rect_00[0:3, 0:3]
data['K_cam1'] = P_rect_10[0:3, 0:3]
data['K_cam2'] = P_rect_20[0:3, 0:3]
data['K_cam3'] = P_rect_30[0:3, 0:3]
# Compute the stereo baselines in meters by projecting the origin of
# each camera frame into the velodyne frame and computing the distances
# between them
p_cam = np.array([0, 0, 0, 1])
p_velo0 = np.linalg.inv(data['T_cam0_velo']).dot(p_cam)
p_velo1 = np.linalg.inv(data['T_cam1_velo']).dot(p_cam)
p_velo2 = np.linalg.inv(data['T_cam2_velo']).dot(p_cam)
p_velo3 = np.linalg.inv(data['T_cam3_velo']).dot(p_cam)
data['b_gray'] = np.linalg.norm(p_velo1 - p_velo0) # gray baseline
data['b_rgb'] = np.linalg.norm(p_velo3 - p_velo2) # rgb baseline
self.calib = namedtuple('CalibData', data.keys())(*data.values())
def _load_timestamps(self):
"""Load timestamps from file."""
timestamp_file = os.path.join(self.sequence_path, 'times.txt')
# Read and parse the timestamps
self.timestamps = []
with open(timestamp_file, 'r') as f:
for line in f.readlines():
t = dt.timedelta(seconds=float(line))
self.timestamps.append(t)
# Subselect the chosen range of frames, if any
if self.frames is not None:
self.timestamps = [self.timestamps[i] for i in self.frames]
|
utiasSTARS/pykitti
|
pykitti/utils.py
|
rotx
|
python
|
def rotx(t):
c = np.cos(t)
s = np.sin(t)
return np.array([[1, 0, 0],
[0, c, -s],
[0, s, c]])
|
Rotation about the x-axis.
|
train
|
https://github.com/utiasSTARS/pykitti/blob/d3e1bb81676e831886726cc5ed79ce1f049aef2c/pykitti/utils.py#L34-L40
| null |
"""Provides helper methods for loading and parsing KITTI data."""
from collections import namedtuple
import numpy as np
from PIL import Image
__author__ = "Lee Clement"
__email__ = "lee.clement@robotics.utias.utoronto.ca"
# Per dataformat.txt
OxtsPacket = namedtuple('OxtsPacket',
'lat, lon, alt, ' +
'roll, pitch, yaw, ' +
'vn, ve, vf, vl, vu, ' +
'ax, ay, az, af, al, au, ' +
'wx, wy, wz, wf, wl, wu, ' +
'pos_accuracy, vel_accuracy, ' +
'navstat, numsats, ' +
'posmode, velmode, orimode')
# Bundle into an easy-to-access structure
OxtsData = namedtuple('OxtsData', 'packet, T_w_imu')
def subselect_files(files, indices):
try:
files = [files[i] for i in indices]
except:
pass
return files
def roty(t):
"""Rotation about the y-axis."""
c = np.cos(t)
s = np.sin(t)
return np.array([[c, 0, s],
[0, 1, 0],
[-s, 0, c]])
def rotz(t):
"""Rotation about the z-axis."""
c = np.cos(t)
s = np.sin(t)
return np.array([[c, -s, 0],
[s, c, 0],
[0, 0, 1]])
def transform_from_rot_trans(R, t):
"""Transforation matrix from rotation matrix and translation vector."""
R = R.reshape(3, 3)
t = t.reshape(3, 1)
return np.vstack((np.hstack([R, t]), [0, 0, 0, 1]))
def read_calib_file(filepath):
"""Read in a calibration file and parse into a dictionary."""
data = {}
with open(filepath, 'r') as f:
for line in f.readlines():
key, value = line.split(':', 1)
# The only non-float values in these files are dates, which
# we don't care about anyway
try:
data[key] = np.array([float(x) for x in value.split()])
except ValueError:
pass
return data
def pose_from_oxts_packet(packet, scale):
"""Helper method to compute a SE(3) pose matrix from an OXTS packet.
"""
er = 6378137. # earth radius (approx.) in meters
# Use a Mercator projection to get the translation vector
tx = scale * packet.lon * np.pi * er / 180.
ty = scale * er * \
np.log(np.tan((90. + packet.lat) * np.pi / 360.))
tz = packet.alt
t = np.array([tx, ty, tz])
# Use the Euler angles to get the rotation matrix
Rx = rotx(packet.roll)
Ry = roty(packet.pitch)
Rz = rotz(packet.yaw)
R = Rz.dot(Ry.dot(Rx))
# Combine the translation and rotation into a homogeneous transform
return R, t
def load_oxts_packets_and_poses(oxts_files):
"""Generator to read OXTS ground truth data.
Poses are given in an East-North-Up coordinate system
whose origin is the first GPS position.
"""
# Scale for Mercator projection (from first lat value)
scale = None
# Origin of the global coordinate system (first GPS position)
origin = None
oxts = []
for filename in oxts_files:
with open(filename, 'r') as f:
for line in f.readlines():
line = line.split()
# Last five entries are flags and counts
line[:-5] = [float(x) for x in line[:-5]]
line[-5:] = [int(float(x)) for x in line[-5:]]
packet = OxtsPacket(*line)
if scale is None:
scale = np.cos(packet.lat * np.pi / 180.)
R, t = pose_from_oxts_packet(packet, scale)
if origin is None:
origin = t
T_w_imu = transform_from_rot_trans(R, t - origin)
oxts.append(OxtsData(packet, T_w_imu))
return oxts
def load_image(file, mode):
"""Load an image from file."""
return Image.open(file).convert(mode)
def yield_images(imfiles, mode):
"""Generator to read image files."""
for file in imfiles:
yield load_image(file, mode)
def load_velo_scan(file):
"""Load and parse a velodyne binary file."""
scan = np.fromfile(file, dtype=np.float32)
return scan.reshape((-1, 4))
def yield_velo_scans(velo_files):
"""Generator to parse velodyne binary files into arrays."""
for file in velo_files:
yield load_velo_scan(file)
|
utiasSTARS/pykitti
|
pykitti/utils.py
|
roty
|
python
|
def roty(t):
c = np.cos(t)
s = np.sin(t)
return np.array([[c, 0, s],
[0, 1, 0],
[-s, 0, c]])
|
Rotation about the y-axis.
|
train
|
https://github.com/utiasSTARS/pykitti/blob/d3e1bb81676e831886726cc5ed79ce1f049aef2c/pykitti/utils.py#L43-L49
| null |
"""Provides helper methods for loading and parsing KITTI data."""
from collections import namedtuple
import numpy as np
from PIL import Image
__author__ = "Lee Clement"
__email__ = "lee.clement@robotics.utias.utoronto.ca"
# Per dataformat.txt
OxtsPacket = namedtuple('OxtsPacket',
'lat, lon, alt, ' +
'roll, pitch, yaw, ' +
'vn, ve, vf, vl, vu, ' +
'ax, ay, az, af, al, au, ' +
'wx, wy, wz, wf, wl, wu, ' +
'pos_accuracy, vel_accuracy, ' +
'navstat, numsats, ' +
'posmode, velmode, orimode')
# Bundle into an easy-to-access structure
OxtsData = namedtuple('OxtsData', 'packet, T_w_imu')
def subselect_files(files, indices):
try:
files = [files[i] for i in indices]
except:
pass
return files
def rotx(t):
"""Rotation about the x-axis."""
c = np.cos(t)
s = np.sin(t)
return np.array([[1, 0, 0],
[0, c, -s],
[0, s, c]])
def rotz(t):
"""Rotation about the z-axis."""
c = np.cos(t)
s = np.sin(t)
return np.array([[c, -s, 0],
[s, c, 0],
[0, 0, 1]])
def transform_from_rot_trans(R, t):
"""Transforation matrix from rotation matrix and translation vector."""
R = R.reshape(3, 3)
t = t.reshape(3, 1)
return np.vstack((np.hstack([R, t]), [0, 0, 0, 1]))
def read_calib_file(filepath):
"""Read in a calibration file and parse into a dictionary."""
data = {}
with open(filepath, 'r') as f:
for line in f.readlines():
key, value = line.split(':', 1)
# The only non-float values in these files are dates, which
# we don't care about anyway
try:
data[key] = np.array([float(x) for x in value.split()])
except ValueError:
pass
return data
def pose_from_oxts_packet(packet, scale):
"""Helper method to compute a SE(3) pose matrix from an OXTS packet.
"""
er = 6378137. # earth radius (approx.) in meters
# Use a Mercator projection to get the translation vector
tx = scale * packet.lon * np.pi * er / 180.
ty = scale * er * \
np.log(np.tan((90. + packet.lat) * np.pi / 360.))
tz = packet.alt
t = np.array([tx, ty, tz])
# Use the Euler angles to get the rotation matrix
Rx = rotx(packet.roll)
Ry = roty(packet.pitch)
Rz = rotz(packet.yaw)
R = Rz.dot(Ry.dot(Rx))
# Combine the translation and rotation into a homogeneous transform
return R, t
def load_oxts_packets_and_poses(oxts_files):
"""Generator to read OXTS ground truth data.
Poses are given in an East-North-Up coordinate system
whose origin is the first GPS position.
"""
# Scale for Mercator projection (from first lat value)
scale = None
# Origin of the global coordinate system (first GPS position)
origin = None
oxts = []
for filename in oxts_files:
with open(filename, 'r') as f:
for line in f.readlines():
line = line.split()
# Last five entries are flags and counts
line[:-5] = [float(x) for x in line[:-5]]
line[-5:] = [int(float(x)) for x in line[-5:]]
packet = OxtsPacket(*line)
if scale is None:
scale = np.cos(packet.lat * np.pi / 180.)
R, t = pose_from_oxts_packet(packet, scale)
if origin is None:
origin = t
T_w_imu = transform_from_rot_trans(R, t - origin)
oxts.append(OxtsData(packet, T_w_imu))
return oxts
def load_image(file, mode):
"""Load an image from file."""
return Image.open(file).convert(mode)
def yield_images(imfiles, mode):
"""Generator to read image files."""
for file in imfiles:
yield load_image(file, mode)
def load_velo_scan(file):
"""Load and parse a velodyne binary file."""
scan = np.fromfile(file, dtype=np.float32)
return scan.reshape((-1, 4))
def yield_velo_scans(velo_files):
"""Generator to parse velodyne binary files into arrays."""
for file in velo_files:
yield load_velo_scan(file)
|
utiasSTARS/pykitti
|
pykitti/utils.py
|
rotz
|
python
|
def rotz(t):
c = np.cos(t)
s = np.sin(t)
return np.array([[c, -s, 0],
[s, c, 0],
[0, 0, 1]])
|
Rotation about the z-axis.
|
train
|
https://github.com/utiasSTARS/pykitti/blob/d3e1bb81676e831886726cc5ed79ce1f049aef2c/pykitti/utils.py#L52-L58
| null |
"""Provides helper methods for loading and parsing KITTI data."""
from collections import namedtuple
import numpy as np
from PIL import Image
__author__ = "Lee Clement"
__email__ = "lee.clement@robotics.utias.utoronto.ca"
# Per dataformat.txt
OxtsPacket = namedtuple('OxtsPacket',
'lat, lon, alt, ' +
'roll, pitch, yaw, ' +
'vn, ve, vf, vl, vu, ' +
'ax, ay, az, af, al, au, ' +
'wx, wy, wz, wf, wl, wu, ' +
'pos_accuracy, vel_accuracy, ' +
'navstat, numsats, ' +
'posmode, velmode, orimode')
# Bundle into an easy-to-access structure
OxtsData = namedtuple('OxtsData', 'packet, T_w_imu')
def subselect_files(files, indices):
try:
files = [files[i] for i in indices]
except:
pass
return files
def rotx(t):
"""Rotation about the x-axis."""
c = np.cos(t)
s = np.sin(t)
return np.array([[1, 0, 0],
[0, c, -s],
[0, s, c]])
def roty(t):
"""Rotation about the y-axis."""
c = np.cos(t)
s = np.sin(t)
return np.array([[c, 0, s],
[0, 1, 0],
[-s, 0, c]])
def transform_from_rot_trans(R, t):
"""Transforation matrix from rotation matrix and translation vector."""
R = R.reshape(3, 3)
t = t.reshape(3, 1)
return np.vstack((np.hstack([R, t]), [0, 0, 0, 1]))
def read_calib_file(filepath):
"""Read in a calibration file and parse into a dictionary."""
data = {}
with open(filepath, 'r') as f:
for line in f.readlines():
key, value = line.split(':', 1)
# The only non-float values in these files are dates, which
# we don't care about anyway
try:
data[key] = np.array([float(x) for x in value.split()])
except ValueError:
pass
return data
def pose_from_oxts_packet(packet, scale):
"""Helper method to compute a SE(3) pose matrix from an OXTS packet.
"""
er = 6378137. # earth radius (approx.) in meters
# Use a Mercator projection to get the translation vector
tx = scale * packet.lon * np.pi * er / 180.
ty = scale * er * \
np.log(np.tan((90. + packet.lat) * np.pi / 360.))
tz = packet.alt
t = np.array([tx, ty, tz])
# Use the Euler angles to get the rotation matrix
Rx = rotx(packet.roll)
Ry = roty(packet.pitch)
Rz = rotz(packet.yaw)
R = Rz.dot(Ry.dot(Rx))
# Combine the translation and rotation into a homogeneous transform
return R, t
def load_oxts_packets_and_poses(oxts_files):
"""Generator to read OXTS ground truth data.
Poses are given in an East-North-Up coordinate system
whose origin is the first GPS position.
"""
# Scale for Mercator projection (from first lat value)
scale = None
# Origin of the global coordinate system (first GPS position)
origin = None
oxts = []
for filename in oxts_files:
with open(filename, 'r') as f:
for line in f.readlines():
line = line.split()
# Last five entries are flags and counts
line[:-5] = [float(x) for x in line[:-5]]
line[-5:] = [int(float(x)) for x in line[-5:]]
packet = OxtsPacket(*line)
if scale is None:
scale = np.cos(packet.lat * np.pi / 180.)
R, t = pose_from_oxts_packet(packet, scale)
if origin is None:
origin = t
T_w_imu = transform_from_rot_trans(R, t - origin)
oxts.append(OxtsData(packet, T_w_imu))
return oxts
def load_image(file, mode):
"""Load an image from file."""
return Image.open(file).convert(mode)
def yield_images(imfiles, mode):
"""Generator to read image files."""
for file in imfiles:
yield load_image(file, mode)
def load_velo_scan(file):
"""Load and parse a velodyne binary file."""
scan = np.fromfile(file, dtype=np.float32)
return scan.reshape((-1, 4))
def yield_velo_scans(velo_files):
"""Generator to parse velodyne binary files into arrays."""
for file in velo_files:
yield load_velo_scan(file)
|
utiasSTARS/pykitti
|
pykitti/utils.py
|
transform_from_rot_trans
|
python
|
def transform_from_rot_trans(R, t):
R = R.reshape(3, 3)
t = t.reshape(3, 1)
return np.vstack((np.hstack([R, t]), [0, 0, 0, 1]))
|
Transforation matrix from rotation matrix and translation vector.
|
train
|
https://github.com/utiasSTARS/pykitti/blob/d3e1bb81676e831886726cc5ed79ce1f049aef2c/pykitti/utils.py#L61-L65
| null |
"""Provides helper methods for loading and parsing KITTI data."""
from collections import namedtuple
import numpy as np
from PIL import Image
__author__ = "Lee Clement"
__email__ = "lee.clement@robotics.utias.utoronto.ca"
# Per dataformat.txt
OxtsPacket = namedtuple('OxtsPacket',
'lat, lon, alt, ' +
'roll, pitch, yaw, ' +
'vn, ve, vf, vl, vu, ' +
'ax, ay, az, af, al, au, ' +
'wx, wy, wz, wf, wl, wu, ' +
'pos_accuracy, vel_accuracy, ' +
'navstat, numsats, ' +
'posmode, velmode, orimode')
# Bundle into an easy-to-access structure
OxtsData = namedtuple('OxtsData', 'packet, T_w_imu')
def subselect_files(files, indices):
try:
files = [files[i] for i in indices]
except:
pass
return files
def rotx(t):
"""Rotation about the x-axis."""
c = np.cos(t)
s = np.sin(t)
return np.array([[1, 0, 0],
[0, c, -s],
[0, s, c]])
def roty(t):
"""Rotation about the y-axis."""
c = np.cos(t)
s = np.sin(t)
return np.array([[c, 0, s],
[0, 1, 0],
[-s, 0, c]])
def rotz(t):
"""Rotation about the z-axis."""
c = np.cos(t)
s = np.sin(t)
return np.array([[c, -s, 0],
[s, c, 0],
[0, 0, 1]])
def read_calib_file(filepath):
"""Read in a calibration file and parse into a dictionary."""
data = {}
with open(filepath, 'r') as f:
for line in f.readlines():
key, value = line.split(':', 1)
# The only non-float values in these files are dates, which
# we don't care about anyway
try:
data[key] = np.array([float(x) for x in value.split()])
except ValueError:
pass
return data
def pose_from_oxts_packet(packet, scale):
"""Helper method to compute a SE(3) pose matrix from an OXTS packet.
"""
er = 6378137. # earth radius (approx.) in meters
# Use a Mercator projection to get the translation vector
tx = scale * packet.lon * np.pi * er / 180.
ty = scale * er * \
np.log(np.tan((90. + packet.lat) * np.pi / 360.))
tz = packet.alt
t = np.array([tx, ty, tz])
# Use the Euler angles to get the rotation matrix
Rx = rotx(packet.roll)
Ry = roty(packet.pitch)
Rz = rotz(packet.yaw)
R = Rz.dot(Ry.dot(Rx))
# Combine the translation and rotation into a homogeneous transform
return R, t
def load_oxts_packets_and_poses(oxts_files):
"""Generator to read OXTS ground truth data.
Poses are given in an East-North-Up coordinate system
whose origin is the first GPS position.
"""
# Scale for Mercator projection (from first lat value)
scale = None
# Origin of the global coordinate system (first GPS position)
origin = None
oxts = []
for filename in oxts_files:
with open(filename, 'r') as f:
for line in f.readlines():
line = line.split()
# Last five entries are flags and counts
line[:-5] = [float(x) for x in line[:-5]]
line[-5:] = [int(float(x)) for x in line[-5:]]
packet = OxtsPacket(*line)
if scale is None:
scale = np.cos(packet.lat * np.pi / 180.)
R, t = pose_from_oxts_packet(packet, scale)
if origin is None:
origin = t
T_w_imu = transform_from_rot_trans(R, t - origin)
oxts.append(OxtsData(packet, T_w_imu))
return oxts
def load_image(file, mode):
"""Load an image from file."""
return Image.open(file).convert(mode)
def yield_images(imfiles, mode):
"""Generator to read image files."""
for file in imfiles:
yield load_image(file, mode)
def load_velo_scan(file):
"""Load and parse a velodyne binary file."""
scan = np.fromfile(file, dtype=np.float32)
return scan.reshape((-1, 4))
def yield_velo_scans(velo_files):
"""Generator to parse velodyne binary files into arrays."""
for file in velo_files:
yield load_velo_scan(file)
|
utiasSTARS/pykitti
|
pykitti/utils.py
|
read_calib_file
|
python
|
def read_calib_file(filepath):
data = {}
with open(filepath, 'r') as f:
for line in f.readlines():
key, value = line.split(':', 1)
# The only non-float values in these files are dates, which
# we don't care about anyway
try:
data[key] = np.array([float(x) for x in value.split()])
except ValueError:
pass
return data
|
Read in a calibration file and parse into a dictionary.
|
train
|
https://github.com/utiasSTARS/pykitti/blob/d3e1bb81676e831886726cc5ed79ce1f049aef2c/pykitti/utils.py#L68-L82
| null |
"""Provides helper methods for loading and parsing KITTI data."""
from collections import namedtuple
import numpy as np
from PIL import Image
__author__ = "Lee Clement"
__email__ = "lee.clement@robotics.utias.utoronto.ca"
# Per dataformat.txt
OxtsPacket = namedtuple('OxtsPacket',
'lat, lon, alt, ' +
'roll, pitch, yaw, ' +
'vn, ve, vf, vl, vu, ' +
'ax, ay, az, af, al, au, ' +
'wx, wy, wz, wf, wl, wu, ' +
'pos_accuracy, vel_accuracy, ' +
'navstat, numsats, ' +
'posmode, velmode, orimode')
# Bundle into an easy-to-access structure
OxtsData = namedtuple('OxtsData', 'packet, T_w_imu')
def subselect_files(files, indices):
try:
files = [files[i] for i in indices]
except:
pass
return files
def rotx(t):
"""Rotation about the x-axis."""
c = np.cos(t)
s = np.sin(t)
return np.array([[1, 0, 0],
[0, c, -s],
[0, s, c]])
def roty(t):
"""Rotation about the y-axis."""
c = np.cos(t)
s = np.sin(t)
return np.array([[c, 0, s],
[0, 1, 0],
[-s, 0, c]])
def rotz(t):
"""Rotation about the z-axis."""
c = np.cos(t)
s = np.sin(t)
return np.array([[c, -s, 0],
[s, c, 0],
[0, 0, 1]])
def transform_from_rot_trans(R, t):
"""Transforation matrix from rotation matrix and translation vector."""
R = R.reshape(3, 3)
t = t.reshape(3, 1)
return np.vstack((np.hstack([R, t]), [0, 0, 0, 1]))
def pose_from_oxts_packet(packet, scale):
"""Helper method to compute a SE(3) pose matrix from an OXTS packet.
"""
er = 6378137. # earth radius (approx.) in meters
# Use a Mercator projection to get the translation vector
tx = scale * packet.lon * np.pi * er / 180.
ty = scale * er * \
np.log(np.tan((90. + packet.lat) * np.pi / 360.))
tz = packet.alt
t = np.array([tx, ty, tz])
# Use the Euler angles to get the rotation matrix
Rx = rotx(packet.roll)
Ry = roty(packet.pitch)
Rz = rotz(packet.yaw)
R = Rz.dot(Ry.dot(Rx))
# Combine the translation and rotation into a homogeneous transform
return R, t
def load_oxts_packets_and_poses(oxts_files):
"""Generator to read OXTS ground truth data.
Poses are given in an East-North-Up coordinate system
whose origin is the first GPS position.
"""
# Scale for Mercator projection (from first lat value)
scale = None
# Origin of the global coordinate system (first GPS position)
origin = None
oxts = []
for filename in oxts_files:
with open(filename, 'r') as f:
for line in f.readlines():
line = line.split()
# Last five entries are flags and counts
line[:-5] = [float(x) for x in line[:-5]]
line[-5:] = [int(float(x)) for x in line[-5:]]
packet = OxtsPacket(*line)
if scale is None:
scale = np.cos(packet.lat * np.pi / 180.)
R, t = pose_from_oxts_packet(packet, scale)
if origin is None:
origin = t
T_w_imu = transform_from_rot_trans(R, t - origin)
oxts.append(OxtsData(packet, T_w_imu))
return oxts
def load_image(file, mode):
"""Load an image from file."""
return Image.open(file).convert(mode)
def yield_images(imfiles, mode):
"""Generator to read image files."""
for file in imfiles:
yield load_image(file, mode)
def load_velo_scan(file):
"""Load and parse a velodyne binary file."""
scan = np.fromfile(file, dtype=np.float32)
return scan.reshape((-1, 4))
def yield_velo_scans(velo_files):
"""Generator to parse velodyne binary files into arrays."""
for file in velo_files:
yield load_velo_scan(file)
|
utiasSTARS/pykitti
|
pykitti/utils.py
|
pose_from_oxts_packet
|
python
|
def pose_from_oxts_packet(packet, scale):
er = 6378137. # earth radius (approx.) in meters
# Use a Mercator projection to get the translation vector
tx = scale * packet.lon * np.pi * er / 180.
ty = scale * er * \
np.log(np.tan((90. + packet.lat) * np.pi / 360.))
tz = packet.alt
t = np.array([tx, ty, tz])
# Use the Euler angles to get the rotation matrix
Rx = rotx(packet.roll)
Ry = roty(packet.pitch)
Rz = rotz(packet.yaw)
R = Rz.dot(Ry.dot(Rx))
# Combine the translation and rotation into a homogeneous transform
return R, t
|
Helper method to compute a SE(3) pose matrix from an OXTS packet.
|
train
|
https://github.com/utiasSTARS/pykitti/blob/d3e1bb81676e831886726cc5ed79ce1f049aef2c/pykitti/utils.py#L85-L104
|
[
"def rotx(t):\n \"\"\"Rotation about the x-axis.\"\"\"\n c = np.cos(t)\n s = np.sin(t)\n return np.array([[1, 0, 0],\n [0, c, -s],\n [0, s, c]])\n",
"def roty(t):\n \"\"\"Rotation about the y-axis.\"\"\"\n c = np.cos(t)\n s = np.sin(t)\n return np.array([[c, 0, s],\n [0, 1, 0],\n [-s, 0, c]])\n",
"def rotz(t):\n \"\"\"Rotation about the z-axis.\"\"\"\n c = np.cos(t)\n s = np.sin(t)\n return np.array([[c, -s, 0],\n [s, c, 0],\n [0, 0, 1]])\n"
] |
"""Provides helper methods for loading and parsing KITTI data."""
from collections import namedtuple
import numpy as np
from PIL import Image
__author__ = "Lee Clement"
__email__ = "lee.clement@robotics.utias.utoronto.ca"
# Per dataformat.txt
OxtsPacket = namedtuple('OxtsPacket',
'lat, lon, alt, ' +
'roll, pitch, yaw, ' +
'vn, ve, vf, vl, vu, ' +
'ax, ay, az, af, al, au, ' +
'wx, wy, wz, wf, wl, wu, ' +
'pos_accuracy, vel_accuracy, ' +
'navstat, numsats, ' +
'posmode, velmode, orimode')
# Bundle into an easy-to-access structure
OxtsData = namedtuple('OxtsData', 'packet, T_w_imu')
def subselect_files(files, indices):
try:
files = [files[i] for i in indices]
except:
pass
return files
def rotx(t):
"""Rotation about the x-axis."""
c = np.cos(t)
s = np.sin(t)
return np.array([[1, 0, 0],
[0, c, -s],
[0, s, c]])
def roty(t):
"""Rotation about the y-axis."""
c = np.cos(t)
s = np.sin(t)
return np.array([[c, 0, s],
[0, 1, 0],
[-s, 0, c]])
def rotz(t):
"""Rotation about the z-axis."""
c = np.cos(t)
s = np.sin(t)
return np.array([[c, -s, 0],
[s, c, 0],
[0, 0, 1]])
def transform_from_rot_trans(R, t):
"""Transforation matrix from rotation matrix and translation vector."""
R = R.reshape(3, 3)
t = t.reshape(3, 1)
return np.vstack((np.hstack([R, t]), [0, 0, 0, 1]))
def read_calib_file(filepath):
"""Read in a calibration file and parse into a dictionary."""
data = {}
with open(filepath, 'r') as f:
for line in f.readlines():
key, value = line.split(':', 1)
# The only non-float values in these files are dates, which
# we don't care about anyway
try:
data[key] = np.array([float(x) for x in value.split()])
except ValueError:
pass
return data
def load_oxts_packets_and_poses(oxts_files):
"""Generator to read OXTS ground truth data.
Poses are given in an East-North-Up coordinate system
whose origin is the first GPS position.
"""
# Scale for Mercator projection (from first lat value)
scale = None
# Origin of the global coordinate system (first GPS position)
origin = None
oxts = []
for filename in oxts_files:
with open(filename, 'r') as f:
for line in f.readlines():
line = line.split()
# Last five entries are flags and counts
line[:-5] = [float(x) for x in line[:-5]]
line[-5:] = [int(float(x)) for x in line[-5:]]
packet = OxtsPacket(*line)
if scale is None:
scale = np.cos(packet.lat * np.pi / 180.)
R, t = pose_from_oxts_packet(packet, scale)
if origin is None:
origin = t
T_w_imu = transform_from_rot_trans(R, t - origin)
oxts.append(OxtsData(packet, T_w_imu))
return oxts
def load_image(file, mode):
"""Load an image from file."""
return Image.open(file).convert(mode)
def yield_images(imfiles, mode):
"""Generator to read image files."""
for file in imfiles:
yield load_image(file, mode)
def load_velo_scan(file):
"""Load and parse a velodyne binary file."""
scan = np.fromfile(file, dtype=np.float32)
return scan.reshape((-1, 4))
def yield_velo_scans(velo_files):
"""Generator to parse velodyne binary files into arrays."""
for file in velo_files:
yield load_velo_scan(file)
|
utiasSTARS/pykitti
|
pykitti/utils.py
|
load_oxts_packets_and_poses
|
python
|
def load_oxts_packets_and_poses(oxts_files):
# Scale for Mercator projection (from first lat value)
scale = None
# Origin of the global coordinate system (first GPS position)
origin = None
oxts = []
for filename in oxts_files:
with open(filename, 'r') as f:
for line in f.readlines():
line = line.split()
# Last five entries are flags and counts
line[:-5] = [float(x) for x in line[:-5]]
line[-5:] = [int(float(x)) for x in line[-5:]]
packet = OxtsPacket(*line)
if scale is None:
scale = np.cos(packet.lat * np.pi / 180.)
R, t = pose_from_oxts_packet(packet, scale)
if origin is None:
origin = t
T_w_imu = transform_from_rot_trans(R, t - origin)
oxts.append(OxtsData(packet, T_w_imu))
return oxts
|
Generator to read OXTS ground truth data.
Poses are given in an East-North-Up coordinate system
whose origin is the first GPS position.
|
train
|
https://github.com/utiasSTARS/pykitti/blob/d3e1bb81676e831886726cc5ed79ce1f049aef2c/pykitti/utils.py#L107-L142
|
[
"def transform_from_rot_trans(R, t):\n \"\"\"Transforation matrix from rotation matrix and translation vector.\"\"\"\n R = R.reshape(3, 3)\n t = t.reshape(3, 1)\n return np.vstack((np.hstack([R, t]), [0, 0, 0, 1]))\n",
"def pose_from_oxts_packet(packet, scale):\n \"\"\"Helper method to compute a SE(3) pose matrix from an OXTS packet.\n \"\"\"\n er = 6378137. # earth radius (approx.) in meters\n\n # Use a Mercator projection to get the translation vector\n tx = scale * packet.lon * np.pi * er / 180.\n ty = scale * er * \\\n np.log(np.tan((90. + packet.lat) * np.pi / 360.))\n tz = packet.alt\n t = np.array([tx, ty, tz])\n\n # Use the Euler angles to get the rotation matrix\n Rx = rotx(packet.roll)\n Ry = roty(packet.pitch)\n Rz = rotz(packet.yaw)\n R = Rz.dot(Ry.dot(Rx))\n\n # Combine the translation and rotation into a homogeneous transform\n return R, t\n"
] |
"""Provides helper methods for loading and parsing KITTI data."""
from collections import namedtuple
import numpy as np
from PIL import Image
__author__ = "Lee Clement"
__email__ = "lee.clement@robotics.utias.utoronto.ca"
# Per dataformat.txt
OxtsPacket = namedtuple('OxtsPacket',
'lat, lon, alt, ' +
'roll, pitch, yaw, ' +
'vn, ve, vf, vl, vu, ' +
'ax, ay, az, af, al, au, ' +
'wx, wy, wz, wf, wl, wu, ' +
'pos_accuracy, vel_accuracy, ' +
'navstat, numsats, ' +
'posmode, velmode, orimode')
# Bundle into an easy-to-access structure
OxtsData = namedtuple('OxtsData', 'packet, T_w_imu')
def subselect_files(files, indices):
try:
files = [files[i] for i in indices]
except:
pass
return files
def rotx(t):
"""Rotation about the x-axis."""
c = np.cos(t)
s = np.sin(t)
return np.array([[1, 0, 0],
[0, c, -s],
[0, s, c]])
def roty(t):
"""Rotation about the y-axis."""
c = np.cos(t)
s = np.sin(t)
return np.array([[c, 0, s],
[0, 1, 0],
[-s, 0, c]])
def rotz(t):
"""Rotation about the z-axis."""
c = np.cos(t)
s = np.sin(t)
return np.array([[c, -s, 0],
[s, c, 0],
[0, 0, 1]])
def transform_from_rot_trans(R, t):
"""Transforation matrix from rotation matrix and translation vector."""
R = R.reshape(3, 3)
t = t.reshape(3, 1)
return np.vstack((np.hstack([R, t]), [0, 0, 0, 1]))
def read_calib_file(filepath):
"""Read in a calibration file and parse into a dictionary."""
data = {}
with open(filepath, 'r') as f:
for line in f.readlines():
key, value = line.split(':', 1)
# The only non-float values in these files are dates, which
# we don't care about anyway
try:
data[key] = np.array([float(x) for x in value.split()])
except ValueError:
pass
return data
def pose_from_oxts_packet(packet, scale):
"""Helper method to compute a SE(3) pose matrix from an OXTS packet.
"""
er = 6378137. # earth radius (approx.) in meters
# Use a Mercator projection to get the translation vector
tx = scale * packet.lon * np.pi * er / 180.
ty = scale * er * \
np.log(np.tan((90. + packet.lat) * np.pi / 360.))
tz = packet.alt
t = np.array([tx, ty, tz])
# Use the Euler angles to get the rotation matrix
Rx = rotx(packet.roll)
Ry = roty(packet.pitch)
Rz = rotz(packet.yaw)
R = Rz.dot(Ry.dot(Rx))
# Combine the translation and rotation into a homogeneous transform
return R, t
def load_image(file, mode):
"""Load an image from file."""
return Image.open(file).convert(mode)
def yield_images(imfiles, mode):
"""Generator to read image files."""
for file in imfiles:
yield load_image(file, mode)
def load_velo_scan(file):
"""Load and parse a velodyne binary file."""
scan = np.fromfile(file, dtype=np.float32)
return scan.reshape((-1, 4))
def yield_velo_scans(velo_files):
"""Generator to parse velodyne binary files into arrays."""
for file in velo_files:
yield load_velo_scan(file)
|
utiasSTARS/pykitti
|
pykitti/utils.py
|
load_velo_scan
|
python
|
def load_velo_scan(file):
scan = np.fromfile(file, dtype=np.float32)
return scan.reshape((-1, 4))
|
Load and parse a velodyne binary file.
|
train
|
https://github.com/utiasSTARS/pykitti/blob/d3e1bb81676e831886726cc5ed79ce1f049aef2c/pykitti/utils.py#L156-L159
| null |
"""Provides helper methods for loading and parsing KITTI data."""
from collections import namedtuple
import numpy as np
from PIL import Image
__author__ = "Lee Clement"
__email__ = "lee.clement@robotics.utias.utoronto.ca"
# Per dataformat.txt
OxtsPacket = namedtuple('OxtsPacket',
'lat, lon, alt, ' +
'roll, pitch, yaw, ' +
'vn, ve, vf, vl, vu, ' +
'ax, ay, az, af, al, au, ' +
'wx, wy, wz, wf, wl, wu, ' +
'pos_accuracy, vel_accuracy, ' +
'navstat, numsats, ' +
'posmode, velmode, orimode')
# Bundle into an easy-to-access structure
OxtsData = namedtuple('OxtsData', 'packet, T_w_imu')
def subselect_files(files, indices):
try:
files = [files[i] for i in indices]
except:
pass
return files
def rotx(t):
"""Rotation about the x-axis."""
c = np.cos(t)
s = np.sin(t)
return np.array([[1, 0, 0],
[0, c, -s],
[0, s, c]])
def roty(t):
"""Rotation about the y-axis."""
c = np.cos(t)
s = np.sin(t)
return np.array([[c, 0, s],
[0, 1, 0],
[-s, 0, c]])
def rotz(t):
"""Rotation about the z-axis."""
c = np.cos(t)
s = np.sin(t)
return np.array([[c, -s, 0],
[s, c, 0],
[0, 0, 1]])
def transform_from_rot_trans(R, t):
"""Transforation matrix from rotation matrix and translation vector."""
R = R.reshape(3, 3)
t = t.reshape(3, 1)
return np.vstack((np.hstack([R, t]), [0, 0, 0, 1]))
def read_calib_file(filepath):
"""Read in a calibration file and parse into a dictionary."""
data = {}
with open(filepath, 'r') as f:
for line in f.readlines():
key, value = line.split(':', 1)
# The only non-float values in these files are dates, which
# we don't care about anyway
try:
data[key] = np.array([float(x) for x in value.split()])
except ValueError:
pass
return data
def pose_from_oxts_packet(packet, scale):
"""Helper method to compute a SE(3) pose matrix from an OXTS packet.
"""
er = 6378137. # earth radius (approx.) in meters
# Use a Mercator projection to get the translation vector
tx = scale * packet.lon * np.pi * er / 180.
ty = scale * er * \
np.log(np.tan((90. + packet.lat) * np.pi / 360.))
tz = packet.alt
t = np.array([tx, ty, tz])
# Use the Euler angles to get the rotation matrix
Rx = rotx(packet.roll)
Ry = roty(packet.pitch)
Rz = rotz(packet.yaw)
R = Rz.dot(Ry.dot(Rx))
# Combine the translation and rotation into a homogeneous transform
return R, t
def load_oxts_packets_and_poses(oxts_files):
"""Generator to read OXTS ground truth data.
Poses are given in an East-North-Up coordinate system
whose origin is the first GPS position.
"""
# Scale for Mercator projection (from first lat value)
scale = None
# Origin of the global coordinate system (first GPS position)
origin = None
oxts = []
for filename in oxts_files:
with open(filename, 'r') as f:
for line in f.readlines():
line = line.split()
# Last five entries are flags and counts
line[:-5] = [float(x) for x in line[:-5]]
line[-5:] = [int(float(x)) for x in line[-5:]]
packet = OxtsPacket(*line)
if scale is None:
scale = np.cos(packet.lat * np.pi / 180.)
R, t = pose_from_oxts_packet(packet, scale)
if origin is None:
origin = t
T_w_imu = transform_from_rot_trans(R, t - origin)
oxts.append(OxtsData(packet, T_w_imu))
return oxts
def load_image(file, mode):
"""Load an image from file."""
return Image.open(file).convert(mode)
def yield_images(imfiles, mode):
"""Generator to read image files."""
for file in imfiles:
yield load_image(file, mode)
def yield_velo_scans(velo_files):
"""Generator to parse velodyne binary files into arrays."""
for file in velo_files:
yield load_velo_scan(file)
|
utiasSTARS/pykitti
|
pykitti/raw.py
|
raw._load_calib_rigid
|
python
|
def _load_calib_rigid(self, filename):
filepath = os.path.join(self.calib_path, filename)
data = utils.read_calib_file(filepath)
return utils.transform_from_rot_trans(data['R'], data['T'])
|
Read a rigid transform calibration file as a numpy.array.
|
train
|
https://github.com/utiasSTARS/pykitti/blob/d3e1bb81676e831886726cc5ed79ce1f049aef2c/pykitti/raw.py#L144-L148
| null |
class raw:
"""Load and parse raw data into a usable format."""
def __init__(self, base_path, date, drive, **kwargs):
"""Set the path and pre-load calibration data and timestamps."""
self.dataset = kwargs.get('dataset', 'sync')
self.drive = date + '_drive_' + drive + '_' + self.dataset
self.calib_path = os.path.join(base_path, date)
self.data_path = os.path.join(base_path, date, self.drive)
self.frames = kwargs.get('frames', None)
# Default image file extension is '.png'
self.imtype = kwargs.get('imtype', 'png')
# Find all the data files
self._get_file_lists()
# Pre-load data that isn't returned as a generator
self._load_calib()
self._load_timestamps()
self._load_oxts()
def __len__(self):
"""Return the number of frames loaded."""
return len(self.timestamps)
@property
def cam0(self):
"""Generator to read image files for cam0 (monochrome left)."""
return utils.yield_images(self.cam0_files, mode='L')
def get_cam0(self, idx):
"""Read image file for cam0 (monochrome left) at the specified index."""
return utils.load_image(self.cam0_files[idx], mode='L')
@property
def cam1(self):
"""Generator to read image files for cam1 (monochrome right)."""
return utils.yield_images(self.cam1_files, mode='L')
def get_cam1(self, idx):
"""Read image file for cam1 (monochrome right) at the specified index."""
return utils.load_image(self.cam1_files[idx], mode='L')
@property
def cam2(self):
"""Generator to read image files for cam2 (RGB left)."""
return utils.yield_images(self.cam2_files, mode='RGB')
def get_cam2(self, idx):
"""Read image file for cam2 (RGB left) at the specified index."""
return utils.load_image(self.cam2_files[idx], mode='RGB')
@property
def cam3(self):
"""Generator to read image files for cam0 (RGB right)."""
return utils.yield_images(self.cam3_files, mode='RGB')
def get_cam3(self, idx):
"""Read image file for cam3 (RGB right) at the specified index."""
return utils.load_image(self.cam3_files[idx], mode='RGB')
@property
def gray(self):
"""Generator to read monochrome stereo pairs from file.
"""
return zip(self.cam0, self.cam1)
def get_gray(self, idx):
"""Read monochrome stereo pair at the specified index."""
return (self.get_cam0(idx), self.get_cam1(idx))
@property
def rgb(self):
"""Generator to read RGB stereo pairs from file.
"""
return zip(self.cam2, self.cam3)
def get_rgb(self, idx):
"""Read RGB stereo pair at the specified index."""
return (self.get_cam2(idx), self.get_cam3(idx))
@property
def velo(self):
"""Generator to read velodyne [x,y,z,reflectance] scan data from binary files."""
# Return a generator yielding Velodyne scans.
# Each scan is a Nx4 array of [x,y,z,reflectance]
return utils.yield_velo_scans(self.velo_files)
def get_velo(self, idx):
"""Read velodyne [x,y,z,reflectance] scan at the specified index."""
return utils.load_velo_scan(self.velo_files[idx])
def _get_file_lists(self):
"""Find and list data files for each sensor."""
self.oxts_files = sorted(glob.glob(
os.path.join(self.data_path, 'oxts', 'data', '*.txt')))
self.cam0_files = sorted(glob.glob(
os.path.join(self.data_path, 'image_00',
'data', '*.{}'.format(self.imtype))))
self.cam1_files = sorted(glob.glob(
os.path.join(self.data_path, 'image_01',
'data', '*.{}'.format(self.imtype))))
self.cam2_files = sorted(glob.glob(
os.path.join(self.data_path, 'image_02',
'data', '*.{}'.format(self.imtype))))
self.cam3_files = sorted(glob.glob(
os.path.join(self.data_path, 'image_03',
'data', '*.{}'.format(self.imtype))))
self.velo_files = sorted(glob.glob(
os.path.join(self.data_path, 'velodyne_points',
'data', '*.bin')))
# Subselect the chosen range of frames, if any
if self.frames is not None:
self.oxts_files = utils.subselect_files(
self.oxts_files, self.frames)
self.cam0_files = utils.subselect_files(
self.cam0_files, self.frames)
self.cam1_files = utils.subselect_files(
self.cam1_files, self.frames)
self.cam2_files = utils.subselect_files(
self.cam2_files, self.frames)
self.cam3_files = utils.subselect_files(
self.cam3_files, self.frames)
self.velo_files = utils.subselect_files(
self.velo_files, self.frames)
def _load_calib_cam_to_cam(self, velo_to_cam_file, cam_to_cam_file):
# We'll return the camera calibration as a dictionary
data = {}
# Load the rigid transformation from velodyne coordinates
# to unrectified cam0 coordinates
T_cam0unrect_velo = self._load_calib_rigid(velo_to_cam_file)
data['T_cam0_velo_unrect'] = T_cam0unrect_velo
# Load and parse the cam-to-cam calibration data
cam_to_cam_filepath = os.path.join(self.calib_path, cam_to_cam_file)
filedata = utils.read_calib_file(cam_to_cam_filepath)
# Create 3x4 projection matrices
P_rect_00 = np.reshape(filedata['P_rect_00'], (3, 4))
P_rect_10 = np.reshape(filedata['P_rect_01'], (3, 4))
P_rect_20 = np.reshape(filedata['P_rect_02'], (3, 4))
P_rect_30 = np.reshape(filedata['P_rect_03'], (3, 4))
data['P_rect_00'] = P_rect_00
data['P_rect_10'] = P_rect_10
data['P_rect_20'] = P_rect_20
data['P_rect_30'] = P_rect_30
# Create 4x4 matrices from the rectifying rotation matrices
R_rect_00 = np.eye(4)
R_rect_00[0:3, 0:3] = np.reshape(filedata['R_rect_00'], (3, 3))
R_rect_10 = np.eye(4)
R_rect_10[0:3, 0:3] = np.reshape(filedata['R_rect_01'], (3, 3))
R_rect_20 = np.eye(4)
R_rect_20[0:3, 0:3] = np.reshape(filedata['R_rect_02'], (3, 3))
R_rect_30 = np.eye(4)
R_rect_30[0:3, 0:3] = np.reshape(filedata['R_rect_03'], (3, 3))
data['R_rect_00'] = R_rect_00
data['R_rect_10'] = R_rect_10
data['R_rect_20'] = R_rect_20
data['R_rect_30'] = R_rect_30
# Compute the rectified extrinsics from cam0 to camN
T0 = np.eye(4)
T0[0, 3] = P_rect_00[0, 3] / P_rect_00[0, 0]
T1 = np.eye(4)
T1[0, 3] = P_rect_10[0, 3] / P_rect_10[0, 0]
T2 = np.eye(4)
T2[0, 3] = P_rect_20[0, 3] / P_rect_20[0, 0]
T3 = np.eye(4)
T3[0, 3] = P_rect_30[0, 3] / P_rect_30[0, 0]
# Compute the velodyne to rectified camera coordinate transforms
data['T_cam0_velo'] = T0.dot(R_rect_00.dot(T_cam0unrect_velo))
data['T_cam1_velo'] = T1.dot(R_rect_00.dot(T_cam0unrect_velo))
data['T_cam2_velo'] = T2.dot(R_rect_00.dot(T_cam0unrect_velo))
data['T_cam3_velo'] = T3.dot(R_rect_00.dot(T_cam0unrect_velo))
# Compute the camera intrinsics
data['K_cam0'] = P_rect_00[0:3, 0:3]
data['K_cam1'] = P_rect_10[0:3, 0:3]
data['K_cam2'] = P_rect_20[0:3, 0:3]
data['K_cam3'] = P_rect_30[0:3, 0:3]
# Compute the stereo baselines in meters by projecting the origin of
# each camera frame into the velodyne frame and computing the distances
# between them
p_cam = np.array([0, 0, 0, 1])
p_velo0 = np.linalg.inv(data['T_cam0_velo']).dot(p_cam)
p_velo1 = np.linalg.inv(data['T_cam1_velo']).dot(p_cam)
p_velo2 = np.linalg.inv(data['T_cam2_velo']).dot(p_cam)
p_velo3 = np.linalg.inv(data['T_cam3_velo']).dot(p_cam)
data['b_gray'] = np.linalg.norm(p_velo1 - p_velo0) # gray baseline
data['b_rgb'] = np.linalg.norm(p_velo3 - p_velo2) # rgb baseline
return data
def _load_calib(self):
"""Load and compute intrinsic and extrinsic calibration parameters."""
# We'll build the calibration parameters as a dictionary, then
# convert it to a namedtuple to prevent it from being modified later
data = {}
# Load the rigid transformation from IMU to velodyne
data['T_velo_imu'] = self._load_calib_rigid('calib_imu_to_velo.txt')
# Load the camera intrinsics and extrinsics
data.update(self._load_calib_cam_to_cam(
'calib_velo_to_cam.txt', 'calib_cam_to_cam.txt'))
# Pre-compute the IMU to rectified camera coordinate transforms
data['T_cam0_imu'] = data['T_cam0_velo'].dot(data['T_velo_imu'])
data['T_cam1_imu'] = data['T_cam1_velo'].dot(data['T_velo_imu'])
data['T_cam2_imu'] = data['T_cam2_velo'].dot(data['T_velo_imu'])
data['T_cam3_imu'] = data['T_cam3_velo'].dot(data['T_velo_imu'])
self.calib = namedtuple('CalibData', data.keys())(*data.values())
def _load_timestamps(self):
"""Load timestamps from file."""
timestamp_file = os.path.join(
self.data_path, 'oxts', 'timestamps.txt')
# Read and parse the timestamps
self.timestamps = []
with open(timestamp_file, 'r') as f:
for line in f.readlines():
# NB: datetime only supports microseconds, but KITTI timestamps
# give nanoseconds, so need to truncate last 4 characters to
# get rid of \n (counts as 1) and extra 3 digits
t = dt.datetime.strptime(line[:-4], '%Y-%m-%d %H:%M:%S.%f')
self.timestamps.append(t)
# Subselect the chosen range of frames, if any
if self.frames is not None:
self.timestamps = [self.timestamps[i] for i in self.frames]
def _load_oxts(self):
"""Load OXTS data from file."""
self.oxts = utils.load_oxts_packets_and_poses(self.oxts_files)
|
utiasSTARS/pykitti
|
pykitti/raw.py
|
raw._load_calib
|
python
|
def _load_calib(self):
# We'll build the calibration parameters as a dictionary, then
# convert it to a namedtuple to prevent it from being modified later
data = {}
# Load the rigid transformation from IMU to velodyne
data['T_velo_imu'] = self._load_calib_rigid('calib_imu_to_velo.txt')
# Load the camera intrinsics and extrinsics
data.update(self._load_calib_cam_to_cam(
'calib_velo_to_cam.txt', 'calib_cam_to_cam.txt'))
# Pre-compute the IMU to rectified camera coordinate transforms
data['T_cam0_imu'] = data['T_cam0_velo'].dot(data['T_velo_imu'])
data['T_cam1_imu'] = data['T_cam1_velo'].dot(data['T_velo_imu'])
data['T_cam2_imu'] = data['T_cam2_velo'].dot(data['T_velo_imu'])
data['T_cam3_imu'] = data['T_cam3_velo'].dot(data['T_velo_imu'])
self.calib = namedtuple('CalibData', data.keys())(*data.values())
|
Load and compute intrinsic and extrinsic calibration parameters.
|
train
|
https://github.com/utiasSTARS/pykitti/blob/d3e1bb81676e831886726cc5ed79ce1f049aef2c/pykitti/raw.py#L225-L244
|
[
"def _load_calib_rigid(self, filename):\n \"\"\"Read a rigid transform calibration file as a numpy.array.\"\"\"\n filepath = os.path.join(self.calib_path, filename)\n data = utils.read_calib_file(filepath)\n return utils.transform_from_rot_trans(data['R'], data['T'])\n",
"def _load_calib_cam_to_cam(self, velo_to_cam_file, cam_to_cam_file):\n # We'll return the camera calibration as a dictionary\n data = {}\n\n # Load the rigid transformation from velodyne coordinates\n # to unrectified cam0 coordinates\n T_cam0unrect_velo = self._load_calib_rigid(velo_to_cam_file)\n data['T_cam0_velo_unrect'] = T_cam0unrect_velo\n\n # Load and parse the cam-to-cam calibration data\n cam_to_cam_filepath = os.path.join(self.calib_path, cam_to_cam_file)\n filedata = utils.read_calib_file(cam_to_cam_filepath)\n\n # Create 3x4 projection matrices\n P_rect_00 = np.reshape(filedata['P_rect_00'], (3, 4))\n P_rect_10 = np.reshape(filedata['P_rect_01'], (3, 4))\n P_rect_20 = np.reshape(filedata['P_rect_02'], (3, 4))\n P_rect_30 = np.reshape(filedata['P_rect_03'], (3, 4))\n\n data['P_rect_00'] = P_rect_00\n data['P_rect_10'] = P_rect_10\n data['P_rect_20'] = P_rect_20\n data['P_rect_30'] = P_rect_30\n\n # Create 4x4 matrices from the rectifying rotation matrices\n R_rect_00 = np.eye(4)\n R_rect_00[0:3, 0:3] = np.reshape(filedata['R_rect_00'], (3, 3))\n R_rect_10 = np.eye(4)\n R_rect_10[0:3, 0:3] = np.reshape(filedata['R_rect_01'], (3, 3))\n R_rect_20 = np.eye(4)\n R_rect_20[0:3, 0:3] = np.reshape(filedata['R_rect_02'], (3, 3))\n R_rect_30 = np.eye(4)\n R_rect_30[0:3, 0:3] = np.reshape(filedata['R_rect_03'], (3, 3))\n\n data['R_rect_00'] = R_rect_00\n data['R_rect_10'] = R_rect_10\n data['R_rect_20'] = R_rect_20\n data['R_rect_30'] = R_rect_30\n\n # Compute the rectified extrinsics from cam0 to camN\n T0 = np.eye(4)\n T0[0, 3] = P_rect_00[0, 3] / P_rect_00[0, 0]\n T1 = np.eye(4)\n T1[0, 3] = P_rect_10[0, 3] / P_rect_10[0, 0]\n T2 = np.eye(4)\n T2[0, 3] = P_rect_20[0, 3] / P_rect_20[0, 0]\n T3 = np.eye(4)\n T3[0, 3] = P_rect_30[0, 3] / P_rect_30[0, 0]\n\n # Compute the velodyne to rectified camera coordinate transforms\n data['T_cam0_velo'] = T0.dot(R_rect_00.dot(T_cam0unrect_velo))\n data['T_cam1_velo'] = T1.dot(R_rect_00.dot(T_cam0unrect_velo))\n 
data['T_cam2_velo'] = T2.dot(R_rect_00.dot(T_cam0unrect_velo))\n data['T_cam3_velo'] = T3.dot(R_rect_00.dot(T_cam0unrect_velo))\n\n # Compute the camera intrinsics\n data['K_cam0'] = P_rect_00[0:3, 0:3]\n data['K_cam1'] = P_rect_10[0:3, 0:3]\n data['K_cam2'] = P_rect_20[0:3, 0:3]\n data['K_cam3'] = P_rect_30[0:3, 0:3]\n\n # Compute the stereo baselines in meters by projecting the origin of\n # each camera frame into the velodyne frame and computing the distances\n # between them\n p_cam = np.array([0, 0, 0, 1])\n p_velo0 = np.linalg.inv(data['T_cam0_velo']).dot(p_cam)\n p_velo1 = np.linalg.inv(data['T_cam1_velo']).dot(p_cam)\n p_velo2 = np.linalg.inv(data['T_cam2_velo']).dot(p_cam)\n p_velo3 = np.linalg.inv(data['T_cam3_velo']).dot(p_cam)\n\n data['b_gray'] = np.linalg.norm(p_velo1 - p_velo0) # gray baseline\n data['b_rgb'] = np.linalg.norm(p_velo3 - p_velo2) # rgb baseline\n\n return data\n"
] |
class raw:
"""Load and parse raw data into a usable format."""
def __init__(self, base_path, date, drive, **kwargs):
"""Set the path and pre-load calibration data and timestamps."""
self.dataset = kwargs.get('dataset', 'sync')
self.drive = date + '_drive_' + drive + '_' + self.dataset
self.calib_path = os.path.join(base_path, date)
self.data_path = os.path.join(base_path, date, self.drive)
self.frames = kwargs.get('frames', None)
# Default image file extension is '.png'
self.imtype = kwargs.get('imtype', 'png')
# Find all the data files
self._get_file_lists()
# Pre-load data that isn't returned as a generator
self._load_calib()
self._load_timestamps()
self._load_oxts()
def __len__(self):
"""Return the number of frames loaded."""
return len(self.timestamps)
@property
def cam0(self):
"""Generator to read image files for cam0 (monochrome left)."""
return utils.yield_images(self.cam0_files, mode='L')
def get_cam0(self, idx):
"""Read image file for cam0 (monochrome left) at the specified index."""
return utils.load_image(self.cam0_files[idx], mode='L')
@property
def cam1(self):
"""Generator to read image files for cam1 (monochrome right)."""
return utils.yield_images(self.cam1_files, mode='L')
def get_cam1(self, idx):
"""Read image file for cam1 (monochrome right) at the specified index."""
return utils.load_image(self.cam1_files[idx], mode='L')
@property
def cam2(self):
"""Generator to read image files for cam2 (RGB left)."""
return utils.yield_images(self.cam2_files, mode='RGB')
def get_cam2(self, idx):
"""Read image file for cam2 (RGB left) at the specified index."""
return utils.load_image(self.cam2_files[idx], mode='RGB')
@property
def cam3(self):
"""Generator to read image files for cam0 (RGB right)."""
return utils.yield_images(self.cam3_files, mode='RGB')
def get_cam3(self, idx):
"""Read image file for cam3 (RGB right) at the specified index."""
return utils.load_image(self.cam3_files[idx], mode='RGB')
@property
def gray(self):
"""Generator to read monochrome stereo pairs from file.
"""
return zip(self.cam0, self.cam1)
def get_gray(self, idx):
"""Read monochrome stereo pair at the specified index."""
return (self.get_cam0(idx), self.get_cam1(idx))
@property
def rgb(self):
"""Generator to read RGB stereo pairs from file.
"""
return zip(self.cam2, self.cam3)
def get_rgb(self, idx):
"""Read RGB stereo pair at the specified index."""
return (self.get_cam2(idx), self.get_cam3(idx))
@property
def velo(self):
"""Generator to read velodyne [x,y,z,reflectance] scan data from binary files."""
# Return a generator yielding Velodyne scans.
# Each scan is a Nx4 array of [x,y,z,reflectance]
return utils.yield_velo_scans(self.velo_files)
def get_velo(self, idx):
"""Read velodyne [x,y,z,reflectance] scan at the specified index."""
return utils.load_velo_scan(self.velo_files[idx])
def _get_file_lists(self):
"""Find and list data files for each sensor."""
self.oxts_files = sorted(glob.glob(
os.path.join(self.data_path, 'oxts', 'data', '*.txt')))
self.cam0_files = sorted(glob.glob(
os.path.join(self.data_path, 'image_00',
'data', '*.{}'.format(self.imtype))))
self.cam1_files = sorted(glob.glob(
os.path.join(self.data_path, 'image_01',
'data', '*.{}'.format(self.imtype))))
self.cam2_files = sorted(glob.glob(
os.path.join(self.data_path, 'image_02',
'data', '*.{}'.format(self.imtype))))
self.cam3_files = sorted(glob.glob(
os.path.join(self.data_path, 'image_03',
'data', '*.{}'.format(self.imtype))))
self.velo_files = sorted(glob.glob(
os.path.join(self.data_path, 'velodyne_points',
'data', '*.bin')))
# Subselect the chosen range of frames, if any
if self.frames is not None:
self.oxts_files = utils.subselect_files(
self.oxts_files, self.frames)
self.cam0_files = utils.subselect_files(
self.cam0_files, self.frames)
self.cam1_files = utils.subselect_files(
self.cam1_files, self.frames)
self.cam2_files = utils.subselect_files(
self.cam2_files, self.frames)
self.cam3_files = utils.subselect_files(
self.cam3_files, self.frames)
self.velo_files = utils.subselect_files(
self.velo_files, self.frames)
def _load_calib_rigid(self, filename):
"""Read a rigid transform calibration file as a numpy.array."""
filepath = os.path.join(self.calib_path, filename)
data = utils.read_calib_file(filepath)
return utils.transform_from_rot_trans(data['R'], data['T'])
def _load_calib_cam_to_cam(self, velo_to_cam_file, cam_to_cam_file):
# We'll return the camera calibration as a dictionary
data = {}
# Load the rigid transformation from velodyne coordinates
# to unrectified cam0 coordinates
T_cam0unrect_velo = self._load_calib_rigid(velo_to_cam_file)
data['T_cam0_velo_unrect'] = T_cam0unrect_velo
# Load and parse the cam-to-cam calibration data
cam_to_cam_filepath = os.path.join(self.calib_path, cam_to_cam_file)
filedata = utils.read_calib_file(cam_to_cam_filepath)
# Create 3x4 projection matrices
P_rect_00 = np.reshape(filedata['P_rect_00'], (3, 4))
P_rect_10 = np.reshape(filedata['P_rect_01'], (3, 4))
P_rect_20 = np.reshape(filedata['P_rect_02'], (3, 4))
P_rect_30 = np.reshape(filedata['P_rect_03'], (3, 4))
data['P_rect_00'] = P_rect_00
data['P_rect_10'] = P_rect_10
data['P_rect_20'] = P_rect_20
data['P_rect_30'] = P_rect_30
# Create 4x4 matrices from the rectifying rotation matrices
R_rect_00 = np.eye(4)
R_rect_00[0:3, 0:3] = np.reshape(filedata['R_rect_00'], (3, 3))
R_rect_10 = np.eye(4)
R_rect_10[0:3, 0:3] = np.reshape(filedata['R_rect_01'], (3, 3))
R_rect_20 = np.eye(4)
R_rect_20[0:3, 0:3] = np.reshape(filedata['R_rect_02'], (3, 3))
R_rect_30 = np.eye(4)
R_rect_30[0:3, 0:3] = np.reshape(filedata['R_rect_03'], (3, 3))
data['R_rect_00'] = R_rect_00
data['R_rect_10'] = R_rect_10
data['R_rect_20'] = R_rect_20
data['R_rect_30'] = R_rect_30
# Compute the rectified extrinsics from cam0 to camN
T0 = np.eye(4)
T0[0, 3] = P_rect_00[0, 3] / P_rect_00[0, 0]
T1 = np.eye(4)
T1[0, 3] = P_rect_10[0, 3] / P_rect_10[0, 0]
T2 = np.eye(4)
T2[0, 3] = P_rect_20[0, 3] / P_rect_20[0, 0]
T3 = np.eye(4)
T3[0, 3] = P_rect_30[0, 3] / P_rect_30[0, 0]
# Compute the velodyne to rectified camera coordinate transforms
data['T_cam0_velo'] = T0.dot(R_rect_00.dot(T_cam0unrect_velo))
data['T_cam1_velo'] = T1.dot(R_rect_00.dot(T_cam0unrect_velo))
data['T_cam2_velo'] = T2.dot(R_rect_00.dot(T_cam0unrect_velo))
data['T_cam3_velo'] = T3.dot(R_rect_00.dot(T_cam0unrect_velo))
# Compute the camera intrinsics
data['K_cam0'] = P_rect_00[0:3, 0:3]
data['K_cam1'] = P_rect_10[0:3, 0:3]
data['K_cam2'] = P_rect_20[0:3, 0:3]
data['K_cam3'] = P_rect_30[0:3, 0:3]
# Compute the stereo baselines in meters by projecting the origin of
# each camera frame into the velodyne frame and computing the distances
# between them
p_cam = np.array([0, 0, 0, 1])
p_velo0 = np.linalg.inv(data['T_cam0_velo']).dot(p_cam)
p_velo1 = np.linalg.inv(data['T_cam1_velo']).dot(p_cam)
p_velo2 = np.linalg.inv(data['T_cam2_velo']).dot(p_cam)
p_velo3 = np.linalg.inv(data['T_cam3_velo']).dot(p_cam)
data['b_gray'] = np.linalg.norm(p_velo1 - p_velo0) # gray baseline
data['b_rgb'] = np.linalg.norm(p_velo3 - p_velo2) # rgb baseline
return data
def _load_timestamps(self):
"""Load timestamps from file."""
timestamp_file = os.path.join(
self.data_path, 'oxts', 'timestamps.txt')
# Read and parse the timestamps
self.timestamps = []
with open(timestamp_file, 'r') as f:
for line in f.readlines():
# NB: datetime only supports microseconds, but KITTI timestamps
# give nanoseconds, so need to truncate last 4 characters to
# get rid of \n (counts as 1) and extra 3 digits
t = dt.datetime.strptime(line[:-4], '%Y-%m-%d %H:%M:%S.%f')
self.timestamps.append(t)
# Subselect the chosen range of frames, if any
if self.frames is not None:
self.timestamps = [self.timestamps[i] for i in self.frames]
def _load_oxts(self):
"""Load OXTS data from file."""
self.oxts = utils.load_oxts_packets_and_poses(self.oxts_files)
|
utiasSTARS/pykitti
|
pykitti/raw.py
|
raw._load_timestamps
|
python
|
def _load_timestamps(self):
timestamp_file = os.path.join(
self.data_path, 'oxts', 'timestamps.txt')
# Read and parse the timestamps
self.timestamps = []
with open(timestamp_file, 'r') as f:
for line in f.readlines():
# NB: datetime only supports microseconds, but KITTI timestamps
# give nanoseconds, so need to truncate last 4 characters to
# get rid of \n (counts as 1) and extra 3 digits
t = dt.datetime.strptime(line[:-4], '%Y-%m-%d %H:%M:%S.%f')
self.timestamps.append(t)
# Subselect the chosen range of frames, if any
if self.frames is not None:
self.timestamps = [self.timestamps[i] for i in self.frames]
|
Load timestamps from file.
|
train
|
https://github.com/utiasSTARS/pykitti/blob/d3e1bb81676e831886726cc5ed79ce1f049aef2c/pykitti/raw.py#L246-L263
| null |
class raw:
"""Load and parse raw data into a usable format."""
def __init__(self, base_path, date, drive, **kwargs):
"""Set the path and pre-load calibration data and timestamps."""
self.dataset = kwargs.get('dataset', 'sync')
self.drive = date + '_drive_' + drive + '_' + self.dataset
self.calib_path = os.path.join(base_path, date)
self.data_path = os.path.join(base_path, date, self.drive)
self.frames = kwargs.get('frames', None)
# Default image file extension is '.png'
self.imtype = kwargs.get('imtype', 'png')
# Find all the data files
self._get_file_lists()
# Pre-load data that isn't returned as a generator
self._load_calib()
self._load_timestamps()
self._load_oxts()
def __len__(self):
"""Return the number of frames loaded."""
return len(self.timestamps)
@property
def cam0(self):
"""Generator to read image files for cam0 (monochrome left)."""
return utils.yield_images(self.cam0_files, mode='L')
def get_cam0(self, idx):
"""Read image file for cam0 (monochrome left) at the specified index."""
return utils.load_image(self.cam0_files[idx], mode='L')
@property
def cam1(self):
"""Generator to read image files for cam1 (monochrome right)."""
return utils.yield_images(self.cam1_files, mode='L')
def get_cam1(self, idx):
"""Read image file for cam1 (monochrome right) at the specified index."""
return utils.load_image(self.cam1_files[idx], mode='L')
@property
def cam2(self):
"""Generator to read image files for cam2 (RGB left)."""
return utils.yield_images(self.cam2_files, mode='RGB')
def get_cam2(self, idx):
"""Read image file for cam2 (RGB left) at the specified index."""
return utils.load_image(self.cam2_files[idx], mode='RGB')
@property
def cam3(self):
"""Generator to read image files for cam0 (RGB right)."""
return utils.yield_images(self.cam3_files, mode='RGB')
def get_cam3(self, idx):
"""Read image file for cam3 (RGB right) at the specified index."""
return utils.load_image(self.cam3_files[idx], mode='RGB')
@property
def gray(self):
"""Generator to read monochrome stereo pairs from file.
"""
return zip(self.cam0, self.cam1)
def get_gray(self, idx):
"""Read monochrome stereo pair at the specified index."""
return (self.get_cam0(idx), self.get_cam1(idx))
@property
def rgb(self):
"""Generator to read RGB stereo pairs from file.
"""
return zip(self.cam2, self.cam3)
def get_rgb(self, idx):
"""Read RGB stereo pair at the specified index."""
return (self.get_cam2(idx), self.get_cam3(idx))
@property
def velo(self):
"""Generator to read velodyne [x,y,z,reflectance] scan data from binary files."""
# Return a generator yielding Velodyne scans.
# Each scan is a Nx4 array of [x,y,z,reflectance]
return utils.yield_velo_scans(self.velo_files)
def get_velo(self, idx):
"""Read velodyne [x,y,z,reflectance] scan at the specified index."""
return utils.load_velo_scan(self.velo_files[idx])
def _get_file_lists(self):
"""Find and list data files for each sensor."""
self.oxts_files = sorted(glob.glob(
os.path.join(self.data_path, 'oxts', 'data', '*.txt')))
self.cam0_files = sorted(glob.glob(
os.path.join(self.data_path, 'image_00',
'data', '*.{}'.format(self.imtype))))
self.cam1_files = sorted(glob.glob(
os.path.join(self.data_path, 'image_01',
'data', '*.{}'.format(self.imtype))))
self.cam2_files = sorted(glob.glob(
os.path.join(self.data_path, 'image_02',
'data', '*.{}'.format(self.imtype))))
self.cam3_files = sorted(glob.glob(
os.path.join(self.data_path, 'image_03',
'data', '*.{}'.format(self.imtype))))
self.velo_files = sorted(glob.glob(
os.path.join(self.data_path, 'velodyne_points',
'data', '*.bin')))
# Subselect the chosen range of frames, if any
if self.frames is not None:
self.oxts_files = utils.subselect_files(
self.oxts_files, self.frames)
self.cam0_files = utils.subselect_files(
self.cam0_files, self.frames)
self.cam1_files = utils.subselect_files(
self.cam1_files, self.frames)
self.cam2_files = utils.subselect_files(
self.cam2_files, self.frames)
self.cam3_files = utils.subselect_files(
self.cam3_files, self.frames)
self.velo_files = utils.subselect_files(
self.velo_files, self.frames)
def _load_calib_rigid(self, filename):
"""Read a rigid transform calibration file as a numpy.array."""
filepath = os.path.join(self.calib_path, filename)
data = utils.read_calib_file(filepath)
return utils.transform_from_rot_trans(data['R'], data['T'])
def _load_calib_cam_to_cam(self, velo_to_cam_file, cam_to_cam_file):
# We'll return the camera calibration as a dictionary
data = {}
# Load the rigid transformation from velodyne coordinates
# to unrectified cam0 coordinates
T_cam0unrect_velo = self._load_calib_rigid(velo_to_cam_file)
data['T_cam0_velo_unrect'] = T_cam0unrect_velo
# Load and parse the cam-to-cam calibration data
cam_to_cam_filepath = os.path.join(self.calib_path, cam_to_cam_file)
filedata = utils.read_calib_file(cam_to_cam_filepath)
# Create 3x4 projection matrices
P_rect_00 = np.reshape(filedata['P_rect_00'], (3, 4))
P_rect_10 = np.reshape(filedata['P_rect_01'], (3, 4))
P_rect_20 = np.reshape(filedata['P_rect_02'], (3, 4))
P_rect_30 = np.reshape(filedata['P_rect_03'], (3, 4))
data['P_rect_00'] = P_rect_00
data['P_rect_10'] = P_rect_10
data['P_rect_20'] = P_rect_20
data['P_rect_30'] = P_rect_30
# Create 4x4 matrices from the rectifying rotation matrices
R_rect_00 = np.eye(4)
R_rect_00[0:3, 0:3] = np.reshape(filedata['R_rect_00'], (3, 3))
R_rect_10 = np.eye(4)
R_rect_10[0:3, 0:3] = np.reshape(filedata['R_rect_01'], (3, 3))
R_rect_20 = np.eye(4)
R_rect_20[0:3, 0:3] = np.reshape(filedata['R_rect_02'], (3, 3))
R_rect_30 = np.eye(4)
R_rect_30[0:3, 0:3] = np.reshape(filedata['R_rect_03'], (3, 3))
data['R_rect_00'] = R_rect_00
data['R_rect_10'] = R_rect_10
data['R_rect_20'] = R_rect_20
data['R_rect_30'] = R_rect_30
# Compute the rectified extrinsics from cam0 to camN
T0 = np.eye(4)
T0[0, 3] = P_rect_00[0, 3] / P_rect_00[0, 0]
T1 = np.eye(4)
T1[0, 3] = P_rect_10[0, 3] / P_rect_10[0, 0]
T2 = np.eye(4)
T2[0, 3] = P_rect_20[0, 3] / P_rect_20[0, 0]
T3 = np.eye(4)
T3[0, 3] = P_rect_30[0, 3] / P_rect_30[0, 0]
# Compute the velodyne to rectified camera coordinate transforms
data['T_cam0_velo'] = T0.dot(R_rect_00.dot(T_cam0unrect_velo))
data['T_cam1_velo'] = T1.dot(R_rect_00.dot(T_cam0unrect_velo))
data['T_cam2_velo'] = T2.dot(R_rect_00.dot(T_cam0unrect_velo))
data['T_cam3_velo'] = T3.dot(R_rect_00.dot(T_cam0unrect_velo))
# Compute the camera intrinsics
data['K_cam0'] = P_rect_00[0:3, 0:3]
data['K_cam1'] = P_rect_10[0:3, 0:3]
data['K_cam2'] = P_rect_20[0:3, 0:3]
data['K_cam3'] = P_rect_30[0:3, 0:3]
# Compute the stereo baselines in meters by projecting the origin of
# each camera frame into the velodyne frame and computing the distances
# between them
p_cam = np.array([0, 0, 0, 1])
p_velo0 = np.linalg.inv(data['T_cam0_velo']).dot(p_cam)
p_velo1 = np.linalg.inv(data['T_cam1_velo']).dot(p_cam)
p_velo2 = np.linalg.inv(data['T_cam2_velo']).dot(p_cam)
p_velo3 = np.linalg.inv(data['T_cam3_velo']).dot(p_cam)
data['b_gray'] = np.linalg.norm(p_velo1 - p_velo0) # gray baseline
data['b_rgb'] = np.linalg.norm(p_velo3 - p_velo2) # rgb baseline
return data
def _load_calib(self):
"""Load and compute intrinsic and extrinsic calibration parameters."""
# We'll build the calibration parameters as a dictionary, then
# convert it to a namedtuple to prevent it from being modified later
data = {}
# Load the rigid transformation from IMU to velodyne
data['T_velo_imu'] = self._load_calib_rigid('calib_imu_to_velo.txt')
# Load the camera intrinsics and extrinsics
data.update(self._load_calib_cam_to_cam(
'calib_velo_to_cam.txt', 'calib_cam_to_cam.txt'))
# Pre-compute the IMU to rectified camera coordinate transforms
data['T_cam0_imu'] = data['T_cam0_velo'].dot(data['T_velo_imu'])
data['T_cam1_imu'] = data['T_cam1_velo'].dot(data['T_velo_imu'])
data['T_cam2_imu'] = data['T_cam2_velo'].dot(data['T_velo_imu'])
data['T_cam3_imu'] = data['T_cam3_velo'].dot(data['T_velo_imu'])
self.calib = namedtuple('CalibData', data.keys())(*data.values())
def _load_oxts(self):
"""Load OXTS data from file."""
self.oxts = utils.load_oxts_packets_and_poses(self.oxts_files)
|
C4ptainCrunch/ics.py
|
ics/utils.py
|
arrow_get
|
python
|
def arrow_get(string):
    '''Parse an ICS timestamp that may omit the dashes/colons of ISO 8601.

    ICS uses ISO 8601 without separators; input containing dashes (or
    slashes, which are normalised to dashes) is handed to arrow directly.
    '''
    normalized = string.replace('/', '-') if '/' in string else string
    if '-' in normalized:
        # Looks like proper ISO 8601 -- let arrow parse it natively.
        return arrow.get(normalized)
    # Compact form: drop any trailing UTC marker, pick the format by length.
    compact = normalized.rstrip('Z')
    return arrow.get(compact, DATE_FORMATS[len(compact)])
|
this function exists because ICS uses ISO 8601 without dashes or
colons, i.e. not ISO 8601 at all.
|
train
|
https://github.com/C4ptainCrunch/ics.py/blob/bd918ec7453a7cf73a906cdcc78bd88eb4bab71b/ics/utils.py#L35-L48
| null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals, absolute_import
from arrow.arrow import Arrow
from datetime import timedelta
from six import StringIO, string_types, text_type, integer_types
from uuid import uuid4
from dateutil.tz import gettz
import arrow
import re
from . import parse
tzutc = arrow.utcnow().tzinfo
def remove_x(container):
    """Delete every experimental ('X-' prefixed) line from *container* in place."""
    # Walk backwards so deletions do not shift indices we still need to visit.
    for idx in range(len(container) - 1, -1, -1):
        if container[idx].name.startswith('X-'):
            del container[idx]
DATE_FORMATS = dict((len(k), k) for k in (
'YYYYMM',
'YYYYMMDD',
'YYYYMMDDTHH',
'YYYYMMDDTHHmm',
'YYYYMMDDTHHmmss'))
def iso_to_arrow(time_container, available_tz={}):
if time_container is None:
return None
# TODO : raise if not iso date
tz_list = time_container.params.get('TZID')
# TODO : raise if len(tz_list) > 1 or if tz is not a valid tz
# TODO : see if timezone is registered as a VTIMEZONE
if tz_list and len(tz_list) > 0:
tz = tz_list[0]
else:
tz = None
if ('T' not in time_container.value) and \
'DATE' in time_container.params.get('VALUE', []):
val = time_container.value + 'T0000'
else:
val = time_container.value
if tz and not (val[-1].upper() == 'Z'):
naive = arrow_get(val).naive
selected_tz = gettz(tz)
if not selected_tz:
selected_tz = available_tz.get(tz, 'UTC')
return arrow.get(naive, selected_tz)
else:
return arrow_get(val)
# TODO : support floating (ie not bound to any time zone) times (cf
# http://www.kanzaki.com/docs/ical/dateTime.html)
def iso_precision(string):
    """Classify an ISO-ish timestamp as 'day', 'minute' or 'second' precision."""
    # 'T' separates the date from the (optional) time component.
    if 'T' not in string:
        return 'day'
    date_string, time_string = string.split('T', 1)
    # Cut off any numeric UTC-offset suffix before inspecting the time part.
    time_parts = re.split('[+-]', time_string, 1)
    # NOTE(review): a colon-separated time ('12:00:00') is reported as
    # 'minute' here; only the compact 6-digit form counts as 'second'.
    has_seconds = time_parts[0].count(':') > 1
    has_seconds = not has_seconds and len(time_parts[0]) == 6
    return 'second' if has_seconds else 'minute'
def get_lines(container, name):
    """Remove and return every line of *container* whose name equals *name*."""
    matches = []
    # Iterate backwards so `del` does not disturb indices not yet visited.
    for idx in range(len(container) - 1, -1, -1):
        candidate = container[idx]
        if candidate.name == name:
            matches.append(candidate)
            del container[idx]
    return matches
def parse_duration(line):
"""
Return a timedelta object from a string in the DURATION property format
"""
DAYS, SECS = {'D': 1, 'W': 7}, {'S': 1, 'M': 60, 'H': 3600}
sign, i = 1, 0
if line[i] in '-+':
if line[i] == '-':
sign = -1
i += 1
if line[i] != 'P':
raise parse.ParseError()
i += 1
days, secs = 0, 0
while i < len(line):
if line[i] == 'T':
i += 1
if i == len(line):
break
j = i
while line[j].isdigit():
j += 1
if i == j:
raise parse.ParseError()
val = int(line[i:j])
if line[j] in DAYS:
days += val * DAYS[line[j]]
DAYS.pop(line[j])
elif line[j] in SECS:
secs += val * SECS[line[j]]
SECS.pop(line[j])
else:
raise parse.ParseError()
i = j + 1
return timedelta(sign * days, sign * secs)
def timedelta_to_duration(dt):
"""
Return a string according to the DURATION property format
from a timedelta object
"""
days, secs = dt.days, dt.seconds
res = 'P'
if days // 7:
res += str(days // 7) + 'W'
days %= 7
if days:
res += str(days) + 'D'
if secs:
res += 'T'
if secs // 3600:
res += str(secs // 3600) + 'H'
secs %= 3600
if secs // 60:
res += str(secs // 60) + 'M'
secs %= 60
if secs:
res += str(secs) + 'S'
return res
def get_arrow(value):
if value is None:
return None
elif isinstance(value, Arrow):
return value
elif isinstance(value, tuple):
return arrow.get(*value)
elif isinstance(value, dict):
return arrow.get(**value)
else:
return arrow.get(value)
def arrow_to_iso(instant):
# set to utc, make iso, remove timezone
instant = arrow.get(instant.astimezone(tzutc)).format('YYYYMMDDTHHmmss')
return instant + 'Z'
def arrow_date_to_iso(instant):
# date-only for all day events
# set to utc, make iso, remove timezone
instant = arrow.get(instant.astimezone(tzutc)).format('YYYYMMDD')
return instant # no TZ for all days
def uid_gen():
    """Generate a random UID of the form '<uuid>@<uuid-4-char-prefix>.org'."""
    token = str(uuid4())
    return "{}@{}.org".format(token, token[:4])
def escape_string(string):
    """Escape TEXT-value special characters (RFC 5545).

    Backslash is doubled first so later escapes are not double-escaped.
    """
    replacements = (
        ("\\", "\\\\"),
        (";", "\\;"),
        (",", "\\,"),
        ("\n", "\\n"),
        ("\r", "\\r"),
    )
    for plain, escaped in replacements:
        string = string.replace(plain, escaped)
    return string
def unescape_string(string):
    """Reverse escape_string for TEXT values (RFC 5545).

    The replacement order mirrors the original: named escapes first
    (either case for N/R), doubled backslashes last.
    """
    replacements = (
        ("\\;", ";"),
        ("\\,", ","),
        ("\\n", "\n"),
        ("\\N", "\n"),
        ("\\r", "\r"),
        ("\\R", "\r"),
        ("\\\\", "\\"),
    )
    for escaped, plain in replacements:
        string = string.replace(escaped, plain)
    return string
|
C4ptainCrunch/ics.py
|
ics/utils.py
|
parse_duration
|
python
|
def parse_duration(line):
    """
    Return a timedelta object from a string in the DURATION property format.

    Accepts an optional leading sign, a mandatory 'P', day/week designators,
    and an optional 'T'-prefixed time part (H/M/S).

    Raises:
        parse.ParseError: if the string is empty or malformed.
    """
    DAYS, SECS = {'D': 1, 'W': 7}, {'S': 1, 'M': 60, 'H': 3600}
    sign, i = 1, 0
    # Guard the empty string: previously this raised IndexError below.
    if not line:
        raise parse.ParseError()
    if line[i] in '-+':
        if line[i] == '-':
            sign = -1
        i += 1
    # A bare sign ("+"/"-") must not index past the end of the string.
    if i == len(line) or line[i] != 'P':
        raise parse.ParseError()
    i += 1
    days, secs = 0, 0
    while i < len(line):
        if line[i] == 'T':
            i += 1
            if i == len(line):
                break
        j = i
        # Scan the run of digits, stopping at the end of the string so that
        # malformed input like "P1" raises ParseError instead of IndexError.
        while j < len(line) and line[j].isdigit():
            j += 1
        # No digits, or digits with no trailing unit designator: malformed.
        if i == j or j == len(line):
            raise parse.ParseError()
        val = int(line[i:j])
        # Each unit is popped after use so repeated designators are rejected.
        if line[j] in DAYS:
            days += val * DAYS[line[j]]
            DAYS.pop(line[j])
        elif line[j] in SECS:
            secs += val * SECS[line[j]]
            SECS.pop(line[j])
        else:
            raise parse.ParseError()
        i = j + 1
    return timedelta(sign * days, sign * secs)
|
Return a timedelta object from a string in the DURATION property format
|
train
|
https://github.com/C4ptainCrunch/ics.py/blob/bd918ec7453a7cf73a906cdcc78bd88eb4bab71b/ics/utils.py#L109-L143
| null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals, absolute_import
from arrow.arrow import Arrow
from datetime import timedelta
from six import StringIO, string_types, text_type, integer_types
from uuid import uuid4
from dateutil.tz import gettz
import arrow
import re
from . import parse
tzutc = arrow.utcnow().tzinfo
def remove_x(container):
for i in reversed(range(len(container))):
item = container[i]
if item.name.startswith('X-'):
del container[i]
DATE_FORMATS = dict((len(k), k) for k in (
'YYYYMM',
'YYYYMMDD',
'YYYYMMDDTHH',
'YYYYMMDDTHHmm',
'YYYYMMDDTHHmmss'))
def arrow_get(string):
'''this function exists because ICS uses ISO 8601 without dashes or
colons, i.e. not ISO 8601 at all.'''
# replace slashes with dashes
if '/' in string:
string = string.replace('/', '-')
# if string contains dashes, assume it to be proper ISO 8601
if '-' in string:
return arrow.get(string)
string = string.rstrip('Z')
return arrow.get(string, DATE_FORMATS[len(string)])
def iso_to_arrow(time_container, available_tz={}):
if time_container is None:
return None
# TODO : raise if not iso date
tz_list = time_container.params.get('TZID')
# TODO : raise if len(tz_list) > 1 or if tz is not a valid tz
# TODO : see if timezone is registered as a VTIMEZONE
if tz_list and len(tz_list) > 0:
tz = tz_list[0]
else:
tz = None
if ('T' not in time_container.value) and \
'DATE' in time_container.params.get('VALUE', []):
val = time_container.value + 'T0000'
else:
val = time_container.value
if tz and not (val[-1].upper() == 'Z'):
naive = arrow_get(val).naive
selected_tz = gettz(tz)
if not selected_tz:
selected_tz = available_tz.get(tz, 'UTC')
return arrow.get(naive, selected_tz)
else:
return arrow_get(val)
# TODO : support floating (ie not bound to any time zone) times (cf
# http://www.kanzaki.com/docs/ical/dateTime.html)
def iso_precision(string):
has_time = 'T' in string
if has_time:
date_string, time_string = string.split('T', 1)
time_parts = re.split('[+-]', time_string, 1)
has_seconds = time_parts[0].count(':') > 1
has_seconds = not has_seconds and len(time_parts[0]) == 6
if has_seconds:
return 'second'
else:
return 'minute'
else:
return 'day'
def get_lines(container, name):
lines = []
for i in reversed(range(len(container))):
item = container[i]
if item.name == name:
lines.append(item)
del container[i]
return lines
def timedelta_to_duration(dt):
"""
Return a string according to the DURATION property format
from a timedelta object
"""
days, secs = dt.days, dt.seconds
res = 'P'
if days // 7:
res += str(days // 7) + 'W'
days %= 7
if days:
res += str(days) + 'D'
if secs:
res += 'T'
if secs // 3600:
res += str(secs // 3600) + 'H'
secs %= 3600
if secs // 60:
res += str(secs // 60) + 'M'
secs %= 60
if secs:
res += str(secs) + 'S'
return res
def get_arrow(value):
if value is None:
return None
elif isinstance(value, Arrow):
return value
elif isinstance(value, tuple):
return arrow.get(*value)
elif isinstance(value, dict):
return arrow.get(**value)
else:
return arrow.get(value)
def arrow_to_iso(instant):
# set to utc, make iso, remove timezone
instant = arrow.get(instant.astimezone(tzutc)).format('YYYYMMDDTHHmmss')
return instant + 'Z'
def arrow_date_to_iso(instant):
# date-only for all day events
# set to utc, make iso, remove timezone
instant = arrow.get(instant.astimezone(tzutc)).format('YYYYMMDD')
return instant # no TZ for all days
def uid_gen():
uid = str(uuid4())
return "{}@{}.org".format(uid, uid[:4])
def escape_string(string):
string = string.replace("\\", "\\\\")
string = string.replace(";", "\\;")
string = string.replace(",", "\\,")
string = string.replace("\n", "\\n")
string = string.replace("\r", "\\r")
return string
def unescape_string(string):
string = string.replace("\\;", ";")
string = string.replace("\\,", ",")
string = string.replace("\\n", "\n")
string = string.replace("\\N", "\n")
string = string.replace("\\r", "\r")
string = string.replace("\\R", "\r")
string = string.replace("\\\\", "\\")
return string
|
C4ptainCrunch/ics.py
|
ics/utils.py
|
timedelta_to_duration
|
python
|
def timedelta_to_duration(dt):
    """
    Return a string according to the DURATION property format
    from a timedelta object
    """
    weeks, days = divmod(dt.days, 7)
    hours, rem = divmod(dt.seconds, 3600)
    minutes, seconds = divmod(rem, 60)
    res = 'P'
    if weeks:
        res += str(weeks) + 'W'
    if days:
        res += str(days) + 'D'
    # The 'T' separator is only emitted when a time component is present.
    if hours or minutes or seconds:
        res += 'T'
        if hours:
            res += str(hours) + 'H'
        if minutes:
            res += str(minutes) + 'M'
        if seconds:
            res += str(seconds) + 'S'
    return res
|
Return a string according to the DURATION property format
from a timedelta object
|
train
|
https://github.com/C4ptainCrunch/ics.py/blob/bd918ec7453a7cf73a906cdcc78bd88eb4bab71b/ics/utils.py#L146-L168
| null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals, absolute_import
from arrow.arrow import Arrow
from datetime import timedelta
from six import StringIO, string_types, text_type, integer_types
from uuid import uuid4
from dateutil.tz import gettz
import arrow
import re
from . import parse
tzutc = arrow.utcnow().tzinfo
def remove_x(container):
for i in reversed(range(len(container))):
item = container[i]
if item.name.startswith('X-'):
del container[i]
DATE_FORMATS = dict((len(k), k) for k in (
'YYYYMM',
'YYYYMMDD',
'YYYYMMDDTHH',
'YYYYMMDDTHHmm',
'YYYYMMDDTHHmmss'))
def arrow_get(string):
'''this function exists because ICS uses ISO 8601 without dashes or
colons, i.e. not ISO 8601 at all.'''
# replace slashes with dashes
if '/' in string:
string = string.replace('/', '-')
# if string contains dashes, assume it to be proper ISO 8601
if '-' in string:
return arrow.get(string)
string = string.rstrip('Z')
return arrow.get(string, DATE_FORMATS[len(string)])
def iso_to_arrow(time_container, available_tz={}):
if time_container is None:
return None
# TODO : raise if not iso date
tz_list = time_container.params.get('TZID')
# TODO : raise if len(tz_list) > 1 or if tz is not a valid tz
# TODO : see if timezone is registered as a VTIMEZONE
if tz_list and len(tz_list) > 0:
tz = tz_list[0]
else:
tz = None
if ('T' not in time_container.value) and \
'DATE' in time_container.params.get('VALUE', []):
val = time_container.value + 'T0000'
else:
val = time_container.value
if tz and not (val[-1].upper() == 'Z'):
naive = arrow_get(val).naive
selected_tz = gettz(tz)
if not selected_tz:
selected_tz = available_tz.get(tz, 'UTC')
return arrow.get(naive, selected_tz)
else:
return arrow_get(val)
# TODO : support floating (ie not bound to any time zone) times (cf
# http://www.kanzaki.com/docs/ical/dateTime.html)
def iso_precision(string):
has_time = 'T' in string
if has_time:
date_string, time_string = string.split('T', 1)
time_parts = re.split('[+-]', time_string, 1)
has_seconds = time_parts[0].count(':') > 1
has_seconds = not has_seconds and len(time_parts[0]) == 6
if has_seconds:
return 'second'
else:
return 'minute'
else:
return 'day'
def get_lines(container, name):
lines = []
for i in reversed(range(len(container))):
item = container[i]
if item.name == name:
lines.append(item)
del container[i]
return lines
def parse_duration(line):
"""
Return a timedelta object from a string in the DURATION property format
"""
DAYS, SECS = {'D': 1, 'W': 7}, {'S': 1, 'M': 60, 'H': 3600}
sign, i = 1, 0
if line[i] in '-+':
if line[i] == '-':
sign = -1
i += 1
if line[i] != 'P':
raise parse.ParseError()
i += 1
days, secs = 0, 0
while i < len(line):
if line[i] == 'T':
i += 1
if i == len(line):
break
j = i
while line[j].isdigit():
j += 1
if i == j:
raise parse.ParseError()
val = int(line[i:j])
if line[j] in DAYS:
days += val * DAYS[line[j]]
DAYS.pop(line[j])
elif line[j] in SECS:
secs += val * SECS[line[j]]
SECS.pop(line[j])
else:
raise parse.ParseError()
i = j + 1
return timedelta(sign * days, sign * secs)
def get_arrow(value):
if value is None:
return None
elif isinstance(value, Arrow):
return value
elif isinstance(value, tuple):
return arrow.get(*value)
elif isinstance(value, dict):
return arrow.get(**value)
else:
return arrow.get(value)
def arrow_to_iso(instant):
# set to utc, make iso, remove timezone
instant = arrow.get(instant.astimezone(tzutc)).format('YYYYMMDDTHHmmss')
return instant + 'Z'
def arrow_date_to_iso(instant):
# date-only for all day events
# set to utc, make iso, remove timezone
instant = arrow.get(instant.astimezone(tzutc)).format('YYYYMMDD')
return instant # no TZ for all days
def uid_gen():
uid = str(uuid4())
return "{}@{}.org".format(uid, uid[:4])
def escape_string(string):
string = string.replace("\\", "\\\\")
string = string.replace(";", "\\;")
string = string.replace(",", "\\,")
string = string.replace("\n", "\\n")
string = string.replace("\r", "\\r")
return string
def unescape_string(string):
string = string.replace("\\;", ";")
string = string.replace("\\,", ",")
string = string.replace("\\n", "\n")
string = string.replace("\\N", "\n")
string = string.replace("\\r", "\r")
string = string.replace("\\R", "\r")
string = string.replace("\\\\", "\\")
return string
|
C4ptainCrunch/ics.py
|
ics/alarm.py
|
Alarm.clone
|
python
|
def clone(self):
clone = copy.copy(self)
clone._unused = clone._unused.clone()
return clone
|
Returns:
Alarm: an exact copy of self
|
train
|
https://github.com/C4ptainCrunch/ics.py/blob/bd918ec7453a7cf73a906cdcc78bd88eb4bab71b/ics/alarm.py#L171-L177
| null |
class Alarm(Component):
"""
A calendar event VALARM base class
"""
_TYPE = 'VALARM'
_EXTRACTORS = []
_OUTPUTS = []
def __init__(self,
trigger=None,
repeat=None,
duration=None):
"""
Instantiates a new :class:`ics.alarm.Alarm`.
Adheres to RFC5545 VALARM standard: http://icalendar.org/iCalendar-RFC-5545/3-6-6-alarm-component.html
Args:
trigger (datetime.timedelta OR datetime.datetime) : Timespan to alert before parent action, or absolute time to alert
repeat (integer) : How many times to repeat the alarm
duration (datetime.timedelta) : Duration between repeats
Raises:
ValueError: If trigger, repeat, or duration do not match the RFC spec.
"""
# Set initial values
self._trigger = None
self._repeat = None
self._duration = None
# Validate and parse
self.trigger = trigger
# XOR repeat and duration
if (repeat is None) ^ (duration is None):
raise ValueError('If either repeat or duration is specified, both must be specified')
if repeat:
self.repeat = repeat
if duration:
self.duration = duration
self._unused = Container(name='VALARM')
@property
def trigger(self):
"""The trigger condition for the alarm
| Returns either a timedelta or datetime object
| Timedelta must have positive total_seconds()
| Datetime object is also allowed.
"""
return self._trigger
@trigger.setter
def trigger(self, value):
if type(value) is timedelta and value.total_seconds() < 0:
raise ValueError('Trigger timespan must be positive')
elif type(value) is not timedelta:
value = get_arrow(value)
self._trigger = value
@property
def repeat(self):
"""Number of times to repeat alarm
| Returns an integer for number of alarm repeats
| Value must be >= 0
"""
return self._repeat
@repeat.setter
def repeat(self, value):
if value < 0:
raise ValueError('Repeat must be great than or equal to 0.')
self._repeat = value
@property
def duration(self):
"""Duration between alarm repeats
| Returns a timedelta object
| Timespan must return positive total_seconds() value
"""
return self._duration
@duration.setter
def duration(self, value):
if value.total_seconds() < 0:
raise ValueError('Alarm duration timespan must be positive.')
self._duration = value
@property
def action(self):
""" VALARM action to be implemented by concrete classes
"""
raise NotImplementedError('Base class cannot be instantiated directly')
def __repr__(self):
value = '{0} trigger:{1}'.format(type(self), self.trigger)
if self.repeat:
value += ' repeat:{0} duration:{1}'.format(self.repeat, self.duration)
return '<{0}>'.format(value)
def __ne__(self, other):
return not self.__eq__(other)
def __eq__(self, other):
"""Two alarms are considered equal if they have the same type and base values."""
return (type(self) is type(other) and
self.trigger == other.trigger and
self.repeat == other.repeat and
self.duration == other.duration)
|
C4ptainCrunch/ics.py
|
ics/timeline.py
|
Timeline.included
|
python
|
def included(self, start, stop):
for event in self:
if (start <= event.begin <= stop # if start is between the bonds
and start <= event.end <= stop): # and stop is between the bonds
yield event
|
Iterates (in chronological order) over every event that is included
in the timespan between `start` and `stop`
Args:
start : (Arrow object)
stop : (Arrow object)
|
train
|
https://github.com/C4ptainCrunch/ics.py/blob/bd918ec7453a7cf73a906cdcc78bd88eb4bab71b/ics/timeline.py#L42-L53
| null |
class Timeline(object):
def __init__(self, calendar):
"""Instanciates a new Timeline.
(You should not have to instanciate a new timeline by yourself)
Args:
calendar : :class:`ics.icalendar.Calendar`
"""
self._calendar = calendar
def __iter__(self):
"""Iterates on every event from the :class:`ics.icalendar.Calendar` in chronological order
Note :
- chronological order is defined by the comparaison operators in :class:`ics.event.Event`
- Events with no `begin` will not appear here. (To list all events in a `Calendar` use `Calendar.events`)
"""
# Using a heap is faster than sorting if the number of events (n) is
# much bigger than the number of events we extract from the iterator (k).
# Complexity: O(n + k log n).
heap = [x for x in self._calendar.events if x.begin is not None]
heapq.heapify(heap)
while heap:
yield heapq.heappop(heap)
def overlapping(self, start, stop):
"""Iterates (in chronological order) over every event that has an intersection
with the timespan between `start` and `stop`
Args:
start : (Arrow object)
stop : (Arrow object)
"""
for event in self:
if ((start <= event.begin <= stop # if start is between the bonds
or start <= event.end <= stop) # or stop is between the bonds
or event.begin <= start and event.end >= stop): # or event is a superset of [start,stop]
yield event
def start_after(self, instant):
"""Iterates (in chronological order) on every event from the :class:`ics.icalendar.Calendar` in chronological order.
The first event of the iteration has a starting date greater (later) than `instant`
Args:
instant : (Arrow object) starting point of the iteration
"""
for event in self:
if event.begin > instant:
yield event
def at(self, instant):
"""Iterates (in chronological order) over all events that are occuring during `instant`.
Args:
instant (Arrow object)
"""
for event in self:
if event.begin <= instant <= event.end:
yield event
def on(self, day, strict=False):
"""Iterates (in chronological order) over all events that occurs on `day`
Args:
day (Arrow object)
strict (bool): if True events will be returned only if they are\
strictly *included* in `day`.
"""
day_start, day_stop = day.floor('day').span('day')
if strict:
return self.included(day_start, day_stop)
else:
return self.overlapping(day_start, day_stop)
def today(self, strict=False):
"""Iterates (in chronological order) over all events that occurs today
Args:
strict (bool): if True events will be returned only if they are\
strictly *included* in `day`.
"""
return self.on(arrow.now(), strict=strict)
def now(self):
"""Iterates (in chronological order) over all events that occurs now
"""
return self.at(arrow.now())
|
C4ptainCrunch/ics.py
|
ics/timeline.py
|
Timeline.overlapping
|
python
|
def overlapping(self, start, stop):
for event in self:
if ((start <= event.begin <= stop # if start is between the bonds
or start <= event.end <= stop) # or stop is between the bonds
or event.begin <= start and event.end >= stop): # or event is a superset of [start,stop]
yield event
|
Iterates (in chronological order) over every event that has an intersection
with the timespan between `start` and `stop`
Args:
start : (Arrow object)
stop : (Arrow object)
|
train
|
https://github.com/C4ptainCrunch/ics.py/blob/bd918ec7453a7cf73a906cdcc78bd88eb4bab71b/ics/timeline.py#L55-L67
| null |
class Timeline(object):
def __init__(self, calendar):
"""Instanciates a new Timeline.
(You should not have to instanciate a new timeline by yourself)
Args:
calendar : :class:`ics.icalendar.Calendar`
"""
self._calendar = calendar
def __iter__(self):
"""Iterates on every event from the :class:`ics.icalendar.Calendar` in chronological order
Note :
- chronological order is defined by the comparaison operators in :class:`ics.event.Event`
- Events with no `begin` will not appear here. (To list all events in a `Calendar` use `Calendar.events`)
"""
# Using a heap is faster than sorting if the number of events (n) is
# much bigger than the number of events we extract from the iterator (k).
# Complexity: O(n + k log n).
heap = [x for x in self._calendar.events if x.begin is not None]
heapq.heapify(heap)
while heap:
yield heapq.heappop(heap)
def included(self, start, stop):
"""Iterates (in chronological order) over every event that is included
in the timespan between `start` and `stop`
Args:
start : (Arrow object)
stop : (Arrow object)
"""
for event in self:
if (start <= event.begin <= stop # if start is between the bonds
and start <= event.end <= stop): # and stop is between the bonds
yield event
def start_after(self, instant):
"""Iterates (in chronological order) on every event from the :class:`ics.icalendar.Calendar` in chronological order.
The first event of the iteration has a starting date greater (later) than `instant`
Args:
instant : (Arrow object) starting point of the iteration
"""
for event in self:
if event.begin > instant:
yield event
def at(self, instant):
"""Iterates (in chronological order) over all events that are occuring during `instant`.
Args:
instant (Arrow object)
"""
for event in self:
if event.begin <= instant <= event.end:
yield event
def on(self, day, strict=False):
"""Iterates (in chronological order) over all events that occurs on `day`
Args:
day (Arrow object)
strict (bool): if True events will be returned only if they are\
strictly *included* in `day`.
"""
day_start, day_stop = day.floor('day').span('day')
if strict:
return self.included(day_start, day_stop)
else:
return self.overlapping(day_start, day_stop)
def today(self, strict=False):
"""Iterates (in chronological order) over all events that occurs today
Args:
strict (bool): if True events will be returned only if they are\
strictly *included* in `day`.
"""
return self.on(arrow.now(), strict=strict)
def now(self):
"""Iterates (in chronological order) over all events that occurs now
"""
return self.at(arrow.now())
|
C4ptainCrunch/ics.py
|
ics/timeline.py
|
Timeline.at
|
python
|
def at(self, instant):
for event in self:
if event.begin <= instant <= event.end:
yield event
|
Iterates (in chronological order) over all events that are occuring during `instant`.
Args:
instant (Arrow object)
|
train
|
https://github.com/C4ptainCrunch/ics.py/blob/bd918ec7453a7cf73a906cdcc78bd88eb4bab71b/ics/timeline.py#L80-L89
| null |
class Timeline(object):
def __init__(self, calendar):
"""Instanciates a new Timeline.
(You should not have to instanciate a new timeline by yourself)
Args:
calendar : :class:`ics.icalendar.Calendar`
"""
self._calendar = calendar
def __iter__(self):
"""Iterates on every event from the :class:`ics.icalendar.Calendar` in chronological order
Note :
- chronological order is defined by the comparaison operators in :class:`ics.event.Event`
- Events with no `begin` will not appear here. (To list all events in a `Calendar` use `Calendar.events`)
"""
# Using a heap is faster than sorting if the number of events (n) is
# much bigger than the number of events we extract from the iterator (k).
# Complexity: O(n + k log n).
heap = [x for x in self._calendar.events if x.begin is not None]
heapq.heapify(heap)
while heap:
yield heapq.heappop(heap)
def included(self, start, stop):
"""Iterates (in chronological order) over every event that is included
in the timespan between `start` and `stop`
Args:
start : (Arrow object)
stop : (Arrow object)
"""
for event in self:
if (start <= event.begin <= stop # if start is between the bonds
and start <= event.end <= stop): # and stop is between the bonds
yield event
def overlapping(self, start, stop):
"""Iterates (in chronological order) over every event that has an intersection
with the timespan between `start` and `stop`
Args:
start : (Arrow object)
stop : (Arrow object)
"""
for event in self:
if ((start <= event.begin <= stop # if start is between the bonds
or start <= event.end <= stop) # or stop is between the bonds
or event.begin <= start and event.end >= stop): # or event is a superset of [start,stop]
yield event
def start_after(self, instant):
"""Iterates (in chronological order) on every event from the :class:`ics.icalendar.Calendar` in chronological order.
The first event of the iteration has a starting date greater (later) than `instant`
Args:
instant : (Arrow object) starting point of the iteration
"""
for event in self:
if event.begin > instant:
yield event
def on(self, day, strict=False):
"""Iterates (in chronological order) over all events that occurs on `day`
Args:
day (Arrow object)
strict (bool): if True events will be returned only if they are\
strictly *included* in `day`.
"""
day_start, day_stop = day.floor('day').span('day')
if strict:
return self.included(day_start, day_stop)
else:
return self.overlapping(day_start, day_stop)
def today(self, strict=False):
"""Iterates (in chronological order) over all events that occurs today
Args:
strict (bool): if True events will be returned only if they are\
strictly *included* in `day`.
"""
return self.on(arrow.now(), strict=strict)
def now(self):
"""Iterates (in chronological order) over all events that occurs now
"""
return self.at(arrow.now())
|
C4ptainCrunch/ics.py
|
ics/timeline.py
|
Timeline.on
|
python
|
def on(self, day, strict=False):
day_start, day_stop = day.floor('day').span('day')
if strict:
return self.included(day_start, day_stop)
else:
return self.overlapping(day_start, day_stop)
|
Iterates (in chronological order) over all events that occurs on `day`
Args:
day (Arrow object)
strict (bool): if True events will be returned only if they are\
strictly *included* in `day`.
|
train
|
https://github.com/C4ptainCrunch/ics.py/blob/bd918ec7453a7cf73a906cdcc78bd88eb4bab71b/ics/timeline.py#L91-L103
|
[
"def included(self, start, stop):\n \"\"\"Iterates (in chronological order) over every event that is included\n in the timespan between `start` and `stop`\n\n Args:\n start : (Arrow object)\n stop : (Arrow object)\n \"\"\"\n for event in self:\n if (start <= event.begin <= stop # if start is between the bonds\n and start <= event.end <= stop): # and stop is between the bonds\n yield event\n",
"def overlapping(self, start, stop):\n \"\"\"Iterates (in chronological order) over every event that has an intersection\n with the timespan between `start` and `stop`\n\n Args:\n start : (Arrow object)\n stop : (Arrow object)\n \"\"\"\n for event in self:\n if ((start <= event.begin <= stop # if start is between the bonds\n or start <= event.end <= stop) # or stop is between the bonds\n or event.begin <= start and event.end >= stop): # or event is a superset of [start,stop]\n yield event\n"
] |
class Timeline(object):
def __init__(self, calendar):
"""Instanciates a new Timeline.
(You should not have to instanciate a new timeline by yourself)
Args:
calendar : :class:`ics.icalendar.Calendar`
"""
self._calendar = calendar
def __iter__(self):
"""Iterates on every event from the :class:`ics.icalendar.Calendar` in chronological order
Note :
- chronological order is defined by the comparaison operators in :class:`ics.event.Event`
- Events with no `begin` will not appear here. (To list all events in a `Calendar` use `Calendar.events`)
"""
# Using a heap is faster than sorting if the number of events (n) is
# much bigger than the number of events we extract from the iterator (k).
# Complexity: O(n + k log n).
heap = [x for x in self._calendar.events if x.begin is not None]
heapq.heapify(heap)
while heap:
yield heapq.heappop(heap)
def included(self, start, stop):
"""Iterates (in chronological order) over every event that is included
in the timespan between `start` and `stop`
Args:
start : (Arrow object)
stop : (Arrow object)
"""
for event in self:
if (start <= event.begin <= stop # if start is between the bonds
and start <= event.end <= stop): # and stop is between the bonds
yield event
def overlapping(self, start, stop):
"""Iterates (in chronological order) over every event that has an intersection
with the timespan between `start` and `stop`
Args:
start : (Arrow object)
stop : (Arrow object)
"""
for event in self:
if ((start <= event.begin <= stop # if start is between the bonds
or start <= event.end <= stop) # or stop is between the bonds
or event.begin <= start and event.end >= stop): # or event is a superset of [start,stop]
yield event
def start_after(self, instant):
"""Iterates (in chronological order) on every event from the :class:`ics.icalendar.Calendar` in chronological order.
The first event of the iteration has a starting date greater (later) than `instant`
Args:
instant : (Arrow object) starting point of the iteration
"""
for event in self:
if event.begin > instant:
yield event
def at(self, instant):
"""Iterates (in chronological order) over all events that are occuring during `instant`.
Args:
instant (Arrow object)
"""
for event in self:
if event.begin <= instant <= event.end:
yield event
def today(self, strict=False):
"""Iterates (in chronological order) over all events that occurs today
Args:
strict (bool): if True events will be returned only if they are\
strictly *included* in `day`.
"""
return self.on(arrow.now(), strict=strict)
def now(self):
"""Iterates (in chronological order) over all events that occurs now
"""
return self.at(arrow.now())
|
C4ptainCrunch/ics.py
|
ics/timeline.py
|
Timeline.today
|
python
|
def today(self, strict=False):
return self.on(arrow.now(), strict=strict)
|
Iterates (in chronological order) over all events that occurs today
Args:
strict (bool): if True events will be returned only if they are\
strictly *included* in `day`.
|
train
|
https://github.com/C4ptainCrunch/ics.py/blob/bd918ec7453a7cf73a906cdcc78bd88eb4bab71b/ics/timeline.py#L105-L112
|
[
"def on(self, day, strict=False):\n \"\"\"Iterates (in chronological order) over all events that occurs on `day`\n\n Args:\n day (Arrow object)\n strict (bool): if True events will be returned only if they are\\\n strictly *included* in `day`.\n \"\"\"\n day_start, day_stop = day.floor('day').span('day')\n if strict:\n return self.included(day_start, day_stop)\n else:\n return self.overlapping(day_start, day_stop)\n"
] |
class Timeline(object):
def __init__(self, calendar):
"""Instanciates a new Timeline.
(You should not have to instanciate a new timeline by yourself)
Args:
calendar : :class:`ics.icalendar.Calendar`
"""
self._calendar = calendar
def __iter__(self):
"""Iterates on every event from the :class:`ics.icalendar.Calendar` in chronological order
Note :
- chronological order is defined by the comparaison operators in :class:`ics.event.Event`
- Events with no `begin` will not appear here. (To list all events in a `Calendar` use `Calendar.events`)
"""
# Using a heap is faster than sorting if the number of events (n) is
# much bigger than the number of events we extract from the iterator (k).
# Complexity: O(n + k log n).
heap = [x for x in self._calendar.events if x.begin is not None]
heapq.heapify(heap)
while heap:
yield heapq.heappop(heap)
def included(self, start, stop):
"""Iterates (in chronological order) over every event that is included
in the timespan between `start` and `stop`
Args:
start : (Arrow object)
stop : (Arrow object)
"""
for event in self:
if (start <= event.begin <= stop # if start is between the bonds
and start <= event.end <= stop): # and stop is between the bonds
yield event
def overlapping(self, start, stop):
"""Iterates (in chronological order) over every event that has an intersection
with the timespan between `start` and `stop`
Args:
start : (Arrow object)
stop : (Arrow object)
"""
for event in self:
if ((start <= event.begin <= stop # if start is between the bonds
or start <= event.end <= stop) # or stop is between the bonds
or event.begin <= start and event.end >= stop): # or event is a superset of [start,stop]
yield event
def start_after(self, instant):
"""Iterates (in chronological order) on every event from the :class:`ics.icalendar.Calendar` in chronological order.
The first event of the iteration has a starting date greater (later) than `instant`
Args:
instant : (Arrow object) starting point of the iteration
"""
for event in self:
if event.begin > instant:
yield event
def at(self, instant):
"""Iterates (in chronological order) over all events that are occuring during `instant`.
Args:
instant (Arrow object)
"""
for event in self:
if event.begin <= instant <= event.end:
yield event
def on(self, day, strict=False):
"""Iterates (in chronological order) over all events that occurs on `day`
Args:
day (Arrow object)
strict (bool): if True events will be returned only if they are\
strictly *included* in `day`.
"""
day_start, day_stop = day.floor('day').span('day')
if strict:
return self.included(day_start, day_stop)
else:
return self.overlapping(day_start, day_stop)
def now(self):
"""Iterates (in chronological order) over all events that occurs now
"""
return self.at(arrow.now())
|
C4ptainCrunch/ics.py
|
ics/event.py
|
Event.end
|
python
|
def end(self):
if self._duration: # if end is duration defined
# return the beginning + duration
return self.begin + self._duration
elif self._end_time: # if end is time defined
if self.all_day:
return self._end_time
else:
return self._end_time
elif self._begin: # if end is not defined
if self.all_day:
return self._begin + timedelta(days=1)
else:
# instant event
return self._begin
else:
return None
|
Get or set the end of the event.
| Will return an :class:`Arrow` object.
| May be set to anything that :func:`Arrow.get` understands.
| If set to a non null value, removes any already
existing duration.
| Setting to None will have unexpected behavior if
begin is not None.
| Must not be set to an inferior value than self.begin.
|
train
|
https://github.com/C4ptainCrunch/ics.py/blob/bd918ec7453a7cf73a906cdcc78bd88eb4bab71b/ics/event.py#L139-L166
|
[
"def get_arrow(value):\n if value is None:\n return None\n elif isinstance(value, Arrow):\n return value\n elif isinstance(value, tuple):\n return arrow.get(*value)\n elif isinstance(value, dict):\n return arrow.get(**value)\n else:\n return arrow.get(value)\n"
] |
class Event(Component):
"""A calendar event.
Can be full-day or between two instants.
Can be defined by a beginning instant and
a duration *or* end instant.
"""
_TYPE = "VEVENT"
_EXTRACTORS = []
_OUTPUTS = []
def __init__(self,
name=None,
begin=None,
end=None,
duration=None,
uid=None,
description=None,
created=None,
location=None,
url=None,
transparent=False,
alarms=None,
categories=None,
status=None,
):
"""Instantiates a new :class:`ics.event.Event`.
Args:
name (string) : rfc5545 SUMMARY property
begin (Arrow-compatible)
end (Arrow-compatible)
duration (datetime.timedelta)
uid (string): must be unique
description (string)
created (Arrow-compatible)
location (string)
url (string)
transparent (Boolean)
alarms (:class:`ics.alarm.Alarm`)
categories (set of string)
status (string)
Raises:
ValueError: if `end` and `duration` are specified at the same time
"""
self._duration = None
self._end_time = None
self._begin = None
self._begin_precision = None
self.uid = uid_gen() if not uid else uid
self.description = description
self.created = get_arrow(created)
self.location = location
self.url = url
self.transparent = transparent
self.alarms = set()
self.categories = set()
self._unused = Container(name='VEVENT')
self.name = name
self.begin = begin
# TODO: DRY [1]
if duration and end:
raise ValueError(
'Event() may not specify an end and a duration \
at the same time')
elif end: # End was specified
self.end = end
elif duration: # Duration was specified
self.duration = duration
if alarms is not None:
self.alarms.update(set(alarms))
self.status = status
if categories is not None:
self.categories.update(set(categories))
def has_end(self):
"""
Return:
bool: self has an end
"""
return bool(self._end_time or self._duration)
@property
def begin(self):
"""Get or set the beginning of the event.
| Will return an :class:`Arrow` object.
| May be set to anything that :func:`Arrow.get` understands.
| If an end is defined (not a duration), .begin must not
be set to a superior value.
"""
return self._begin
@begin.setter
def begin(self, value):
value = get_arrow(value)
if value and self._end_time and value > self._end_time:
raise ValueError('Begin must be before end')
self._begin = value
self._begin_precision = 'second'
@property
@end.setter
def end(self, value):
value = get_arrow(value)
if value and self._begin and value < self._begin:
raise ValueError('End must be after begin')
self._end_time = value
if value:
self._duration = None
@property
def duration(self):
"""Get or set the duration of the event.
| Will return a timedelta object.
| May be set to anything that timedelta() understands.
| May be set with a dict ({"days":2, "hours":6}).
| If set to a non null value, removes any already
existing end time.
"""
if self._duration:
return self._duration
elif self.end:
# because of the clever getter for end, this also takes care of all_day events
return self.end - self.begin
else:
# event has neither start, nor end, nor duration
return None
@duration.setter
def duration(self, value):
if isinstance(value, dict):
value = timedelta(**value)
elif isinstance(value, timedelta):
value = value
elif value is not None:
value = timedelta(value)
if value:
self._end_time = None
self._duration = value
@property
def all_day(self):
"""
Return:
bool: self is an all-day event
"""
# the event may have an end, also given in 'day' precision
return self._begin_precision == 'day'
def make_all_day(self):
"""Transforms self to an all-day event.
The event will span all the days from the begin to the end day.
"""
if self.all_day:
# Do nothing if we already are a all day event
return
begin_day = self.begin.floor('day')
end_day = self.end.floor('day')
self._begin = begin_day
# for a one day event, we don't need a _end_time
if begin_day == end_day:
self._end_time = None
else:
self._end_time = end_day + timedelta(days=1)
self._duration = None
self._begin_precision = 'day'
@property
def status(self):
return self._status
@status.setter
def status(self, value):
if isinstance(value, str):
value = value.upper()
statuses = (None, 'TENTATIVE', 'CONFIRMED', 'CANCELLED')
if value not in statuses:
raise ValueError('status must be one of %s' % statuses)
self._status = value
def __repr__(self):
name = "'{}' ".format(self.name) if self.name else ''
if self.all_day:
if not self._end_time or self._begin == self._end_time:
return "<all-day Event {}{}>".format(name, self.begin.strftime('%Y-%m-%d'))
else:
return "<all-day Event {}begin:{} end:{}>".format(name, self._begin.strftime('%Y-%m-%d'), self._end_time.strftime('%Y-%m-%d'))
elif self.begin is None:
return "<Event '{}'>".format(self.name) if self.name else "<Event>"
else:
return "<Event {}begin:{} end:{}>".format(name, self.begin, self.end)
def starts_within(self, other):
if not isinstance(other, Event):
raise NotImplementedError(
'Cannot compare Event and {}'.format(type(other)))
return self.begin >= other.begin and self.begin <= other.end
def ends_within(self, other):
if not isinstance(other, Event):
raise NotImplementedError(
'Cannot compare Event and {}'.format(type(other)))
return self.end >= other.begin and self.end <= other.end
def intersects(self, other):
if not isinstance(other, Event):
raise NotImplementedError(
'Cannot compare Event and {}'.format(type(other)))
return (self.starts_within(other)
or self.ends_within(other)
or other.starts_within(self)
or other.ends_within(self))
__xor__ = intersects
def includes(self, other):
if isinstance(other, Event):
return other.starts_within(self) and other.ends_within(self)
if isinstance(other, datetime):
return self.begin <= other and self.end >= other
raise NotImplementedError(
'Cannot compare Event and {}'.format(type(other)))
def is_included_in(self, other):
if isinstance(other, Event):
return other.includes(self)
raise NotImplementedError(
'Cannot compare Event and {}'.format(type(other)))
__in__ = is_included_in
def __lt__(self, other):
if isinstance(other, Event):
if self.begin is None and other.begin is None:
if self.name is None and other.name is None:
return False
elif self.name is None:
return True
elif other.name is None:
return False
else:
return self.name < other.name
# if we arrive here, at least one of self.begin
# and other.begin is not None
# so if they are equal, they are both Arrow
elif self.begin == other.begin:
if self.end is None:
return True
elif other.end is None:
return False
else:
return self.end < other.end
else:
return self.begin < other.begin
if isinstance(other, datetime):
return self.begin < other
raise NotImplementedError(
'Cannot compare Event and {}'.format(type(other)))
def __le__(self, other):
if isinstance(other, Event):
if self.begin is None and other.begin is None:
if self.name is None and other.name is None:
return True
elif self.name is None:
return True
elif other.name is None:
return False
else:
return self.name <= other.name
elif self.begin == other.begin:
if self.end is None:
return True
elif other.end is None:
return False
else:
return self.end <= other.end
else:
return self.begin <= other.begin
if isinstance(other, datetime):
return self.begin <= other
raise NotImplementedError(
'Cannot compare Event and {}'.format(type(other)))
def __gt__(self, other):
return not self.__le__(other)
def __ge__(self, other):
return not self.__lt__(other)
def __or__(self, other):
if isinstance(other, Event):
begin, end = None, None
if self.begin and other.begin:
begin = max(self.begin, other.begin)
if self.end and other.end:
end = min(self.end, other.end)
return (begin, end) if begin and end and begin < end else (None, None)
raise NotImplementedError(
'Cannot compare Event and {}'.format(type(other)))
def __eq__(self, other):
"""Two events are considered equal if they have the same uid."""
if isinstance(other, Event):
return self.uid == other.uid
raise NotImplementedError(
'Cannot compare Event and {}'.format(type(other)))
def time_equals(self, other):
return (self.begin == other.begin) and (self.end == other.end)
def join(self, other, *args, **kwarg):
"""Create a new event which covers the time range of two intersecting events
All extra parameters are passed to the Event constructor.
Args:
other: the other event
Returns:
a new Event instance
"""
event = Event(*args, **kwarg)
if self.intersects(other):
if self.starts_within(other):
event.begin = other.begin
else:
event.begin = self.begin
if self.ends_within(other):
event.end = other.end
else:
event.end = self.end
return event
raise ValueError('Cannot join {} with {}: they don\'t intersect.'.format(self, other))
__and__ = join
def clone(self):
"""
Returns:
Event: an exact copy of self"""
clone = copy.copy(self)
clone._unused = clone._unused.clone()
clone.alarms = copy.copy(self.alarms)
clone.categories = copy.copy(self.categories)
return clone
def __hash__(self):
"""
Returns:
int: hash of self. Based on self.uid."""
return int(''.join(map(lambda x: '%.3d' % ord(x), self.uid)))
|
C4ptainCrunch/ics.py
|
ics/event.py
|
Event.duration
|
python
|
def duration(self):
if self._duration:
return self._duration
elif self.end:
# because of the clever getter for end, this also takes care of all_day events
return self.end - self.begin
else:
# event has neither start, nor end, nor duration
return None
|
Get or set the duration of the event.
| Will return a timedelta object.
| May be set to anything that timedelta() understands.
| May be set with a dict ({"days":2, "hours":6}).
| If set to a non null value, removes any already
existing end time.
|
train
|
https://github.com/C4ptainCrunch/ics.py/blob/bd918ec7453a7cf73a906cdcc78bd88eb4bab71b/ics/event.py#L179-L195
| null |
class Event(Component):
"""A calendar event.
Can be full-day or between two instants.
Can be defined by a beginning instant and
a duration *or* end instant.
"""
_TYPE = "VEVENT"
_EXTRACTORS = []
_OUTPUTS = []
def __init__(self,
name=None,
begin=None,
end=None,
duration=None,
uid=None,
description=None,
created=None,
location=None,
url=None,
transparent=False,
alarms=None,
categories=None,
status=None,
):
"""Instantiates a new :class:`ics.event.Event`.
Args:
name (string) : rfc5545 SUMMARY property
begin (Arrow-compatible)
end (Arrow-compatible)
duration (datetime.timedelta)
uid (string): must be unique
description (string)
created (Arrow-compatible)
location (string)
url (string)
transparent (Boolean)
alarms (:class:`ics.alarm.Alarm`)
categories (set of string)
status (string)
Raises:
ValueError: if `end` and `duration` are specified at the same time
"""
self._duration = None
self._end_time = None
self._begin = None
self._begin_precision = None
self.uid = uid_gen() if not uid else uid
self.description = description
self.created = get_arrow(created)
self.location = location
self.url = url
self.transparent = transparent
self.alarms = set()
self.categories = set()
self._unused = Container(name='VEVENT')
self.name = name
self.begin = begin
# TODO: DRY [1]
if duration and end:
raise ValueError(
'Event() may not specify an end and a duration \
at the same time')
elif end: # End was specified
self.end = end
elif duration: # Duration was specified
self.duration = duration
if alarms is not None:
self.alarms.update(set(alarms))
self.status = status
if categories is not None:
self.categories.update(set(categories))
def has_end(self):
"""
Return:
bool: self has an end
"""
return bool(self._end_time or self._duration)
@property
def begin(self):
"""Get or set the beginning of the event.
| Will return an :class:`Arrow` object.
| May be set to anything that :func:`Arrow.get` understands.
| If an end is defined (not a duration), .begin must not
be set to a superior value.
"""
return self._begin
@begin.setter
def begin(self, value):
value = get_arrow(value)
if value and self._end_time and value > self._end_time:
raise ValueError('Begin must be before end')
self._begin = value
self._begin_precision = 'second'
@property
def end(self):
"""Get or set the end of the event.
| Will return an :class:`Arrow` object.
| May be set to anything that :func:`Arrow.get` understands.
| If set to a non null value, removes any already
existing duration.
| Setting to None will have unexpected behavior if
begin is not None.
| Must not be set to an inferior value than self.begin.
"""
if self._duration: # if end is duration defined
# return the beginning + duration
return self.begin + self._duration
elif self._end_time: # if end is time defined
if self.all_day:
return self._end_time
else:
return self._end_time
elif self._begin: # if end is not defined
if self.all_day:
return self._begin + timedelta(days=1)
else:
# instant event
return self._begin
else:
return None
@end.setter
def end(self, value):
value = get_arrow(value)
if value and self._begin and value < self._begin:
raise ValueError('End must be after begin')
self._end_time = value
if value:
self._duration = None
@property
@duration.setter
def duration(self, value):
if isinstance(value, dict):
value = timedelta(**value)
elif isinstance(value, timedelta):
value = value
elif value is not None:
value = timedelta(value)
if value:
self._end_time = None
self._duration = value
@property
def all_day(self):
"""
Return:
bool: self is an all-day event
"""
# the event may have an end, also given in 'day' precision
return self._begin_precision == 'day'
def make_all_day(self):
"""Transforms self to an all-day event.
The event will span all the days from the begin to the end day.
"""
if self.all_day:
# Do nothing if we already are a all day event
return
begin_day = self.begin.floor('day')
end_day = self.end.floor('day')
self._begin = begin_day
# for a one day event, we don't need a _end_time
if begin_day == end_day:
self._end_time = None
else:
self._end_time = end_day + timedelta(days=1)
self._duration = None
self._begin_precision = 'day'
@property
def status(self):
return self._status
@status.setter
def status(self, value):
if isinstance(value, str):
value = value.upper()
statuses = (None, 'TENTATIVE', 'CONFIRMED', 'CANCELLED')
if value not in statuses:
raise ValueError('status must be one of %s' % statuses)
self._status = value
def __repr__(self):
name = "'{}' ".format(self.name) if self.name else ''
if self.all_day:
if not self._end_time or self._begin == self._end_time:
return "<all-day Event {}{}>".format(name, self.begin.strftime('%Y-%m-%d'))
else:
return "<all-day Event {}begin:{} end:{}>".format(name, self._begin.strftime('%Y-%m-%d'), self._end_time.strftime('%Y-%m-%d'))
elif self.begin is None:
return "<Event '{}'>".format(self.name) if self.name else "<Event>"
else:
return "<Event {}begin:{} end:{}>".format(name, self.begin, self.end)
def starts_within(self, other):
if not isinstance(other, Event):
raise NotImplementedError(
'Cannot compare Event and {}'.format(type(other)))
return self.begin >= other.begin and self.begin <= other.end
def ends_within(self, other):
if not isinstance(other, Event):
raise NotImplementedError(
'Cannot compare Event and {}'.format(type(other)))
return self.end >= other.begin and self.end <= other.end
def intersects(self, other):
if not isinstance(other, Event):
raise NotImplementedError(
'Cannot compare Event and {}'.format(type(other)))
return (self.starts_within(other)
or self.ends_within(other)
or other.starts_within(self)
or other.ends_within(self))
__xor__ = intersects
def includes(self, other):
if isinstance(other, Event):
return other.starts_within(self) and other.ends_within(self)
if isinstance(other, datetime):
return self.begin <= other and self.end >= other
raise NotImplementedError(
'Cannot compare Event and {}'.format(type(other)))
def is_included_in(self, other):
if isinstance(other, Event):
return other.includes(self)
raise NotImplementedError(
'Cannot compare Event and {}'.format(type(other)))
__in__ = is_included_in
def __lt__(self, other):
if isinstance(other, Event):
if self.begin is None and other.begin is None:
if self.name is None and other.name is None:
return False
elif self.name is None:
return True
elif other.name is None:
return False
else:
return self.name < other.name
# if we arrive here, at least one of self.begin
# and other.begin is not None
# so if they are equal, they are both Arrow
elif self.begin == other.begin:
if self.end is None:
return True
elif other.end is None:
return False
else:
return self.end < other.end
else:
return self.begin < other.begin
if isinstance(other, datetime):
return self.begin < other
raise NotImplementedError(
'Cannot compare Event and {}'.format(type(other)))
def __le__(self, other):
if isinstance(other, Event):
if self.begin is None and other.begin is None:
if self.name is None and other.name is None:
return True
elif self.name is None:
return True
elif other.name is None:
return False
else:
return self.name <= other.name
elif self.begin == other.begin:
if self.end is None:
return True
elif other.end is None:
return False
else:
return self.end <= other.end
else:
return self.begin <= other.begin
if isinstance(other, datetime):
return self.begin <= other
raise NotImplementedError(
'Cannot compare Event and {}'.format(type(other)))
def __gt__(self, other):
return not self.__le__(other)
def __ge__(self, other):
return not self.__lt__(other)
def __or__(self, other):
if isinstance(other, Event):
begin, end = None, None
if self.begin and other.begin:
begin = max(self.begin, other.begin)
if self.end and other.end:
end = min(self.end, other.end)
return (begin, end) if begin and end and begin < end else (None, None)
raise NotImplementedError(
'Cannot compare Event and {}'.format(type(other)))
def __eq__(self, other):
"""Two events are considered equal if they have the same uid."""
if isinstance(other, Event):
return self.uid == other.uid
raise NotImplementedError(
'Cannot compare Event and {}'.format(type(other)))
def time_equals(self, other):
return (self.begin == other.begin) and (self.end == other.end)
def join(self, other, *args, **kwarg):
"""Create a new event which covers the time range of two intersecting events
All extra parameters are passed to the Event constructor.
Args:
other: the other event
Returns:
a new Event instance
"""
event = Event(*args, **kwarg)
if self.intersects(other):
if self.starts_within(other):
event.begin = other.begin
else:
event.begin = self.begin
if self.ends_within(other):
event.end = other.end
else:
event.end = self.end
return event
raise ValueError('Cannot join {} with {}: they don\'t intersect.'.format(self, other))
__and__ = join
def clone(self):
"""
Returns:
Event: an exact copy of self"""
clone = copy.copy(self)
clone._unused = clone._unused.clone()
clone.alarms = copy.copy(self.alarms)
clone.categories = copy.copy(self.categories)
return clone
def __hash__(self):
"""
Returns:
int: hash of self. Based on self.uid."""
return int(''.join(map(lambda x: '%.3d' % ord(x), self.uid)))
|
C4ptainCrunch/ics.py
|
ics/event.py
|
Event.make_all_day
|
python
|
def make_all_day(self):
if self.all_day:
# Do nothing if we already are a all day event
return
begin_day = self.begin.floor('day')
end_day = self.end.floor('day')
self._begin = begin_day
# for a one day event, we don't need a _end_time
if begin_day == end_day:
self._end_time = None
else:
self._end_time = end_day + timedelta(days=1)
self._duration = None
self._begin_precision = 'day'
|
Transforms self to an all-day event.
The event will span all the days from the begin to the end day.
|
train
|
https://github.com/C4ptainCrunch/ics.py/blob/bd918ec7453a7cf73a906cdcc78bd88eb4bab71b/ics/event.py#L220-L241
| null |
class Event(Component):
"""A calendar event.
Can be full-day or between two instants.
Can be defined by a beginning instant and
a duration *or* end instant.
"""
_TYPE = "VEVENT"
_EXTRACTORS = []
_OUTPUTS = []
def __init__(self,
name=None,
begin=None,
end=None,
duration=None,
uid=None,
description=None,
created=None,
location=None,
url=None,
transparent=False,
alarms=None,
categories=None,
status=None,
):
"""Instantiates a new :class:`ics.event.Event`.
Args:
name (string) : rfc5545 SUMMARY property
begin (Arrow-compatible)
end (Arrow-compatible)
duration (datetime.timedelta)
uid (string): must be unique
description (string)
created (Arrow-compatible)
location (string)
url (string)
transparent (Boolean)
alarms (:class:`ics.alarm.Alarm`)
categories (set of string)
status (string)
Raises:
ValueError: if `end` and `duration` are specified at the same time
"""
self._duration = None
self._end_time = None
self._begin = None
self._begin_precision = None
self.uid = uid_gen() if not uid else uid
self.description = description
self.created = get_arrow(created)
self.location = location
self.url = url
self.transparent = transparent
self.alarms = set()
self.categories = set()
self._unused = Container(name='VEVENT')
self.name = name
self.begin = begin
# TODO: DRY [1]
if duration and end:
raise ValueError(
'Event() may not specify an end and a duration \
at the same time')
elif end: # End was specified
self.end = end
elif duration: # Duration was specified
self.duration = duration
if alarms is not None:
self.alarms.update(set(alarms))
self.status = status
if categories is not None:
self.categories.update(set(categories))
def has_end(self):
"""
Return:
bool: self has an end
"""
return bool(self._end_time or self._duration)
@property
def begin(self):
"""Get or set the beginning of the event.
| Will return an :class:`Arrow` object.
| May be set to anything that :func:`Arrow.get` understands.
| If an end is defined (not a duration), .begin must not
be set to a superior value.
"""
return self._begin
@begin.setter
def begin(self, value):
value = get_arrow(value)
if value and self._end_time and value > self._end_time:
raise ValueError('Begin must be before end')
self._begin = value
self._begin_precision = 'second'
@property
def end(self):
"""Get or set the end of the event.
| Will return an :class:`Arrow` object.
| May be set to anything that :func:`Arrow.get` understands.
| If set to a non null value, removes any already
existing duration.
| Setting to None will have unexpected behavior if
begin is not None.
| Must not be set to an inferior value than self.begin.
"""
if self._duration: # if end is duration defined
# return the beginning + duration
return self.begin + self._duration
elif self._end_time: # if end is time defined
if self.all_day:
return self._end_time
else:
return self._end_time
elif self._begin: # if end is not defined
if self.all_day:
return self._begin + timedelta(days=1)
else:
# instant event
return self._begin
else:
return None
@end.setter
def end(self, value):
value = get_arrow(value)
if value and self._begin and value < self._begin:
raise ValueError('End must be after begin')
self._end_time = value
if value:
self._duration = None
@property
def duration(self):
"""Get or set the duration of the event.
| Will return a timedelta object.
| May be set to anything that timedelta() understands.
| May be set with a dict ({"days":2, "hours":6}).
| If set to a non null value, removes any already
existing end time.
"""
if self._duration:
return self._duration
elif self.end:
# because of the clever getter for end, this also takes care of all_day events
return self.end - self.begin
else:
# event has neither start, nor end, nor duration
return None
@duration.setter
def duration(self, value):
if isinstance(value, dict):
value = timedelta(**value)
elif isinstance(value, timedelta):
value = value
elif value is not None:
value = timedelta(value)
if value:
self._end_time = None
self._duration = value
@property
def all_day(self):
"""
Return:
bool: self is an all-day event
"""
# the event may have an end, also given in 'day' precision
return self._begin_precision == 'day'
@property
def status(self):
return self._status
@status.setter
def status(self, value):
if isinstance(value, str):
value = value.upper()
statuses = (None, 'TENTATIVE', 'CONFIRMED', 'CANCELLED')
if value not in statuses:
raise ValueError('status must be one of %s' % statuses)
self._status = value
def __repr__(self):
name = "'{}' ".format(self.name) if self.name else ''
if self.all_day:
if not self._end_time or self._begin == self._end_time:
return "<all-day Event {}{}>".format(name, self.begin.strftime('%Y-%m-%d'))
else:
return "<all-day Event {}begin:{} end:{}>".format(name, self._begin.strftime('%Y-%m-%d'), self._end_time.strftime('%Y-%m-%d'))
elif self.begin is None:
return "<Event '{}'>".format(self.name) if self.name else "<Event>"
else:
return "<Event {}begin:{} end:{}>".format(name, self.begin, self.end)
def starts_within(self, other):
if not isinstance(other, Event):
raise NotImplementedError(
'Cannot compare Event and {}'.format(type(other)))
return self.begin >= other.begin and self.begin <= other.end
def ends_within(self, other):
if not isinstance(other, Event):
raise NotImplementedError(
'Cannot compare Event and {}'.format(type(other)))
return self.end >= other.begin and self.end <= other.end
def intersects(self, other):
if not isinstance(other, Event):
raise NotImplementedError(
'Cannot compare Event and {}'.format(type(other)))
return (self.starts_within(other)
or self.ends_within(other)
or other.starts_within(self)
or other.ends_within(self))
__xor__ = intersects
def includes(self, other):
if isinstance(other, Event):
return other.starts_within(self) and other.ends_within(self)
if isinstance(other, datetime):
return self.begin <= other and self.end >= other
raise NotImplementedError(
'Cannot compare Event and {}'.format(type(other)))
def is_included_in(self, other):
if isinstance(other, Event):
return other.includes(self)
raise NotImplementedError(
'Cannot compare Event and {}'.format(type(other)))
__in__ = is_included_in
def __lt__(self, other):
if isinstance(other, Event):
if self.begin is None and other.begin is None:
if self.name is None and other.name is None:
return False
elif self.name is None:
return True
elif other.name is None:
return False
else:
return self.name < other.name
# if we arrive here, at least one of self.begin
# and other.begin is not None
# so if they are equal, they are both Arrow
elif self.begin == other.begin:
if self.end is None:
return True
elif other.end is None:
return False
else:
return self.end < other.end
else:
return self.begin < other.begin
if isinstance(other, datetime):
return self.begin < other
raise NotImplementedError(
'Cannot compare Event and {}'.format(type(other)))
def __le__(self, other):
if isinstance(other, Event):
if self.begin is None and other.begin is None:
if self.name is None and other.name is None:
return True
elif self.name is None:
return True
elif other.name is None:
return False
else:
return self.name <= other.name
elif self.begin == other.begin:
if self.end is None:
return True
elif other.end is None:
return False
else:
return self.end <= other.end
else:
return self.begin <= other.begin
if isinstance(other, datetime):
return self.begin <= other
raise NotImplementedError(
'Cannot compare Event and {}'.format(type(other)))
def __gt__(self, other):
return not self.__le__(other)
def __ge__(self, other):
return not self.__lt__(other)
def __or__(self, other):
if isinstance(other, Event):
begin, end = None, None
if self.begin and other.begin:
begin = max(self.begin, other.begin)
if self.end and other.end:
end = min(self.end, other.end)
return (begin, end) if begin and end and begin < end else (None, None)
raise NotImplementedError(
'Cannot compare Event and {}'.format(type(other)))
def __eq__(self, other):
"""Two events are considered equal if they have the same uid."""
if isinstance(other, Event):
return self.uid == other.uid
raise NotImplementedError(
'Cannot compare Event and {}'.format(type(other)))
def time_equals(self, other):
return (self.begin == other.begin) and (self.end == other.end)
def join(self, other, *args, **kwarg):
"""Create a new event which covers the time range of two intersecting events
All extra parameters are passed to the Event constructor.
Args:
other: the other event
Returns:
a new Event instance
"""
event = Event(*args, **kwarg)
if self.intersects(other):
if self.starts_within(other):
event.begin = other.begin
else:
event.begin = self.begin
if self.ends_within(other):
event.end = other.end
else:
event.end = self.end
return event
raise ValueError('Cannot join {} with {}: they don\'t intersect.'.format(self, other))
__and__ = join
def clone(self):
"""
Returns:
Event: an exact copy of self"""
clone = copy.copy(self)
clone._unused = clone._unused.clone()
clone.alarms = copy.copy(self.alarms)
clone.categories = copy.copy(self.categories)
return clone
def __hash__(self):
"""
Returns:
int: hash of self. Based on self.uid."""
return int(''.join(map(lambda x: '%.3d' % ord(x), self.uid)))
|
C4ptainCrunch/ics.py
|
ics/event.py
|
Event.join
|
python
|
def join(self, other, *args, **kwarg):
event = Event(*args, **kwarg)
if self.intersects(other):
if self.starts_within(other):
event.begin = other.begin
else:
event.begin = self.begin
if self.ends_within(other):
event.end = other.end
else:
event.end = self.end
return event
raise ValueError('Cannot join {} with {}: they don\'t intersect.'.format(self, other))
|
Create a new event which covers the time range of two intersecting events
All extra parameters are passed to the Event constructor.
Args:
other: the other event
Returns:
a new Event instance
|
train
|
https://github.com/C4ptainCrunch/ics.py/blob/bd918ec7453a7cf73a906cdcc78bd88eb4bab71b/ics/event.py#L387-L411
|
[
"def intersects(self, other):\n if not isinstance(other, Event):\n raise NotImplementedError(\n 'Cannot compare Event and {}'.format(type(other)))\n return (self.starts_within(other)\n or self.ends_within(other)\n or other.starts_within(self)\n or other.ends_within(self))\n"
] |
class Event(Component):
"""A calendar event.
Can be full-day or between two instants.
Can be defined by a beginning instant and
a duration *or* end instant.
"""
_TYPE = "VEVENT"
_EXTRACTORS = []
_OUTPUTS = []
def __init__(self,
name=None,
begin=None,
end=None,
duration=None,
uid=None,
description=None,
created=None,
location=None,
url=None,
transparent=False,
alarms=None,
categories=None,
status=None,
):
"""Instantiates a new :class:`ics.event.Event`.
Args:
name (string) : rfc5545 SUMMARY property
begin (Arrow-compatible)
end (Arrow-compatible)
duration (datetime.timedelta)
uid (string): must be unique
description (string)
created (Arrow-compatible)
location (string)
url (string)
transparent (Boolean)
alarms (:class:`ics.alarm.Alarm`)
categories (set of string)
status (string)
Raises:
ValueError: if `end` and `duration` are specified at the same time
"""
self._duration = None
self._end_time = None
self._begin = None
self._begin_precision = None
self.uid = uid_gen() if not uid else uid
self.description = description
self.created = get_arrow(created)
self.location = location
self.url = url
self.transparent = transparent
self.alarms = set()
self.categories = set()
self._unused = Container(name='VEVENT')
self.name = name
self.begin = begin
# TODO: DRY [1]
if duration and end:
raise ValueError(
'Event() may not specify an end and a duration \
at the same time')
elif end: # End was specified
self.end = end
elif duration: # Duration was specified
self.duration = duration
if alarms is not None:
self.alarms.update(set(alarms))
self.status = status
if categories is not None:
self.categories.update(set(categories))
def has_end(self):
"""
Return:
bool: self has an end
"""
return bool(self._end_time or self._duration)
@property
def begin(self):
"""Get or set the beginning of the event.
| Will return an :class:`Arrow` object.
| May be set to anything that :func:`Arrow.get` understands.
| If an end is defined (not a duration), .begin must not
be set to a superior value.
"""
return self._begin
@begin.setter
def begin(self, value):
value = get_arrow(value)
if value and self._end_time and value > self._end_time:
raise ValueError('Begin must be before end')
self._begin = value
self._begin_precision = 'second'
@property
def end(self):
"""Get or set the end of the event.
| Will return an :class:`Arrow` object.
| May be set to anything that :func:`Arrow.get` understands.
| If set to a non null value, removes any already
existing duration.
| Setting to None will have unexpected behavior if
begin is not None.
| Must not be set to an inferior value than self.begin.
"""
if self._duration: # if end is duration defined
# return the beginning + duration
return self.begin + self._duration
elif self._end_time: # if end is time defined
if self.all_day:
return self._end_time
else:
return self._end_time
elif self._begin: # if end is not defined
if self.all_day:
return self._begin + timedelta(days=1)
else:
# instant event
return self._begin
else:
return None
@end.setter
def end(self, value):
value = get_arrow(value)
if value and self._begin and value < self._begin:
raise ValueError('End must be after begin')
self._end_time = value
if value:
self._duration = None
@property
def duration(self):
"""Get or set the duration of the event.
| Will return a timedelta object.
| May be set to anything that timedelta() understands.
| May be set with a dict ({"days":2, "hours":6}).
| If set to a non null value, removes any already
existing end time.
"""
if self._duration:
return self._duration
elif self.end:
# because of the clever getter for end, this also takes care of all_day events
return self.end - self.begin
else:
# event has neither start, nor end, nor duration
return None
@duration.setter
def duration(self, value):
if isinstance(value, dict):
value = timedelta(**value)
elif isinstance(value, timedelta):
value = value
elif value is not None:
value = timedelta(value)
if value:
self._end_time = None
self._duration = value
@property
def all_day(self):
"""
Return:
bool: self is an all-day event
"""
# the event may have an end, also given in 'day' precision
return self._begin_precision == 'day'
def make_all_day(self):
"""Transforms self to an all-day event.
The event will span all the days from the begin to the end day.
"""
if self.all_day:
# Do nothing if we already are a all day event
return
begin_day = self.begin.floor('day')
end_day = self.end.floor('day')
self._begin = begin_day
# for a one day event, we don't need a _end_time
if begin_day == end_day:
self._end_time = None
else:
self._end_time = end_day + timedelta(days=1)
self._duration = None
self._begin_precision = 'day'
@property
def status(self):
return self._status
@status.setter
def status(self, value):
if isinstance(value, str):
value = value.upper()
statuses = (None, 'TENTATIVE', 'CONFIRMED', 'CANCELLED')
if value not in statuses:
raise ValueError('status must be one of %s' % statuses)
self._status = value
def __repr__(self):
name = "'{}' ".format(self.name) if self.name else ''
if self.all_day:
if not self._end_time or self._begin == self._end_time:
return "<all-day Event {}{}>".format(name, self.begin.strftime('%Y-%m-%d'))
else:
return "<all-day Event {}begin:{} end:{}>".format(name, self._begin.strftime('%Y-%m-%d'), self._end_time.strftime('%Y-%m-%d'))
elif self.begin is None:
return "<Event '{}'>".format(self.name) if self.name else "<Event>"
else:
return "<Event {}begin:{} end:{}>".format(name, self.begin, self.end)
def starts_within(self, other):
if not isinstance(other, Event):
raise NotImplementedError(
'Cannot compare Event and {}'.format(type(other)))
return self.begin >= other.begin and self.begin <= other.end
def ends_within(self, other):
if not isinstance(other, Event):
raise NotImplementedError(
'Cannot compare Event and {}'.format(type(other)))
return self.end >= other.begin and self.end <= other.end
def intersects(self, other):
if not isinstance(other, Event):
raise NotImplementedError(
'Cannot compare Event and {}'.format(type(other)))
return (self.starts_within(other)
or self.ends_within(other)
or other.starts_within(self)
or other.ends_within(self))
__xor__ = intersects
def includes(self, other):
if isinstance(other, Event):
return other.starts_within(self) and other.ends_within(self)
if isinstance(other, datetime):
return self.begin <= other and self.end >= other
raise NotImplementedError(
'Cannot compare Event and {}'.format(type(other)))
def is_included_in(self, other):
if isinstance(other, Event):
return other.includes(self)
raise NotImplementedError(
'Cannot compare Event and {}'.format(type(other)))
__in__ = is_included_in
def __lt__(self, other):
if isinstance(other, Event):
if self.begin is None and other.begin is None:
if self.name is None and other.name is None:
return False
elif self.name is None:
return True
elif other.name is None:
return False
else:
return self.name < other.name
# if we arrive here, at least one of self.begin
# and other.begin is not None
# so if they are equal, they are both Arrow
elif self.begin == other.begin:
if self.end is None:
return True
elif other.end is None:
return False
else:
return self.end < other.end
else:
return self.begin < other.begin
if isinstance(other, datetime):
return self.begin < other
raise NotImplementedError(
'Cannot compare Event and {}'.format(type(other)))
def __le__(self, other):
if isinstance(other, Event):
if self.begin is None and other.begin is None:
if self.name is None and other.name is None:
return True
elif self.name is None:
return True
elif other.name is None:
return False
else:
return self.name <= other.name
elif self.begin == other.begin:
if self.end is None:
return True
elif other.end is None:
return False
else:
return self.end <= other.end
else:
return self.begin <= other.begin
if isinstance(other, datetime):
return self.begin <= other
raise NotImplementedError(
'Cannot compare Event and {}'.format(type(other)))
def __gt__(self, other):
return not self.__le__(other)
def __ge__(self, other):
return not self.__lt__(other)
def __or__(self, other):
if isinstance(other, Event):
begin, end = None, None
if self.begin and other.begin:
begin = max(self.begin, other.begin)
if self.end and other.end:
end = min(self.end, other.end)
return (begin, end) if begin and end and begin < end else (None, None)
raise NotImplementedError(
'Cannot compare Event and {}'.format(type(other)))
def __eq__(self, other):
"""Two events are considered equal if they have the same uid."""
if isinstance(other, Event):
return self.uid == other.uid
raise NotImplementedError(
'Cannot compare Event and {}'.format(type(other)))
def time_equals(self, other):
return (self.begin == other.begin) and (self.end == other.end)
__and__ = join
def clone(self):
"""
Returns:
Event: an exact copy of self"""
clone = copy.copy(self)
clone._unused = clone._unused.clone()
clone.alarms = copy.copy(self.alarms)
clone.categories = copy.copy(self.categories)
return clone
def __hash__(self):
"""
Returns:
int: hash of self. Based on self.uid."""
return int(''.join(map(lambda x: '%.3d' % ord(x), self.uid)))
|
C4ptainCrunch/ics.py
|
ics/icalendar.py
|
timezone
|
python
|
def timezone(calendar, vtimezones):
for vtimezone in vtimezones:
remove_x(vtimezone) # Remove non standard lines from the block
fake_file = StringIO()
fake_file.write(str(vtimezone)) # Represent the block as a string
fake_file.seek(0)
timezones = tzical(fake_file) # tzical does not like strings
# timezones is a tzical object and could contain multiple timezones
for key in timezones.keys():
calendar._timezones[key] = timezones.get(key)
|
Receives a list of VTIMEZONE blocks.
Parses them and adds them to calendar._timezones.
|
train
|
https://github.com/C4ptainCrunch/ics.py/blob/bd918ec7453a7cf73a906cdcc78bd88eb4bab71b/ics/icalendar.py#L184-L197
|
[
"def remove_x(container):\n for i in reversed(range(len(container))):\n item = container[i]\n if item.name.startswith('X-'):\n del container[i]\n"
] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals, absolute_import
from six import StringIO, string_types, text_type, integer_types
from dateutil.tz import tzical
import copy
import collections
from .component import Component
from .timeline import Timeline
from .event import Event
from .todo import Todo
from .parse import (
lines_to_container,
string_to_container,
ContentLine,
Container,
)
from .utils import remove_x
class Calendar(Component):
"""Represents an unique rfc5545 iCalendar."""
_TYPE = 'VCALENDAR'
_EXTRACTORS = []
_OUTPUTS = []
def __init__(self, imports=None, events=None, todos=None, creator=None):
"""Instantiates a new Calendar.
Args:
imports (string or list of lines/strings): data to be imported into the Calendar(),
events (set of Event): :class:`ics.event.Event`s to be added to the calendar
todos (set of Todo): :class:`ics.event.Todo`s to be added to the calendar
creator (string): uid of the creator program.
If `imports` is specified, every other argument will be ignored.
"""
# TODO : implement a file-descriptor import and a filename import
self._timezones = {}
self.events = set()
self.todos = set()
self._unused = Container(name='VCALENDAR')
self.scale = None
self.method = None
self.timeline = Timeline(self)
if imports is not None:
if isinstance(imports, string_types):
container = string_to_container(imports)
elif isinstance(imports, collections.Iterable):
container = lines_to_container(imports)
else:
raise TypeError("Expecting a sequence or a string")
# TODO : make a better API for multiple calendars
if len(container) != 1:
raise NotImplementedError(
'Multiple calendars in one file are not supported')
self._populate(container[0]) # Use first calendar
else:
if events is not None:
self.events.update(set(events))
if todos is not None:
self.todos.update(set(todos))
self._creator = creator
def __repr__(self):
return "<Calendar with {} event{} and {} todo{}>" \
.format(len(self.events),
"s" if len(self.events) > 1 else "",
len(self.todos),
"s" if len(self.todos) > 1 else "")
def __iter__(self):
"""Returns:
iterable: an iterable version of __str__, line per line
(with line-endings).
Example:
Can be used to write calendar to a file:
>>> c = Calendar(); c.append(Event(name="My cool event"))
>>> open('my.ics', 'w').writelines(c)
"""
for line in str(self).split('\n'):
l = line + '\n'
yield l
def __eq__(self, other):
for attr in ('_unused', 'scale', 'method', 'creator'):
if self.__getattribute__(attr) != other.__getattribute__(attr):
return False
return (self.events == other.events) and (self.todos == other.todos)
def __ne__(self, other):
return not self.__eq__(other)
@property
def creator(self):
"""Get or set the calendar's creator.
| Will return a string.
| May be set to a string.
| Creator is the PRODID iCalendar property.
| It uniquely identifies the program that created the calendar.
"""
return self._creator
@creator.setter
def creator(self, value):
if not isinstance(value, text_type):
raise ValueError('Event.creator must be unicode data not {}'.format(type(value)))
self._creator = value
def clone(self):
"""
Returns:
Calendar: an exact deep copy of self
"""
clone = copy.copy(self)
clone._unused = clone._unused.clone()
clone.events = copy.copy(self.events)
clone.todos = copy.copy(self.todos)
clone._timezones = copy.copy(self._timezones)
return clone
# ------------------
# ----- Inputs -----
# ------------------
@Calendar._extracts('PRODID', required=True)
def prodid(calendar, prodid):
calendar._creator = prodid.value
__version_default__ = [ContentLine(name='VERSION', value='2.0')]
@Calendar._extracts('VERSION', required=True, default=__version_default__)
def version(calendar, line):
version = line
# TODO : should take care of minver/maxver
if ';' in version.value:
_, calendar.version = version.value.split(';')
else:
calendar.version = version.value
@Calendar._extracts('CALSCALE')
def scale(calendar, line):
    # CALSCALE is optional; when present, store the lower-cased value and
    # its parameters, otherwise fall back to 'georgian' with no params.
    # NOTE(review): RFC 5545 spells the default GREGORIAN; the misspelled
    # 'georgian' is preserved here because other code may compare against
    # this exact string — confirm before changing.
    calscale = line
    if calscale:
        calendar.scale = calscale.value.lower()
        calendar.scale_params = calscale.params
    else:
        calendar.scale = 'georgian'
        calendar.scale_params = {}
@Calendar._extracts('METHOD')
def method(calendar, line):
    # METHOD (e.g. PUBLISH, REQUEST) is optional; None/{} when absent.
    method = line
    if method:
        calendar.method = method.value
        calendar.method_params = method.params
    else:
        calendar.method = None
        calendar.method_params = {}
@Calendar._extracts('VTIMEZONE', multiple=True)
@Calendar._extracts('VEVENT', multiple=True)
def events(calendar, lines):
    # Parse every VEVENT container into an Event.
    # tz=calendar._timezones gives access to the event factory to the
    # timezones list
    def event_factory(x):
        return Event._from_container(x, tz=calendar._timezones)
    calendar.events = set(map(event_factory, lines))
@Calendar._extracts('VTODO', multiple=True)
def todos(calendar, lines):
    # Parse every VTODO container into a Todo.
    # tz=calendar._timezones gives access to the event factory to the
    # timezones list
    def todo_factory(x):
        return Todo._from_container(x, tz=calendar._timezones)
    calendar.todos = set(map(todo_factory, lines))
# -------------------
# ----- Outputs -----
# -------------------
@Calendar._outputs
def o_prodid(calendar, container):
    # Emit PRODID; fall back to this library's identifier when unset.
    creator = calendar.creator if calendar.creator else \
        'ics.py - http://git.io/lLljaA'
    container.append(ContentLine('PRODID', value=creator))
@Calendar._outputs
def o_version(calendar, container):
    # Always emit iCalendar VERSION 2.0, regardless of what was parsed.
    container.append(ContentLine('VERSION', value='2.0'))
@Calendar._outputs
def o_scale(calendar, container):
    # CALSCALE is only written when explicitly set; upper-cased on output.
    if calendar.scale:
        container.append(ContentLine('CALSCALE', value=calendar.scale.upper()))
@Calendar._outputs
def o_method(calendar, container):
    # METHOD is only written when explicitly set; upper-cased on output.
    if calendar.method:
        container.append(ContentLine('METHOD', value=calendar.method.upper()))
@Calendar._outputs
def o_events(calendar, container):
    # Serialize each event; set iteration order is arbitrary.
    for event in calendar.events:
        container.append(str(event))
@Calendar._outputs
def o_todos(calendar, container):
    # Serialize each todo; set iteration order is arbitrary.
    for todo in calendar.todos:
        container.append(str(todo))
|
C4ptainCrunch/ics.py
|
ics/icalendar.py
|
Calendar.clone
|
python
|
def clone(self):
clone = copy.copy(self)
clone._unused = clone._unused.clone()
clone.events = copy.copy(self.events)
clone.todos = copy.copy(self.todos)
clone._timezones = copy.copy(self._timezones)
return clone
|
Returns:
Calendar: an exact deep copy of self
|
train
|
https://github.com/C4ptainCrunch/ics.py/blob/bd918ec7453a7cf73a906cdcc78bd88eb4bab71b/ics/icalendar.py#L126-L136
| null |
class Calendar(Component):
"""Represents an unique rfc5545 iCalendar."""
_TYPE = 'VCALENDAR'
_EXTRACTORS = []
_OUTPUTS = []
def __init__(self, imports=None, events=None, todos=None, creator=None):
"""Instantiates a new Calendar.
Args:
imports (string or list of lines/strings): data to be imported into the Calendar(),
events (set of Event): :class:`ics.event.Event`s to be added to the calendar
todos (set of Todo): :class:`ics.event.Todo`s to be added to the calendar
creator (string): uid of the creator program.
If `imports` is specified, every other argument will be ignored.
"""
# TODO : implement a file-descriptor import and a filename import
self._timezones = {}
self.events = set()
self.todos = set()
self._unused = Container(name='VCALENDAR')
self.scale = None
self.method = None
self.timeline = Timeline(self)
if imports is not None:
if isinstance(imports, string_types):
container = string_to_container(imports)
elif isinstance(imports, collections.Iterable):
container = lines_to_container(imports)
else:
raise TypeError("Expecting a sequence or a string")
# TODO : make a better API for multiple calendars
if len(container) != 1:
raise NotImplementedError(
'Multiple calendars in one file are not supported')
self._populate(container[0]) # Use first calendar
else:
if events is not None:
self.events.update(set(events))
if todos is not None:
self.todos.update(set(todos))
self._creator = creator
def __repr__(self):
return "<Calendar with {} event{} and {} todo{}>" \
.format(len(self.events),
"s" if len(self.events) > 1 else "",
len(self.todos),
"s" if len(self.todos) > 1 else "")
def __iter__(self):
"""Returns:
iterable: an iterable version of __str__, line per line
(with line-endings).
Example:
Can be used to write calendar to a file:
>>> c = Calendar(); c.append(Event(name="My cool event"))
>>> open('my.ics', 'w').writelines(c)
"""
for line in str(self).split('\n'):
l = line + '\n'
yield l
def __eq__(self, other):
for attr in ('_unused', 'scale', 'method', 'creator'):
if self.__getattribute__(attr) != other.__getattribute__(attr):
return False
return (self.events == other.events) and (self.todos == other.todos)
def __ne__(self, other):
return not self.__eq__(other)
@property
def creator(self):
"""Get or set the calendar's creator.
| Will return a string.
| May be set to a string.
| Creator is the PRODID iCalendar property.
| It uniquely identifies the program that created the calendar.
"""
return self._creator
@creator.setter
def creator(self, value):
if not isinstance(value, text_type):
raise ValueError('Event.creator must be unicode data not {}'.format(type(value)))
self._creator = value
|
C4ptainCrunch/ics.py
|
ics/todo.py
|
Todo.due
|
python
|
def due(self):
if self._duration:
# if due is duration defined return the beginning + duration
return self.begin + self._duration
elif self._due_time:
# if due is time defined
return self._due_time
else:
return None
|
Get or set the end of the todo.
| Will return an :class:`Arrow` object.
| May be set to anything that :func:`Arrow.get` understands.
| If set to a non null value, removes any already
existing duration.
| Setting to None will have unexpected behavior if
begin is not None.
| Must not be set to an inferior value than self.begin.
|
train
|
https://github.com/C4ptainCrunch/ics.py/blob/bd918ec7453a7cf73a906cdcc78bd88eb4bab71b/ics/todo.py#L158-L177
|
[
"def get_arrow(value):\n if value is None:\n return None\n elif isinstance(value, Arrow):\n return value\n elif isinstance(value, tuple):\n return arrow.get(*value)\n elif isinstance(value, dict):\n return arrow.get(**value)\n else:\n return arrow.get(value)\n"
] |
class Todo(Component):
"""A todo list entry.
Can have a start time and duration, or start and due time,
or only start/due time.
"""
_TYPE = "VTODO"
_EXTRACTORS = []
_OUTPUTS = []
def __init__(self,
dtstamp=None,
uid=None,
completed=None,
created=None,
description=None,
begin=None,
location=None,
percent=None,
priority=None,
name=None,
url=None,
due=None,
duration=None,
alarms=None,
status=None):
"""Instantiates a new :class:`ics.todo.Todo`.
Args:
uid (string): must be unique
dtstamp (Arrow-compatible)
completed (Arrow-compatible)
created (Arrow-compatible)
description (string)
begin (Arrow-compatible)
location (string)
percent (int): 0-100
priority (int): 0-9
name (string) : rfc5545 SUMMARY property
url (string)
due (Arrow-compatible)
duration (datetime.timedelta)
alarms (:class:`ics.alarm.Alarm`)
status (string)
Raises:
ValueError: if `duration` and `due` are specified at the same time
"""
self._percent = None
self._priority = None
self._begin = None
self._due_time = None
self._duration = None
self.uid = uid_gen() if not uid else uid
self.dtstamp = arrow.now() if not dtstamp else get_arrow(dtstamp)
self.completed = get_arrow(completed)
self.created = get_arrow(created)
self.description = description
self.begin = begin
self.location = location
self.percent = percent
self.priority = priority
self.name = name
self.url = url
self.alarms = set()
self._unused = Container(name='VTODO')
if duration and due:
raise ValueError(
'Todo() may not specify a duration and due date\
at the same time')
elif duration:
if not begin:
raise ValueError(
'Todo() must specify a begin if a duration\
is specified')
self.duration = duration
elif due:
self.due = due
if alarms is not None:
self.alarms.update(set(alarms))
self.status = status
@property
def percent(self):
return self._percent
@percent.setter
def percent(self, value):
if value:
value = int(value)
if value < 0 or value > 100:
raise ValueError('percent must be [0, 100]')
self._percent = value
@property
def priority(self):
return self._priority
@priority.setter
def priority(self, value):
if value:
value = int(value)
if value < 0 or value > 9:
raise ValueError('priority must be [0, 9]')
self._priority = value
@property
def begin(self):
"""Get or set the beginning of the todo.
| Will return an :class:`Arrow` object.
| May be set to anything that :func:`Arrow.get` understands.
| If a due time is defined (not a duration), .begin must not
be set to a superior value.
"""
return self._begin
@begin.setter
def begin(self, value):
value = get_arrow(value)
if value and self._due_time and value > self._due_time:
raise ValueError('Begin must be before due time')
self._begin = value
@property
@due.setter
def due(self, value):
value = get_arrow(value)
if value and self._begin and value < self._begin:
raise ValueError('Due must be after begin')
self._due_time = value
if value:
self._duration = None
@property
def duration(self):
"""Get or set the duration of the todo.
| Will return a timedelta object.
| May be set to anything that timedelta() understands.
| May be set with a dict ({"days":2, "hours":6}).
| If set to a non null value, removes any already
existing end time.
"""
if self._duration:
return self._duration
elif self.due:
return self.due - self.begin
else:
# todo has neither due, nor start and duration
return None
@duration.setter
def duration(self, value):
if isinstance(value, dict):
value = timedelta(**value)
elif isinstance(value, timedelta):
value = value
elif value is not None:
value = timedelta(value)
self._duration = value
if value:
self._due_time = None
@property
def status(self):
return self._status
@status.setter
def status(self, value):
if isinstance(value, str):
value = value.upper()
statuses = (None, 'NEEDS-ACTION', 'COMPLETED', 'IN-PROCESS', 'CANCELLED')
if value not in statuses:
raise ValueError('status must be one of %s' % statuses)
self._status = value
def __repr__(self):
if self.name is None:
return "<Todo>"
if self.begin is None and self.due is None:
return "<Todo '{}'>".format(self.name)
if self.due is None:
return "<Todo '{}' begin:{}>".format(self.name, self.begin)
if self.begin is None:
return "<Todo '{}' due:{}>".format(self.name, self.due)
return "<Todo '{}' begin:{} due:{}>".format(self.name, self.begin, self.due)
def __lt__(self, other):
if isinstance(other, Todo):
if self.due is None and other.due is None:
if self.name is None and other.name is None:
return False
elif self.name is None:
return True
elif other.name is None:
return False
else:
return self.name < other.name
return self.due < other.due
if isinstance(other, datetime):
if self.due:
return self.due < other
raise NotImplementedError(
'Cannot compare Todo and {}'.format(type(other)))
def __le__(self, other):
if isinstance(other, Todo):
if self.due is None and other.due is None:
if self.name is None and other.name is None:
return True
elif self.name is None:
return True
elif other.name is None:
return False
else:
return self.name <= other.name
return self.due <= other.due
if isinstance(other, datetime):
if self.due:
return self.due <= other
raise NotImplementedError(
'Cannot compare Todo and {}'.format(type(other)))
def __gt__(self, other):
if isinstance(other, Todo):
if self.due is None and other.due is None:
if self.name is None and other.name is None:
return False
elif self.name is None:
return False
elif other.name is None:
return True
else:
return self.name > other.name
return self.due > other.due
if isinstance(other, datetime):
if self.due:
return self.due > other
raise NotImplementedError(
'Cannot compare Todo and {}'.format(type(other)))
def __ge__(self, other):
if isinstance(other, Todo):
if self.due is None and other.due is None:
if self.name is None and other.name is None:
return True
elif self.name is None:
return True
elif other.name is None:
return False
else:
return self.name >= other.name
return self.due >= other.due
if isinstance(other, datetime):
if self.due:
return self.due >= other
raise NotImplementedError(
'Cannot compare Todo and {}'.format(type(other)))
def __eq__(self, other):
"""Two todos are considered equal if they have the same uid."""
if isinstance(other, Todo):
return self.uid == other.uid
raise NotImplementedError(
'Cannot compare Todo and {}'.format(type(other)))
def __ne__(self, other):
"""Two todos are considered not equal if they do not have the same uid."""
if isinstance(other, Todo):
return self.uid != other.uid
raise NotImplementedError(
'Cannot compare Todo and {}'.format(type(other)))
def clone(self):
"""
Returns:
Todo: an exact copy of self"""
clone = copy.copy(self)
clone._unused = clone._unused.clone()
clone.alarms = copy.copy(self.alarms)
return clone
def __hash__(self):
"""
Returns:
int: hash of self. Based on self.uid."""
return int(''.join(map(lambda x: '%.3d' % ord(x), self.uid)))
|
C4ptainCrunch/ics.py
|
ics/todo.py
|
Todo.duration
|
python
|
def duration(self):
if self._duration:
return self._duration
elif self.due:
return self.due - self.begin
else:
# todo has neither due, nor start and duration
return None
|
Get or set the duration of the todo.
| Will return a timedelta object.
| May be set to anything that timedelta() understands.
| May be set with a dict ({"days":2, "hours":6}).
| If set to a non null value, removes any already
existing end time.
|
train
|
https://github.com/C4ptainCrunch/ics.py/blob/bd918ec7453a7cf73a906cdcc78bd88eb4bab71b/ics/todo.py#L191-L206
| null |
class Todo(Component):
"""A todo list entry.
Can have a start time and duration, or start and due time,
or only start/due time.
"""
_TYPE = "VTODO"
_EXTRACTORS = []
_OUTPUTS = []
def __init__(self,
dtstamp=None,
uid=None,
completed=None,
created=None,
description=None,
begin=None,
location=None,
percent=None,
priority=None,
name=None,
url=None,
due=None,
duration=None,
alarms=None,
status=None):
"""Instantiates a new :class:`ics.todo.Todo`.
Args:
uid (string): must be unique
dtstamp (Arrow-compatible)
completed (Arrow-compatible)
created (Arrow-compatible)
description (string)
begin (Arrow-compatible)
location (string)
percent (int): 0-100
priority (int): 0-9
name (string) : rfc5545 SUMMARY property
url (string)
due (Arrow-compatible)
duration (datetime.timedelta)
alarms (:class:`ics.alarm.Alarm`)
status (string)
Raises:
ValueError: if `duration` and `due` are specified at the same time
"""
self._percent = None
self._priority = None
self._begin = None
self._due_time = None
self._duration = None
self.uid = uid_gen() if not uid else uid
self.dtstamp = arrow.now() if not dtstamp else get_arrow(dtstamp)
self.completed = get_arrow(completed)
self.created = get_arrow(created)
self.description = description
self.begin = begin
self.location = location
self.percent = percent
self.priority = priority
self.name = name
self.url = url
self.alarms = set()
self._unused = Container(name='VTODO')
if duration and due:
raise ValueError(
'Todo() may not specify a duration and due date\
at the same time')
elif duration:
if not begin:
raise ValueError(
'Todo() must specify a begin if a duration\
is specified')
self.duration = duration
elif due:
self.due = due
if alarms is not None:
self.alarms.update(set(alarms))
self.status = status
@property
def percent(self):
return self._percent
@percent.setter
def percent(self, value):
if value:
value = int(value)
if value < 0 or value > 100:
raise ValueError('percent must be [0, 100]')
self._percent = value
@property
def priority(self):
return self._priority
@priority.setter
def priority(self, value):
if value:
value = int(value)
if value < 0 or value > 9:
raise ValueError('priority must be [0, 9]')
self._priority = value
@property
def begin(self):
"""Get or set the beginning of the todo.
| Will return an :class:`Arrow` object.
| May be set to anything that :func:`Arrow.get` understands.
| If a due time is defined (not a duration), .begin must not
be set to a superior value.
"""
return self._begin
@begin.setter
def begin(self, value):
value = get_arrow(value)
if value and self._due_time and value > self._due_time:
raise ValueError('Begin must be before due time')
self._begin = value
@property
def due(self):
"""Get or set the end of the todo.
| Will return an :class:`Arrow` object.
| May be set to anything that :func:`Arrow.get` understands.
| If set to a non null value, removes any already
existing duration.
| Setting to None will have unexpected behavior if
begin is not None.
| Must not be set to an inferior value than self.begin.
"""
if self._duration:
# if due is duration defined return the beginning + duration
return self.begin + self._duration
elif self._due_time:
# if due is time defined
return self._due_time
else:
return None
@due.setter
def due(self, value):
value = get_arrow(value)
if value and self._begin and value < self._begin:
raise ValueError('Due must be after begin')
self._due_time = value
if value:
self._duration = None
@property
@duration.setter
def duration(self, value):
if isinstance(value, dict):
value = timedelta(**value)
elif isinstance(value, timedelta):
value = value
elif value is not None:
value = timedelta(value)
self._duration = value
if value:
self._due_time = None
@property
def status(self):
return self._status
@status.setter
def status(self, value):
if isinstance(value, str):
value = value.upper()
statuses = (None, 'NEEDS-ACTION', 'COMPLETED', 'IN-PROCESS', 'CANCELLED')
if value not in statuses:
raise ValueError('status must be one of %s' % statuses)
self._status = value
def __repr__(self):
if self.name is None:
return "<Todo>"
if self.begin is None and self.due is None:
return "<Todo '{}'>".format(self.name)
if self.due is None:
return "<Todo '{}' begin:{}>".format(self.name, self.begin)
if self.begin is None:
return "<Todo '{}' due:{}>".format(self.name, self.due)
return "<Todo '{}' begin:{} due:{}>".format(self.name, self.begin, self.due)
def __lt__(self, other):
if isinstance(other, Todo):
if self.due is None and other.due is None:
if self.name is None and other.name is None:
return False
elif self.name is None:
return True
elif other.name is None:
return False
else:
return self.name < other.name
return self.due < other.due
if isinstance(other, datetime):
if self.due:
return self.due < other
raise NotImplementedError(
'Cannot compare Todo and {}'.format(type(other)))
def __le__(self, other):
if isinstance(other, Todo):
if self.due is None and other.due is None:
if self.name is None and other.name is None:
return True
elif self.name is None:
return True
elif other.name is None:
return False
else:
return self.name <= other.name
return self.due <= other.due
if isinstance(other, datetime):
if self.due:
return self.due <= other
raise NotImplementedError(
'Cannot compare Todo and {}'.format(type(other)))
def __gt__(self, other):
if isinstance(other, Todo):
if self.due is None and other.due is None:
if self.name is None and other.name is None:
return False
elif self.name is None:
return False
elif other.name is None:
return True
else:
return self.name > other.name
return self.due > other.due
if isinstance(other, datetime):
if self.due:
return self.due > other
raise NotImplementedError(
'Cannot compare Todo and {}'.format(type(other)))
def __ge__(self, other):
if isinstance(other, Todo):
if self.due is None and other.due is None:
if self.name is None and other.name is None:
return True
elif self.name is None:
return True
elif other.name is None:
return False
else:
return self.name >= other.name
return self.due >= other.due
if isinstance(other, datetime):
if self.due:
return self.due >= other
raise NotImplementedError(
'Cannot compare Todo and {}'.format(type(other)))
def __eq__(self, other):
"""Two todos are considered equal if they have the same uid."""
if isinstance(other, Todo):
return self.uid == other.uid
raise NotImplementedError(
'Cannot compare Todo and {}'.format(type(other)))
def __ne__(self, other):
"""Two todos are considered not equal if they do not have the same uid."""
if isinstance(other, Todo):
return self.uid != other.uid
raise NotImplementedError(
'Cannot compare Todo and {}'.format(type(other)))
def clone(self):
"""
Returns:
Todo: an exact copy of self"""
clone = copy.copy(self)
clone._unused = clone._unused.clone()
clone.alarms = copy.copy(self.alarms)
return clone
def __hash__(self):
"""
Returns:
int: hash of self. Based on self.uid."""
return int(''.join(map(lambda x: '%.3d' % ord(x), self.uid)))
|
C4ptainCrunch/ics.py
|
ics/todo.py
|
Todo.clone
|
python
|
def clone(self):
clone = copy.copy(self)
clone._unused = clone._unused.clone()
clone.alarms = copy.copy(self.alarms)
return clone
|
Returns:
Todo: an exact copy of self
|
train
|
https://github.com/C4ptainCrunch/ics.py/blob/bd918ec7453a7cf73a906cdcc78bd88eb4bab71b/ics/todo.py#L332-L339
| null |
class Todo(Component):
"""A todo list entry.
Can have a start time and duration, or start and due time,
or only start/due time.
"""
_TYPE = "VTODO"
_EXTRACTORS = []
_OUTPUTS = []
def __init__(self,
dtstamp=None,
uid=None,
completed=None,
created=None,
description=None,
begin=None,
location=None,
percent=None,
priority=None,
name=None,
url=None,
due=None,
duration=None,
alarms=None,
status=None):
"""Instantiates a new :class:`ics.todo.Todo`.
Args:
uid (string): must be unique
dtstamp (Arrow-compatible)
completed (Arrow-compatible)
created (Arrow-compatible)
description (string)
begin (Arrow-compatible)
location (string)
percent (int): 0-100
priority (int): 0-9
name (string) : rfc5545 SUMMARY property
url (string)
due (Arrow-compatible)
duration (datetime.timedelta)
alarms (:class:`ics.alarm.Alarm`)
status (string)
Raises:
ValueError: if `duration` and `due` are specified at the same time
"""
self._percent = None
self._priority = None
self._begin = None
self._due_time = None
self._duration = None
self.uid = uid_gen() if not uid else uid
self.dtstamp = arrow.now() if not dtstamp else get_arrow(dtstamp)
self.completed = get_arrow(completed)
self.created = get_arrow(created)
self.description = description
self.begin = begin
self.location = location
self.percent = percent
self.priority = priority
self.name = name
self.url = url
self.alarms = set()
self._unused = Container(name='VTODO')
if duration and due:
raise ValueError(
'Todo() may not specify a duration and due date\
at the same time')
elif duration:
if not begin:
raise ValueError(
'Todo() must specify a begin if a duration\
is specified')
self.duration = duration
elif due:
self.due = due
if alarms is not None:
self.alarms.update(set(alarms))
self.status = status
@property
def percent(self):
return self._percent
@percent.setter
def percent(self, value):
if value:
value = int(value)
if value < 0 or value > 100:
raise ValueError('percent must be [0, 100]')
self._percent = value
@property
def priority(self):
return self._priority
@priority.setter
def priority(self, value):
if value:
value = int(value)
if value < 0 or value > 9:
raise ValueError('priority must be [0, 9]')
self._priority = value
@property
def begin(self):
"""Get or set the beginning of the todo.
| Will return an :class:`Arrow` object.
| May be set to anything that :func:`Arrow.get` understands.
| If a due time is defined (not a duration), .begin must not
be set to a superior value.
"""
return self._begin
@begin.setter
def begin(self, value):
value = get_arrow(value)
if value and self._due_time and value > self._due_time:
raise ValueError('Begin must be before due time')
self._begin = value
@property
def due(self):
"""Get or set the end of the todo.
| Will return an :class:`Arrow` object.
| May be set to anything that :func:`Arrow.get` understands.
| If set to a non null value, removes any already
existing duration.
| Setting to None will have unexpected behavior if
begin is not None.
| Must not be set to an inferior value than self.begin.
"""
if self._duration:
# if due is duration defined return the beginning + duration
return self.begin + self._duration
elif self._due_time:
# if due is time defined
return self._due_time
else:
return None
@due.setter
def due(self, value):
value = get_arrow(value)
if value and self._begin and value < self._begin:
raise ValueError('Due must be after begin')
self._due_time = value
if value:
self._duration = None
@property
def duration(self):
"""Get or set the duration of the todo.
| Will return a timedelta object.
| May be set to anything that timedelta() understands.
| May be set with a dict ({"days":2, "hours":6}).
| If set to a non null value, removes any already
existing end time.
"""
if self._duration:
return self._duration
elif self.due:
return self.due - self.begin
else:
# todo has neither due, nor start and duration
return None
@duration.setter
def duration(self, value):
if isinstance(value, dict):
value = timedelta(**value)
elif isinstance(value, timedelta):
value = value
elif value is not None:
value = timedelta(value)
self._duration = value
if value:
self._due_time = None
@property
def status(self):
return self._status
@status.setter
def status(self, value):
if isinstance(value, str):
value = value.upper()
statuses = (None, 'NEEDS-ACTION', 'COMPLETED', 'IN-PROCESS', 'CANCELLED')
if value not in statuses:
raise ValueError('status must be one of %s' % statuses)
self._status = value
def __repr__(self):
if self.name is None:
return "<Todo>"
if self.begin is None and self.due is None:
return "<Todo '{}'>".format(self.name)
if self.due is None:
return "<Todo '{}' begin:{}>".format(self.name, self.begin)
if self.begin is None:
return "<Todo '{}' due:{}>".format(self.name, self.due)
return "<Todo '{}' begin:{} due:{}>".format(self.name, self.begin, self.due)
def __lt__(self, other):
if isinstance(other, Todo):
if self.due is None and other.due is None:
if self.name is None and other.name is None:
return False
elif self.name is None:
return True
elif other.name is None:
return False
else:
return self.name < other.name
return self.due < other.due
if isinstance(other, datetime):
if self.due:
return self.due < other
raise NotImplementedError(
'Cannot compare Todo and {}'.format(type(other)))
def __le__(self, other):
if isinstance(other, Todo):
if self.due is None and other.due is None:
if self.name is None and other.name is None:
return True
elif self.name is None:
return True
elif other.name is None:
return False
else:
return self.name <= other.name
return self.due <= other.due
if isinstance(other, datetime):
if self.due:
return self.due <= other
raise NotImplementedError(
'Cannot compare Todo and {}'.format(type(other)))
def __gt__(self, other):
if isinstance(other, Todo):
if self.due is None and other.due is None:
if self.name is None and other.name is None:
return False
elif self.name is None:
return False
elif other.name is None:
return True
else:
return self.name > other.name
return self.due > other.due
if isinstance(other, datetime):
if self.due:
return self.due > other
raise NotImplementedError(
'Cannot compare Todo and {}'.format(type(other)))
def __ge__(self, other):
if isinstance(other, Todo):
if self.due is None and other.due is None:
if self.name is None and other.name is None:
return True
elif self.name is None:
return True
elif other.name is None:
return False
else:
return self.name >= other.name
return self.due >= other.due
if isinstance(other, datetime):
if self.due:
return self.due >= other
raise NotImplementedError(
'Cannot compare Todo and {}'.format(type(other)))
def __eq__(self, other):
"""Two todos are considered equal if they have the same uid."""
if isinstance(other, Todo):
return self.uid == other.uid
raise NotImplementedError(
'Cannot compare Todo and {}'.format(type(other)))
def __ne__(self, other):
"""Two todos are considered not equal if they do not have the same uid."""
if isinstance(other, Todo):
return self.uid != other.uid
raise NotImplementedError(
'Cannot compare Todo and {}'.format(type(other)))
def __hash__(self):
"""
Returns:
int: hash of self. Based on self.uid."""
return int(''.join(map(lambda x: '%.3d' % ord(x), self.uid)))
|
moble/quaternion
|
quaternion_time_series.py
|
slerp
|
python
|
def slerp(R1, R2, t1, t2, t_out):
tau = (t_out-t1)/(t2-t1)
return np.slerp_vectorized(R1, R2, tau)
|
Spherical linear interpolation of rotors
This function uses a simpler interface than the more fundamental
`slerp_evaluate` and `slerp_vectorized` functions. The latter
are fast, being implemented at the C level, but take input `tau`
instead of time. This function adjusts the time accordingly.
Parameters
----------
R1: quaternion
Quaternion at beginning of interpolation
R2: quaternion
Quaternion at end of interpolation
t1: float
Time corresponding to R1
t2: float
Time corresponding to R2
t_out: float or array of floats
Times to which the rotors should be interpolated
|
train
|
https://github.com/moble/quaternion/blob/7a323e81b391d6892e2874073e495e0beb057e85/quaternion_time_series.py#L11-L35
| null |
# Copyright (c) 2017, Michael Boyle
# See LICENSE file for details: <https://github.com/moble/quaternion/blob/master/LICENSE>
from __future__ import print_function, division, absolute_import
import numpy as np
import quaternion
from quaternion.numba_wrapper import njit
def squad(R_in, t_in, t_out):
    """Spherical "quadrangular" interpolation of rotors with a cubic spline

    This is the best way to interpolate rotations.  It uses the analog
    of a cubic spline, except that the interpolant is confined to the
    rotor manifold in a natural way.  Alternative methods involving
    interpolation of other coordinates on the rotation group or
    normalization of interpolated values give bad results.  The
    results from this method are as natural as any, and are continuous
    in first and second derivatives.

    The input `R_in` rotors are assumed to be reasonably continuous
    (no sign flips), and the input `t` arrays are assumed to be
    sorted.  No checking is done for either case, and you may get
    silently bad results if these conditions are violated.

    This function simplifies the calling, compared to `squad_evaluate`
    (which takes a set of four quaternions forming the edges of the
    "quadrangle", and the normalized time `tau`) and `squad_vectorized`
    (which takes the same arguments, but in array form, and efficiently
    loops over them).

    Parameters
    ----------
    R_in: array of quaternions
        A time-series of rotors (unit quaternions) to be interpolated
    t_in: array of float
        The times corresponding to R_in
    t_out: array of float
        The times to which R_in should be interpolated

    Returns
    -------
    array of quaternions
        The interpolated rotors, one per entry of `t_out`.
    """
    if R_in.size == 0 or t_out.size == 0:
        return np.array((), dtype=np.quaternion)

    # This list contains an index for each `t_out` such that
    # t_in[i-1] <= t_out < t_in[i]
    # Note that `side='right'` is much faster in my tests
    # i_in_for_out = t_in.searchsorted(t_out, side='left')
    # np.clip(i_in_for_out, 0, len(t_in) - 1, out=i_in_for_out)
    i_in_for_out = t_in.searchsorted(t_out, side='right')-1

    # Now, for each index `i` in `i_in`, we need to compute the
    # interpolation "coefficients" (`A_i`, `B_ip1`) — the inner control
    # rotors of each spherical "quadrangle".  `np.roll` shifts give the
    # neighboring rotors/times without an explicit Python loop.
    #
    # I previously tested an explicit version of the loops below,
    # comparing `stride_tricks.as_strided` with explicit implementation
    # via `roll` (as seen here).  I found that `roll` was significantly
    # more efficient for simple calculations, though the difference is
    # probably totally washed out here.
    A = R_in * np.exp((- np.log((~R_in) * np.roll(R_in, -1))
                       + np.log((~np.roll(R_in, 1)) * R_in) * ((np.roll(t_in, -1) - t_in) / (t_in - np.roll(t_in, 1)))
                       ) * 0.25)
    B = np.roll(R_in, -1) * np.exp((np.log((~np.roll(R_in, -1)) * np.roll(R_in, -2))
                                    * ((np.roll(t_in, -1) - t_in) / (np.roll(t_in, -2) - np.roll(t_in, -1)))
                                    - np.log((~R_in) * np.roll(R_in, -1))) * -0.25)

    # The `roll`-based expressions wrap around at the array ends, so the
    # first/last A and the last two B entries must be patched.  The original
    # derivation extends R_in by "reflection":
    #   R_in[0-1] = R_in[0]*(~R_in[1])*R_in[0]
    #   R_in[n+0] = R_in[-1]*(~R_in[-2])*R_in[-1]
    #   R_in[n+1] = R_in[-1]*(~R_in[-2])*R_in[-1]*(~R_in[-2])*R_in[-1]
    # Substituting those values into the A/B formulas makes the exponents
    # cancel at the boundaries, leaving the closed forms below.
    A[0] = R_in[0]
    A[-1] = R_in[-1]
    B[-2] = R_in[-1]
    B[-1] = R_in[-1] * (~R_in[-2]) * R_in[-1]

    # Use the coefficients at the corresponding t_out indices to
    # compute the squad interpolant.  R_ip1 is the "next" rotor of each
    # interval, with the reflected extension supplying the final entry.
    # R_ip1 = np.array(np.roll(R_in, -1)[i_in_for_out])
    # R_ip1[-1] = R_in[-1]*(~R_in[-2])*R_in[-1]
    R_ip1 = np.roll(R_in, -1)
    R_ip1[-1] = R_in[-1]*(~R_in[-2])*R_in[-1]
    R_ip1 = np.array(R_ip1[i_in_for_out])
    # Extend the time array by one uniform step so the last interval has a
    # well-defined width for the normalized parameter `tau` below.
    t_inp1 = np.roll(t_in, -1)
    t_inp1[-1] = t_in[-1] + (t_in[-1] - t_in[-2])
    # `tau` is each output time normalized to [0, 1) within its interval.
    tau = (t_out - t_in[i_in_for_out]) / ((t_inp1 - t_in)[i_in_for_out])
    # tau = (t_out - t_in[i_in_for_out]) / ((np.roll(t_in, -1) - t_in)[i_in_for_out])
    R_out = np.squad_vectorized(tau, R_in[i_in_for_out], A[i_in_for_out], B[i_in_for_out], R_ip1)

    return R_out
@njit
def frame_from_angular_velocity_integrand(rfrak, Omega):
    """Return d(rfrak)/dt for a frame with angular velocity `Omega`.

    `rfrak` is the 3-vector logarithm of the frame rotor and `Omega` is the
    3-vector angular velocity, both indexed as sequences of three floats.
    The result is the time derivative of `rfrak`, suitable as the right-hand
    side when integrating the frame from its angular velocity.

    NOTE(review): the final expression mixes plain tuples with `numpy.dot` /
    `numpy.cross` arithmetic; it presumably relies on numba's typing of these
    operations under the `@njit` compilation used here — confirm.
    """
    import math
    from numpy import dot, cross
    from .numpy_quaternion import _eps
    # Magnitudes of the rotor-logarithm and angular-velocity vectors.
    rfrakMag = math.sqrt(rfrak[0] * rfrak[0] + rfrak[1] * rfrak[1] + rfrak[2] * rfrak[2])
    OmegaMag = math.sqrt(Omega[0] * Omega[0] + Omega[1] * Omega[1] + Omega[2] * Omega[2])
    # If the matrix is really close to the identity, return Omega/2
    if rfrakMag < _eps * OmegaMag:
        return Omega[0] / 2.0, Omega[1] / 2.0, Omega[2] / 2.0
    # If the matrix is really close to singular, it's equivalent to the identity, so return Omega/2
    if abs(math.sin(rfrakMag)) < _eps:
        return Omega[0] / 2.0, Omega[1] / 2.0, Omega[2] / 2.0
    OmegaOver2 = Omega[0] / 2.0, Omega[1] / 2.0, Omega[2] / 2.0
    rfrakHat = rfrak[0] / rfrakMag, rfrak[1] / rfrakMag, rfrak[2] / rfrakMag
    # Split Omega/2 into components along and perpendicular to rfrak; the
    # perpendicular part is scaled by rfrakMag/tan(rfrakMag).
    return ((OmegaOver2 - rfrakHat * dot(rfrakHat, OmegaOver2)) * (rfrakMag / math.tan(rfrakMag))
            + rfrakHat * dot(rfrakHat, OmegaOver2) + cross(OmegaOver2, rfrak))
class appending_array(object):
def __init__(self, shape, dtype=np.float, initial_array=None):
shape = list(shape)
if shape[0] < 4:
shape[0] = 4
self._a = np.empty(shape, dtype=dtype)
self.n = 0
if initial_array is not None:
assert initial_array.dtype == dtype
assert initial_array.shape[1:] == shape[1:]
assert initial_array.shape[0] <= shape[0]
self.n = initial_array.shape[0]
self._a[:self.n, ...] = initial_array[:]
def append(self, row):
self.n += 1
if self.n > self._a.shape[0]:
self._a = np.resize(self._a, (2*self._a.shape[0],)+self._a.shape[1:])
self._a[self.n-1, ...] = row
@property
def a(self):
return self._a[:self.n, ...]
def integrate_angular_velocity(Omega, t0, t1, R0=None, tolerance=1e-12):
    """Compute frame with given angular velocity

    Parameters
    ==========
    Omega: tuple or callable
        Angular velocity from which to compute frame.  Can be
        1) a 2-tuple of float arrays (t, v) giving the angular velocity vector at a series of times,
        2) a function of time that returns the 3-vector angular velocity, or
        3) a function of time and orientation (t, R) that returns the 3-vector angular velocity
        In case 1, the angular velocity will be interpolated to the required times.  Note that
        accuracy is poor in case 1.
    t0: float
        Initial time
    t1: float
        Final time
    R0: quaternion, optional
        Initial frame orientation.  Defaults to 1 (the identity orientation).
    tolerance: float, optional
        Absolute tolerance used in integration.  Defaults to 1e-12.

    Returns
    =======
    t: float array
    R: quaternion array
    """
    import warnings
    from scipy.integrate import ode
    if R0 is None:
        R0 = quaternion.one
    input_is_tabulated = False
    # Normalize the three accepted forms of `Omega` into a single callable
    # Omega_func(t, R).  Each candidate is probed with (t0, R0) so that a
    # mismatched signature raises immediately and we fall through.
    try:
        # Case 1: tabulated (t, v) data -- spline each vector component.
        t_Omega, v = Omega
        from scipy.interpolate import InterpolatedUnivariateSpline
        Omega_x = InterpolatedUnivariateSpline(t_Omega, v[:, 0])
        Omega_y = InterpolatedUnivariateSpline(t_Omega, v[:, 1])
        Omega_z = InterpolatedUnivariateSpline(t_Omega, v[:, 2])
        def Omega_func(t, R):
            return [Omega_x(t), Omega_y(t), Omega_z(t)]
        Omega_func(t0, R0)
        input_is_tabulated = True
    except (TypeError, ValueError):
        # Case 3: callable of (t, R).
        def Omega_func(t, R):
            return Omega(t, R)
        try:
            Omega_func(t0, R0)
        except TypeError:
            # Case 2: callable of t only.
            def Omega_func(t, R):
                return Omega(t)
            Omega_func(t0, R0)
    def RHS(t, y):
        # dR/dt = (1/2) * Omega * R, with Omega as a pure-vector quaternion.
        R = quaternion.quaternion(*y)
        return (0.5 * quaternion.quaternion(0.0, *Omega_func(t, R)) * R).components
    y0 = R0.components
    if input_is_tabulated:
        # Tabulated input: evaluate the solution exactly at the input times.
        from scipy.integrate import solve_ivp
        t = t_Omega
        t_span = [t_Omega[0], t_Omega[-1]]
        solution = solve_ivp(RHS, t_span, y0, t_eval=t_Omega, atol=tolerance, rtol=100*np.finfo(float).eps)
        R = quaternion.from_float_array(solution.y.T)
    else:
        # Callable input: step the dop853 integrator one internal step at a
        # time, recording every accepted step until t1 is reached.
        solver = ode(RHS)
        solver.set_initial_value(y0, t0)
        solver.set_integrator('dop853', nsteps=1, atol=tolerance, rtol=0.0)
        solver._integrator.iwork[2] = -1  # suppress Fortran-printed warning
        t = appending_array((int(t1-t0),))
        t.append(solver.t)
        R = appending_array((int(t1-t0), 4))
        R.append(solver.y)
        warnings.filterwarnings("ignore", category=UserWarning)
        t_last = solver.t
        while solver.t < t1:
            solver.integrate(t1, step=True)
            if solver.t > t_last:
                t.append(solver.t)
                R.append(solver.y)
                t_last = solver.t
        warnings.resetwarnings()
        t = t.a
        R = quaternion.as_quat_array(R.a)
    return t, R
def minimal_rotation(R, t, iterations=2):
    """Adjust frame so that there is no rotation about the z' axis.

    Pre-composes the input rotation with a rotation about the z axis through
    an angle gamma chosen so that dgamma/dt = 2*(dR/dt * z * R.conjugate()).w,
    which removes the component of the angular velocity along the body-frame
    z' axis.  Because the condition is easier to satisfy the closer the input
    already is to a minimally rotating frame, the correction is applied
    `iterations` times to refine the result.

    Parameters
    ==========
    R: quaternion array
        Time series describing rotation
    t: float array
        Corresponding times at which R is measured
    iterations: int [defaults to 2]
        Repeat the minimization to refine the result
    """
    from scipy.interpolate import InterpolatedUnivariateSpline as spline
    while iterations != 0:
        # Differentiate each quaternion component with a cubic spline.
        components = quaternion.as_float_array(R)
        derivatives = np.empty_like(components)
        for axis in range(4):
            derivatives[:, axis] = spline(t, components[:, axis]).derivative()(t)
        R = quaternion.from_float_array(components)
        Rdot = quaternion.from_float_array(derivatives)
        # Integrate gamma/2 and pre-compose with exp(z * gamma/2).
        half_gamma_dot = quaternion.as_float_array(Rdot * quaternion.z * R.conjugate())[:, 0]
        half_gamma = spline(t, half_gamma_dot).antiderivative()(t)
        R = R * np.exp(quaternion.z * half_gamma)
        iterations = iterations - 1
    return R
def angular_velocity(R, t):
    """Return the angular-velocity 3-vectors of the rotor time series R(t).

    Each quaternion component is splined over `t` and differentiated, and the
    results are combined as omega = 2 * (dR/dt) / R; only the vector part of
    each quaternion is returned.
    """
    from scipy.interpolate import InterpolatedUnivariateSpline as spline
    components = quaternion.as_float_array(R)
    derivatives = np.empty_like(components)
    for axis in range(4):
        derivatives[:, axis] = spline(t, components[:, axis]).derivative()(t)
    rotors = quaternion.from_float_array(components)
    rotor_dots = quaternion.from_float_array(derivatives)
    return np.array([omega.vec for omega in (2 * rotor_dots / rotors)])
|
moble/quaternion
|
quaternion_time_series.py
|
squad
|
python
|
def squad(R_in, t_in, t_out):
    """Spherical "quadrangular" interpolation of rotors with a cubic spline.

    Interpolates the rotor (unit-quaternion) time series `R_in`, sampled at
    times `t_in`, to the output times `t_out` using the squad construction,
    which keeps the interpolant on the rotor manifold and is continuous in
    first and second derivatives.  The inputs are assumed to be reasonably
    continuous (no sign flips) and the time arrays sorted; no checking is
    done, and violating these assumptions gives silently bad results.

    Parameters
    ----------
    R_in: array of quaternions
        A time-series of rotors (unit quaternions) to be interpolated
    t_in: array of float
        The times corresponding to R_in
    t_out: array of float
        The times to which R_in should be interpolated
    """
    if R_in.size == 0 or t_out.size == 0:
        return np.array((), dtype=np.quaternion)

    # This list contains an index for each `t_out` such that
    # t_in[i-1] <= t_out < t_in[i]
    # Note that `side='right'` is much faster in my tests
    # i_in_for_out = t_in.searchsorted(t_out, side='left')
    # np.clip(i_in_for_out, 0, len(t_in) - 1, out=i_in_for_out)
    i_in_for_out = t_in.searchsorted(t_out, side='right')-1

    # Now, for each index `i` in `i_in`, we need to compute the
    # interpolation "coefficients" (`A_i`, `B_ip1`).
    #
    # I previously tested an explicit version of the loops below,
    # comparing `stride_tricks.as_strided` with explicit
    # implementation via `roll` (as seen here).  I found that the
    # `roll` was significantly more efficient for simple calculations,
    # though the difference is probably totally washed out here.  In
    # any case, it might be useful to test again.
    #
    A = R_in * np.exp((- np.log((~R_in) * np.roll(R_in, -1))
                       + np.log((~np.roll(R_in, 1)) * R_in) * ((np.roll(t_in, -1) - t_in) / (t_in - np.roll(t_in, 1)))
                       ) * 0.25)
    B = np.roll(R_in, -1) * np.exp((np.log((~np.roll(R_in, -1)) * np.roll(R_in, -2))
                                    * ((np.roll(t_in, -1) - t_in) / (np.roll(t_in, -2) - np.roll(t_in, -1)))
                                    - np.log((~R_in) * np.roll(R_in, -1))) * -0.25)

    # Correct the first and last A time steps, and the last two B time steps.
    # These follow from extending R_in with the wrap-around values
    #   R_in[0-1] -> R_in[0] * (~R_in[1]) * R_in[0]
    #   R_in[n+0] -> R_in[-1] * (~R_in[-2]) * R_in[-1]
    #   R_in[n+1] -> R_in[-1] * (~R_in[-2]) * R_in[-1] * (~R_in[-2]) * R_in[-1]
    # and simplifying the general A/B expressions at the boundaries; the
    # logarithm terms cancel, leaving the closed forms below.
    A[0] = R_in[0]
    A[-1] = R_in[-1]
    B[-2] = R_in[-1]
    B[-1] = R_in[-1] * (~R_in[-2]) * R_in[-1]

    # Use the coefficients at the corresponding t_out indices to
    # compute the squad interpolant
    # R_ip1 = np.array(np.roll(R_in, -1)[i_in_for_out])
    # R_ip1[-1] = R_in[-1]*(~R_in[-2])*R_in[-1]
    R_ip1 = np.roll(R_in, -1)
    R_ip1[-1] = R_in[-1]*(~R_in[-2])*R_in[-1]
    R_ip1 = np.array(R_ip1[i_in_for_out])
    # Extrapolate the final time step so the last interval has a nonzero width.
    t_inp1 = np.roll(t_in, -1)
    t_inp1[-1] = t_in[-1] + (t_in[-1] - t_in[-2])
    # Normalized position of each t_out within its interval.
    tau = (t_out - t_in[i_in_for_out]) / ((t_inp1 - t_in)[i_in_for_out])
    # tau = (t_out - t_in[i_in_for_out]) / ((np.roll(t_in, -1) - t_in)[i_in_for_out])
    R_out = np.squad_vectorized(tau, R_in[i_in_for_out], A[i_in_for_out], B[i_in_for_out], R_ip1)
    return R_out
|
Spherical "quadrangular" interpolation of rotors with a cubic spline
This is the best way to interpolate rotations. It uses the analog
of a cubic spline, except that the interpolant is confined to the
rotor manifold in a natural way. Alternative methods involving
interpolation of other coordinates on the rotation group or
normalization of interpolated values give bad results. The
results from this method are as natural as any, and are continuous
in first and second derivatives.
The input `R_in` rotors are assumed to be reasonably continuous
(no sign flips), and the input `t` arrays are assumed to be
sorted. No checking is done for either case, and you may get
silently bad results if these conditions are violated.
This function simplifies the calling, compared to `squad_evaluate`
(which takes a set of four quaternions forming the edges of the
"quadrangle", and the normalized time `tau`) and `squad_vectorized`
(which takes the same arguments, but in array form, and efficiently
loops over them).
Parameters
----------
R_in: array of quaternions
A time-series of rotors (unit quaternions) to be interpolated
t_in: array of float
The times corresponding to R_in
t_out: array of float
The times to which R_in should be interpolated
|
train
|
https://github.com/moble/quaternion/blob/7a323e81b391d6892e2874073e495e0beb057e85/quaternion_time_series.py#L38-L154
| null |
# Copyright (c) 2017, Michael Boyle
# See LICENSE file for details: <https://github.com/moble/quaternion/blob/master/LICENSE>
from __future__ import print_function, division, absolute_import
import numpy as np
import quaternion
from quaternion.numba_wrapper import njit
def slerp(R1, R2, t1, t2, t_out):
    """Spherical linear interpolation of rotors.

    Convenience wrapper around `np.slerp_vectorized` that accepts times
    rather than the normalized parameter `tau`: `t_out` is mapped affinely
    from [t1, t2] onto [0, 1] before evaluation.

    Parameters
    ----------
    R1: quaternion
        Quaternion at beginning of interpolation
    R2: quaternion
        Quaternion at end of interpolation
    t1: float
        Time corresponding to R1
    t2: float
        Time corresponding to R2
    t_out: float or array of floats
        Times to which the rotors should be interpolated
    """
    fraction = (t_out - t1) / (t2 - t1)
    return np.slerp_vectorized(R1, R2, fraction)
@njit
def frame_from_angular_velocity_integrand(rfrak, Omega):
    """Return d(rfrak)/dt for a frame with angular velocity `Omega`.

    `rfrak` is the 3-vector logarithm of the frame rotor and `Omega` is the
    3-vector angular velocity, both indexed as sequences of three floats.
    The result is the time derivative of `rfrak`, suitable as the right-hand
    side when integrating the frame from its angular velocity.

    NOTE(review): the final expression mixes plain tuples with `numpy.dot` /
    `numpy.cross` arithmetic; it presumably relies on numba's typing of these
    operations under the `@njit` compilation used here — confirm.
    """
    import math
    from numpy import dot, cross
    from .numpy_quaternion import _eps
    # Magnitudes of the rotor-logarithm and angular-velocity vectors.
    rfrakMag = math.sqrt(rfrak[0] * rfrak[0] + rfrak[1] * rfrak[1] + rfrak[2] * rfrak[2])
    OmegaMag = math.sqrt(Omega[0] * Omega[0] + Omega[1] * Omega[1] + Omega[2] * Omega[2])
    # If the matrix is really close to the identity, return Omega/2
    if rfrakMag < _eps * OmegaMag:
        return Omega[0] / 2.0, Omega[1] / 2.0, Omega[2] / 2.0
    # If the matrix is really close to singular, it's equivalent to the identity, so return Omega/2
    if abs(math.sin(rfrakMag)) < _eps:
        return Omega[0] / 2.0, Omega[1] / 2.0, Omega[2] / 2.0
    OmegaOver2 = Omega[0] / 2.0, Omega[1] / 2.0, Omega[2] / 2.0
    rfrakHat = rfrak[0] / rfrakMag, rfrak[1] / rfrakMag, rfrak[2] / rfrakMag
    # Split Omega/2 into components along and perpendicular to rfrak; the
    # perpendicular part is scaled by rfrakMag/tan(rfrakMag).
    return ((OmegaOver2 - rfrakHat * dot(rfrakHat, OmegaOver2)) * (rfrakMag / math.tan(rfrakMag))
            + rfrakHat * dot(rfrakHat, OmegaOver2) + cross(OmegaOver2, rfrak))
class appending_array(object):
def __init__(self, shape, dtype=np.float, initial_array=None):
shape = list(shape)
if shape[0] < 4:
shape[0] = 4
self._a = np.empty(shape, dtype=dtype)
self.n = 0
if initial_array is not None:
assert initial_array.dtype == dtype
assert initial_array.shape[1:] == shape[1:]
assert initial_array.shape[0] <= shape[0]
self.n = initial_array.shape[0]
self._a[:self.n, ...] = initial_array[:]
def append(self, row):
self.n += 1
if self.n > self._a.shape[0]:
self._a = np.resize(self._a, (2*self._a.shape[0],)+self._a.shape[1:])
self._a[self.n-1, ...] = row
@property
def a(self):
return self._a[:self.n, ...]
def integrate_angular_velocity(Omega, t0, t1, R0=None, tolerance=1e-12):
    """Compute frame with given angular velocity

    Parameters
    ==========
    Omega: tuple or callable
        Angular velocity from which to compute frame.  Can be
        1) a 2-tuple of float arrays (t, v) giving the angular velocity vector at a series of times,
        2) a function of time that returns the 3-vector angular velocity, or
        3) a function of time and orientation (t, R) that returns the 3-vector angular velocity
        In case 1, the angular velocity will be interpolated to the required times.  Note that
        accuracy is poor in case 1.
    t0: float
        Initial time
    t1: float
        Final time
    R0: quaternion, optional
        Initial frame orientation.  Defaults to 1 (the identity orientation).
    tolerance: float, optional
        Absolute tolerance used in integration.  Defaults to 1e-12.

    Returns
    =======
    t: float array
    R: quaternion array
    """
    import warnings
    from scipy.integrate import ode
    if R0 is None:
        R0 = quaternion.one
    input_is_tabulated = False
    # Normalize the three accepted forms of `Omega` into a single callable
    # Omega_func(t, R).  Each candidate is probed with (t0, R0) so that a
    # mismatched signature raises immediately and we fall through.
    try:
        # Case 1: tabulated (t, v) data -- spline each vector component.
        t_Omega, v = Omega
        from scipy.interpolate import InterpolatedUnivariateSpline
        Omega_x = InterpolatedUnivariateSpline(t_Omega, v[:, 0])
        Omega_y = InterpolatedUnivariateSpline(t_Omega, v[:, 1])
        Omega_z = InterpolatedUnivariateSpline(t_Omega, v[:, 2])
        def Omega_func(t, R):
            return [Omega_x(t), Omega_y(t), Omega_z(t)]
        Omega_func(t0, R0)
        input_is_tabulated = True
    except (TypeError, ValueError):
        # Case 3: callable of (t, R).
        def Omega_func(t, R):
            return Omega(t, R)
        try:
            Omega_func(t0, R0)
        except TypeError:
            # Case 2: callable of t only.
            def Omega_func(t, R):
                return Omega(t)
            Omega_func(t0, R0)
    def RHS(t, y):
        # dR/dt = (1/2) * Omega * R, with Omega as a pure-vector quaternion.
        R = quaternion.quaternion(*y)
        return (0.5 * quaternion.quaternion(0.0, *Omega_func(t, R)) * R).components
    y0 = R0.components
    if input_is_tabulated:
        # Tabulated input: evaluate the solution exactly at the input times.
        from scipy.integrate import solve_ivp
        t = t_Omega
        t_span = [t_Omega[0], t_Omega[-1]]
        solution = solve_ivp(RHS, t_span, y0, t_eval=t_Omega, atol=tolerance, rtol=100*np.finfo(float).eps)
        R = quaternion.from_float_array(solution.y.T)
    else:
        # Callable input: step the dop853 integrator one internal step at a
        # time, recording every accepted step until t1 is reached.
        solver = ode(RHS)
        solver.set_initial_value(y0, t0)
        solver.set_integrator('dop853', nsteps=1, atol=tolerance, rtol=0.0)
        solver._integrator.iwork[2] = -1  # suppress Fortran-printed warning
        t = appending_array((int(t1-t0),))
        t.append(solver.t)
        R = appending_array((int(t1-t0), 4))
        R.append(solver.y)
        warnings.filterwarnings("ignore", category=UserWarning)
        t_last = solver.t
        while solver.t < t1:
            solver.integrate(t1, step=True)
            if solver.t > t_last:
                t.append(solver.t)
                R.append(solver.y)
                t_last = solver.t
        warnings.resetwarnings()
        t = t.a
        R = quaternion.as_quat_array(R.a)
    return t, R
def minimal_rotation(R, t, iterations=2):
    """Adjust frame so that there is no rotation about the z' axis.

    Pre-composes the input rotation with a rotation about the z axis through
    an angle gamma chosen so that dgamma/dt = 2*(dR/dt * z * R.conjugate()).w,
    which removes the component of the angular velocity along the body-frame
    z' axis.  Because the condition is easier to satisfy the closer the input
    already is to a minimally rotating frame, the correction is applied
    `iterations` times to refine the result.

    Parameters
    ==========
    R: quaternion array
        Time series describing rotation
    t: float array
        Corresponding times at which R is measured
    iterations: int [defaults to 2]
        Repeat the minimization to refine the result
    """
    from scipy.interpolate import InterpolatedUnivariateSpline as spline
    while iterations != 0:
        # Differentiate each quaternion component with a cubic spline.
        components = quaternion.as_float_array(R)
        derivatives = np.empty_like(components)
        for axis in range(4):
            derivatives[:, axis] = spline(t, components[:, axis]).derivative()(t)
        R = quaternion.from_float_array(components)
        Rdot = quaternion.from_float_array(derivatives)
        # Integrate gamma/2 and pre-compose with exp(z * gamma/2).
        half_gamma_dot = quaternion.as_float_array(Rdot * quaternion.z * R.conjugate())[:, 0]
        half_gamma = spline(t, half_gamma_dot).antiderivative()(t)
        R = R * np.exp(quaternion.z * half_gamma)
        iterations = iterations - 1
    return R
def angular_velocity(R, t):
    """Return the angular-velocity 3-vectors of the rotor time series R(t).

    Each quaternion component is splined over `t` and differentiated, and the
    results are combined as omega = 2 * (dR/dt) / R; only the vector part of
    each quaternion is returned.
    """
    from scipy.interpolate import InterpolatedUnivariateSpline as spline
    components = quaternion.as_float_array(R)
    derivatives = np.empty_like(components)
    for axis in range(4):
        derivatives[:, axis] = spline(t, components[:, axis]).derivative()(t)
    rotors = quaternion.from_float_array(components)
    rotor_dots = quaternion.from_float_array(derivatives)
    return np.array([omega.vec for omega in (2 * rotor_dots / rotors)])
|
moble/quaternion
|
quaternion_time_series.py
|
integrate_angular_velocity
|
python
|
def integrate_angular_velocity(Omega, t0, t1, R0=None, tolerance=1e-12):
import warnings
from scipy.integrate import ode
if R0 is None:
R0 = quaternion.one
input_is_tabulated = False
try:
t_Omega, v = Omega
from scipy.interpolate import InterpolatedUnivariateSpline
Omega_x = InterpolatedUnivariateSpline(t_Omega, v[:, 0])
Omega_y = InterpolatedUnivariateSpline(t_Omega, v[:, 1])
Omega_z = InterpolatedUnivariateSpline(t_Omega, v[:, 2])
def Omega_func(t, R):
return [Omega_x(t), Omega_y(t), Omega_z(t)]
Omega_func(t0, R0)
input_is_tabulated = True
except (TypeError, ValueError):
def Omega_func(t, R):
return Omega(t, R)
try:
Omega_func(t0, R0)
except TypeError:
def Omega_func(t, R):
return Omega(t)
Omega_func(t0, R0)
def RHS(t, y):
R = quaternion.quaternion(*y)
return (0.5 * quaternion.quaternion(0.0, *Omega_func(t, R)) * R).components
y0 = R0.components
if input_is_tabulated:
from scipy.integrate import solve_ivp
t = t_Omega
t_span = [t_Omega[0], t_Omega[-1]]
solution = solve_ivp(RHS, t_span, y0, t_eval=t_Omega, atol=tolerance, rtol=100*np.finfo(float).eps)
R = quaternion.from_float_array(solution.y.T)
else:
solver = ode(RHS)
solver.set_initial_value(y0, t0)
solver.set_integrator('dop853', nsteps=1, atol=tolerance, rtol=0.0)
solver._integrator.iwork[2] = -1 # suppress Fortran-printed warning
t = appending_array((int(t1-t0),))
t.append(solver.t)
R = appending_array((int(t1-t0), 4))
R.append(solver.y)
warnings.filterwarnings("ignore", category=UserWarning)
t_last = solver.t
while solver.t < t1:
solver.integrate(t1, step=True)
if solver.t > t_last:
t.append(solver.t)
R.append(solver.y)
t_last = solver.t
warnings.resetwarnings()
t = t.a
R = quaternion.as_quat_array(R.a)
return t, R
|
Compute frame with given angular velocity
Parameters
==========
Omega: tuple or callable
Angular velocity from which to compute frame. Can be
1) a 2-tuple of float arrays (t, v) giving the angular velocity vector at a series of times,
2) a function of time that returns the 3-vector angular velocity, or
3) a function of time and orientation (t, R) that returns the 3-vector angular velocity
In case 1, the angular velocity will be interpolated to the required times. Note that accuracy
is poor in case 1.
t0: float
Initial time
t1: float
Final time
R0: quaternion, optional
Initial frame orientation. Defaults to 1 (the identity orientation).
tolerance: float, optional
Absolute tolerance used in integration. Defaults to 1e-12.
Returns
=======
t: float array
R: quaternion array
|
train
|
https://github.com/moble/quaternion/blob/7a323e81b391d6892e2874073e495e0beb057e85/quaternion_time_series.py#L203-L291
|
[
"def as_quat_array(a):\n \"\"\"View a float array as an array of quaternions\n\n The input array must have a final dimension whose size is\n divisible by four (or better yet *is* 4), because successive\n indices in that last dimension will be considered successive\n components of the output quaternion.\n\n This function is usually fast (of order 1 microsecond) because no\n data is copied; the returned quantity is just a \"view\" of the\n original. However, if the input array is not C-contiguous\n (basically, as you increment the index into the last dimension of\n the array, you just move to the neighboring float in memory), the\n data will need to be copied which may be quite slow. Therefore,\n you should try to ensure that the input array is in that order.\n Slices and transpositions will frequently break that rule.\n\n We will not convert back from a two-spinor array because there is\n no unique convention for them, so I don't want to mess with that.\n Also, we want to discourage users from the slow, memory-copying\n process of swapping columns required for useful definitions of\n the two-spinors.\n\n \"\"\"\n a = np.asarray(a, dtype=np.double)\n\n # fast path\n if a.shape == (4,):\n return quaternion(a[0], a[1], a[2], a[3])\n\n # view only works if the last axis is C-contiguous\n if not a.flags['C_CONTIGUOUS'] or a.strides[-1] != a.itemsize:\n a = a.copy(order='C')\n try:\n av = a.view(np.quaternion)\n except ValueError as e:\n message = (str(e) + '\\n '\n + 'Failed to view input data as a series of quaternions. '\n + 'Please ensure that the last dimension has size divisible by 4.\\n '\n + 'Input data has shape {0} and dtype {1}.'.format(a.shape, a.dtype))\n raise ValueError(message)\n\n # special case: don't create an axis for a single quaternion, to\n # match the output of `as_float_array`\n if av.shape[-1] == 1:\n av = av.reshape(a.shape[:-1])\n\n return av\n",
"def from_float_array(a):\n return as_quat_array(a)\n",
"def append(self, row):\n self.n += 1\n if self.n > self._a.shape[0]:\n self._a = np.resize(self._a, (2*self._a.shape[0],)+self._a.shape[1:])\n self._a[self.n-1, ...] = row\n",
"def Omega_func(t, R):\n return [Omega_x(t), Omega_y(t), Omega_z(t)]\n",
"def Omega_func(t, R):\n return Omega(t, R)\n",
"def Omega_func(t, R):\n return Omega(t)\n"
] |
# Copyright (c) 2017, Michael Boyle
# See LICENSE file for details: <https://github.com/moble/quaternion/blob/master/LICENSE>
from __future__ import print_function, division, absolute_import
import numpy as np
import quaternion
from quaternion.numba_wrapper import njit
def slerp(R1, R2, t1, t2, t_out):
    """Spherical linear interpolation of rotors.

    Convenience wrapper around `np.slerp_vectorized` that accepts times
    rather than the normalized parameter `tau`: `t_out` is mapped affinely
    from [t1, t2] onto [0, 1] before evaluation.

    Parameters
    ----------
    R1: quaternion
        Quaternion at beginning of interpolation
    R2: quaternion
        Quaternion at end of interpolation
    t1: float
        Time corresponding to R1
    t2: float
        Time corresponding to R2
    t_out: float or array of floats
        Times to which the rotors should be interpolated
    """
    fraction = (t_out - t1) / (t2 - t1)
    return np.slerp_vectorized(R1, R2, fraction)
def squad(R_in, t_in, t_out):
"""Spherical "quadrangular" interpolation of rotors with a cubic spline
This is the best way to interpolate rotations. It uses the analog
of a cubic spline, except that the interpolant is confined to the
rotor manifold in a natural way. Alternative methods involving
interpolation of other coordinates on the rotation group or
normalization of interpolated values give bad results. The
results from this method are as natural as any, and are continuous
in first and second derivatives.
The input `R_in` rotors are assumed to be reasonably continuous
(no sign flips), and the input `t` arrays are assumed to be
sorted. No checking is done for either case, and you may get
silently bad results if these conditions are violated.
This function simplifies the calling, compared to `squad_evaluate`
(which takes a set of four quaternions forming the edges of the
"quadrangle", and the normalized time `tau`) and `squad_vectorized`
(which takes the same arguments, but in array form, and efficiently
loops over them).
Parameters
----------
R_in: array of quaternions
A time-series of rotors (unit quaternions) to be interpolated
t_in: array of float
The times corresponding to R_in
t_out: array of float
The times to which R_in should be interpolated
"""
if R_in.size == 0 or t_out.size == 0:
return np.array((), dtype=np.quaternion)
# This list contains an index for each `t_out` such that
# t_in[i-1] <= t_out < t_in[i]
# Note that `side='right'` is much faster in my tests
# i_in_for_out = t_in.searchsorted(t_out, side='left')
# np.clip(i_in_for_out, 0, len(t_in) - 1, out=i_in_for_out)
i_in_for_out = t_in.searchsorted(t_out, side='right')-1
# Now, for each index `i` in `i_in`, we need to compute the
# interpolation "coefficients" (`A_i`, `B_ip1`).
#
# I previously tested an explicit version of the loops below,
# comparing `stride_tricks.as_strided` with explicit
# implementation via `roll` (as seen here). I found that the
# `roll` was significantly more efficient for simple calculations,
# though the difference is probably totally washed out here. In
# any case, it might be useful to test again.
#
A = R_in * np.exp((- np.log((~R_in) * np.roll(R_in, -1))
+ np.log((~np.roll(R_in, 1)) * R_in) * ((np.roll(t_in, -1) - t_in) / (t_in - np.roll(t_in, 1)))
) * 0.25)
B = np.roll(R_in, -1) * np.exp((np.log((~np.roll(R_in, -1)) * np.roll(R_in, -2))
* ((np.roll(t_in, -1) - t_in) / (np.roll(t_in, -2) - np.roll(t_in, -1)))
- np.log((~R_in) * np.roll(R_in, -1))) * -0.25)
# Correct the first and last A time steps, and last two B time steps. We extend R_in with the following wrap-around
# values:
# R_in[0-1] = R_in[0]*(~R_in[1])*R_in[0]
# R_in[n+0] = R_in[-1] * (~R_in[-2]) * R_in[-1]
# R_in[n+1] = R_in[0] * (~R_in[-1]) * R_in[0]
# = R_in[-1] * (~R_in[-2]) * R_in[-1] * (~R_in[-1]) * R_in[-1] * (~R_in[-2]) * R_in[-1]
# = R_in[-1] * (~R_in[-2]) * R_in[-1] * (~R_in[-2]) * R_in[-1]
# A[i] = R_in[i] * np.exp((- np.log((~R_in[i]) * R_in[i+1])
# + np.log((~R_in[i-1]) * R_in[i]) * ((t_in[i+1] - t_in[i]) / (t_in[i] - t_in[i-1]))
# ) * 0.25)
# A[0] = R_in[0] * np.exp((- np.log((~R_in[0]) * R_in[1]) + np.log((~R_in[0])*R_in[1]*(~R_in[0])) * R_in[0]) * 0.25)
# = R_in[0]
A[0] = R_in[0]
# A[-1] = R_in[-1] * np.exp((- np.log((~R_in[-1]) * R_in[n+0])
# + np.log((~R_in[-2]) * R_in[-1]) * ((t_in[n+0] - t_in[-1]) / (t_in[-1] - t_in[-2]))
# ) * 0.25)
# = R_in[-1] * np.exp((- np.log((~R_in[-1]) * R_in[n+0]) + np.log((~R_in[-2]) * R_in[-1])) * 0.25)
# = R_in[-1] * np.exp((- np.log((~R_in[-1]) * R_in[-1] * (~R_in[-2]) * R_in[-1])
# + np.log((~R_in[-2]) * R_in[-1])) * 0.25)
# = R_in[-1] * np.exp((- np.log((~R_in[-2]) * R_in[-1]) + np.log((~R_in[-2]) * R_in[-1])) * 0.25)
# = R_in[-1]
A[-1] = R_in[-1]
# B[i] = R_in[i+1] * np.exp((np.log((~R_in[i+1]) * R_in[i+2]) * ((t_in[i+1] - t_in[i]) / (t_in[i+2] - t_in[i+1]))
# - np.log((~R_in[i]) * R_in[i+1])) * -0.25)
# B[-2] = R_in[-1] * np.exp((np.log((~R_in[-1]) * R_in[0]) * ((t_in[-1] - t_in[-2]) / (t_in[0] - t_in[-1]))
# - np.log((~R_in[-2]) * R_in[-1])) * -0.25)
# = R_in[-1] * np.exp((np.log((~R_in[-1]) * R_in[0]) - np.log((~R_in[-2]) * R_in[-1])) * -0.25)
# = R_in[-1] * np.exp((np.log((~R_in[-1]) * R_in[-1] * (~R_in[-2]) * R_in[-1])
# - np.log((~R_in[-2]) * R_in[-1])) * -0.25)
# = R_in[-1] * np.exp((np.log((~R_in[-2]) * R_in[-1]) - np.log((~R_in[-2]) * R_in[-1])) * -0.25)
# = R_in[-1]
B[-2] = R_in[-1]
# B[-1] = R_in[0]
# B[-1] = R_in[0] * np.exp((np.log((~R_in[0]) * R_in[1]) - np.log((~R_in[-1]) * R_in[0])) * -0.25)
# = R_in[-1] * (~R_in[-2]) * R_in[-1]
# * np.exp((np.log((~(R_in[-1] * (~R_in[-2]) * R_in[-1])) * R_in[-1] * (~R_in[-2]) * R_in[-1] * (~R_in[-2]) * R_in[-1])
# - np.log((~R_in[-1]) * R_in[-1] * (~R_in[-2]) * R_in[-1])) * -0.25)
# = R_in[-1] * (~R_in[-2]) * R_in[-1]
# * np.exp((np.log(((~R_in[-1]) * R_in[-2] * (~R_in[-1])) * R_in[-1] * (~R_in[-2]) * R_in[-1] * (~R_in[-2]) * R_in[-1])
# - np.log((~R_in[-1]) * R_in[-1] * (~R_in[-2]) * R_in[-1])) * -0.25)
# * np.exp((np.log((~R_in[-2]) * R_in[-1])
# - np.log((~R_in[-2]) * R_in[-1])) * -0.25)
B[-1] = R_in[-1] * (~R_in[-2]) * R_in[-1]
# Use the coefficients at the corresponding t_out indices to
# compute the squad interpolant
# R_ip1 = np.array(np.roll(R_in, -1)[i_in_for_out])
# R_ip1[-1] = R_in[-1]*(~R_in[-2])*R_in[-1]
R_ip1 = np.roll(R_in, -1)
R_ip1[-1] = R_in[-1]*(~R_in[-2])*R_in[-1]
R_ip1 = np.array(R_ip1[i_in_for_out])
t_inp1 = np.roll(t_in, -1)
t_inp1[-1] = t_in[-1] + (t_in[-1] - t_in[-2])
tau = (t_out - t_in[i_in_for_out]) / ((t_inp1 - t_in)[i_in_for_out])
# tau = (t_out - t_in[i_in_for_out]) / ((np.roll(t_in, -1) - t_in)[i_in_for_out])
R_out = np.squad_vectorized(tau, R_in[i_in_for_out], A[i_in_for_out], B[i_in_for_out], R_ip1)
return R_out
@njit
def frame_from_angular_velocity_integrand(rfrak, Omega):
    """Return d(rfrak)/dt for a frame with angular velocity `Omega`.

    `rfrak` is the 3-vector logarithm of the frame rotor and `Omega` is the
    3-vector angular velocity, both indexed as sequences of three floats.
    The result is the time derivative of `rfrak`, suitable as the right-hand
    side when integrating the frame from its angular velocity.

    NOTE(review): the final expression mixes plain tuples with `numpy.dot` /
    `numpy.cross` arithmetic; it presumably relies on numba's typing of these
    operations under the `@njit` compilation used here — confirm.
    """
    import math
    from numpy import dot, cross
    from .numpy_quaternion import _eps
    # Magnitudes of the rotor-logarithm and angular-velocity vectors.
    rfrakMag = math.sqrt(rfrak[0] * rfrak[0] + rfrak[1] * rfrak[1] + rfrak[2] * rfrak[2])
    OmegaMag = math.sqrt(Omega[0] * Omega[0] + Omega[1] * Omega[1] + Omega[2] * Omega[2])
    # If the matrix is really close to the identity, return Omega/2
    if rfrakMag < _eps * OmegaMag:
        return Omega[0] / 2.0, Omega[1] / 2.0, Omega[2] / 2.0
    # If the matrix is really close to singular, it's equivalent to the identity, so return Omega/2
    if abs(math.sin(rfrakMag)) < _eps:
        return Omega[0] / 2.0, Omega[1] / 2.0, Omega[2] / 2.0
    OmegaOver2 = Omega[0] / 2.0, Omega[1] / 2.0, Omega[2] / 2.0
    rfrakHat = rfrak[0] / rfrakMag, rfrak[1] / rfrakMag, rfrak[2] / rfrakMag
    # Split Omega/2 into components along and perpendicular to rfrak; the
    # perpendicular part is scaled by rfrakMag/tan(rfrakMag).
    return ((OmegaOver2 - rfrakHat * dot(rfrakHat, OmegaOver2)) * (rfrakMag / math.tan(rfrakMag))
            + rfrakHat * dot(rfrakHat, OmegaOver2) + cross(OmegaOver2, rfrak))
class appending_array(object):
def __init__(self, shape, dtype=np.float, initial_array=None):
shape = list(shape)
if shape[0] < 4:
shape[0] = 4
self._a = np.empty(shape, dtype=dtype)
self.n = 0
if initial_array is not None:
assert initial_array.dtype == dtype
assert initial_array.shape[1:] == shape[1:]
assert initial_array.shape[0] <= shape[0]
self.n = initial_array.shape[0]
self._a[:self.n, ...] = initial_array[:]
def append(self, row):
self.n += 1
if self.n > self._a.shape[0]:
self._a = np.resize(self._a, (2*self._a.shape[0],)+self._a.shape[1:])
self._a[self.n-1, ...] = row
@property
def a(self):
return self._a[:self.n, ...]
def minimal_rotation(R, t, iterations=2):
"""Adjust frame so that there is no rotation about z' axis
The output of this function is a frame that rotates the z axis onto the same z' axis as the
input frame, but with minimal rotation about that axis. This is done by pre-composing the input
rotation with a rotation about the z axis through an angle gamma, where
dgamma/dt = 2*(dR/dt * z * R.conjugate()).w
This ensures that the angular velocity has no component along the z' axis.
Note that this condition becomes easier to impose the closer the input rotation is to a
minimally rotating frame, which means that repeated application of this function improves its
accuracy. By default, this function is iterated twice, though a few more iterations may be
called for.
Parameters
==========
R: quaternion array
Time series describing rotation
t: float array
Corresponding times at which R is measured
iterations: int [defaults to 2]
Repeat the minimization to refine the result
"""
from scipy.interpolate import InterpolatedUnivariateSpline as spline
if iterations == 0:
return R
R = quaternion.as_float_array(R)
Rdot = np.empty_like(R)
for i in range(4):
Rdot[:, i] = spline(t, R[:, i]).derivative()(t)
R = quaternion.from_float_array(R)
Rdot = quaternion.from_float_array(Rdot)
halfgammadot = quaternion.as_float_array(Rdot * quaternion.z * R.conjugate())[:, 0]
halfgamma = spline(t, halfgammadot).antiderivative()(t)
Rgamma = np.exp(quaternion.z * halfgamma)
return minimal_rotation(R * Rgamma, t, iterations=iterations-1)
def angular_velocity(R, t):
from scipy.interpolate import InterpolatedUnivariateSpline as spline
R = quaternion.as_float_array(R)
Rdot = np.empty_like(R)
for i in range(4):
Rdot[:, i] = spline(t, R[:, i]).derivative()(t)
R = quaternion.from_float_array(R)
Rdot = quaternion.from_float_array(Rdot)
return np.array([omega.vec for omega in (2*Rdot/R)])
|
moble/quaternion
|
quaternion_time_series.py
|
minimal_rotation
|
python
|
def minimal_rotation(R, t, iterations=2):
from scipy.interpolate import InterpolatedUnivariateSpline as spline
if iterations == 0:
return R
R = quaternion.as_float_array(R)
Rdot = np.empty_like(R)
for i in range(4):
Rdot[:, i] = spline(t, R[:, i]).derivative()(t)
R = quaternion.from_float_array(R)
Rdot = quaternion.from_float_array(Rdot)
halfgammadot = quaternion.as_float_array(Rdot * quaternion.z * R.conjugate())[:, 0]
halfgamma = spline(t, halfgammadot).antiderivative()(t)
Rgamma = np.exp(quaternion.z * halfgamma)
return minimal_rotation(R * Rgamma, t, iterations=iterations-1)
|
Adjust frame so that there is no rotation about z' axis
The output of this function is a frame that rotates the z axis onto the same z' axis as the
input frame, but with minimal rotation about that axis. This is done by pre-composing the input
rotation with a rotation about the z axis through an angle gamma, where
dgamma/dt = 2*(dR/dt * z * R.conjugate()).w
This ensures that the angular velocity has no component along the z' axis.
Note that this condition becomes easier to impose the closer the input rotation is to a
minimally rotating frame, which means that repeated application of this function improves its
accuracy. By default, this function is iterated twice, though a few more iterations may be
called for.
Parameters
==========
R: quaternion array
Time series describing rotation
t: float array
Corresponding times at which R is measured
iterations: int [defaults to 2]
Repeat the minimization to refine the result
|
train
|
https://github.com/moble/quaternion/blob/7a323e81b391d6892e2874073e495e0beb057e85/quaternion_time_series.py#L294-L332
|
[
"def minimal_rotation(R, t, iterations=2):\n \"\"\"Adjust frame so that there is no rotation about z' axis\n\n The output of this function is a frame that rotates the z axis onto the same z' axis as the\n input frame, but with minimal rotation about that axis. This is done by pre-composing the input\n rotation with a rotation about the z axis through an angle gamma, where\n\n dgamma/dt = 2*(dR/dt * z * R.conjugate()).w\n\n This ensures that the angular velocity has no component along the z' axis.\n\n Note that this condition becomes easier to impose the closer the input rotation is to a\n minimally rotating frame, which means that repeated application of this function improves its\n accuracy. By default, this function is iterated twice, though a few more iterations may be\n called for.\n\n Parameters\n ==========\n R: quaternion array\n Time series describing rotation\n t: float array\n Corresponding times at which R is measured\n iterations: int [defaults to 2]\n Repeat the minimization to refine the result\n\n \"\"\"\n from scipy.interpolate import InterpolatedUnivariateSpline as spline\n if iterations == 0:\n return R\n R = quaternion.as_float_array(R)\n Rdot = np.empty_like(R)\n for i in range(4):\n Rdot[:, i] = spline(t, R[:, i]).derivative()(t)\n R = quaternion.from_float_array(R)\n Rdot = quaternion.from_float_array(Rdot)\n halfgammadot = quaternion.as_float_array(Rdot * quaternion.z * R.conjugate())[:, 0]\n halfgamma = spline(t, halfgammadot).antiderivative()(t)\n Rgamma = np.exp(quaternion.z * halfgamma)\n return minimal_rotation(R * Rgamma, t, iterations=iterations-1)\n",
"def as_float_array(a):\n \"\"\"View the quaternion array as an array of floats\n\n This function is fast (of order 1 microsecond) because no data is\n copied; the returned quantity is just a \"view\" of the original.\n\n The output view has one more dimension (of size 4) than the input\n array, but is otherwise the same shape.\n\n \"\"\"\n return np.asarray(a, dtype=np.quaternion).view((np.double, 4))\n",
"def from_float_array(a):\n return as_quat_array(a)\n"
] |
# Copyright (c) 2017, Michael Boyle
# See LICENSE file for details: <https://github.com/moble/quaternion/blob/master/LICENSE>
from __future__ import print_function, division, absolute_import
import numpy as np
import quaternion
from quaternion.numba_wrapper import njit
def slerp(R1, R2, t1, t2, t_out):
"""Spherical linear interpolation of rotors
This function uses a simpler interface than the more fundamental
`slerp_evaluate` and `slerp_vectorized` functions. The latter
are fast, being implemented at the C level, but take input `tau`
instead of time. This function adjusts the time accordingly.
Parameters
----------
R1: quaternion
Quaternion at beginning of interpolation
R2: quaternion
Quaternion at end of interpolation
t1: float
Time corresponding to R1
t2: float
Time corresponding to R2
t_out: float or array of floats
Times to which the rotors should be interpolated
"""
tau = (t_out-t1)/(t2-t1)
return np.slerp_vectorized(R1, R2, tau)
def squad(R_in, t_in, t_out):
"""Spherical "quadrangular" interpolation of rotors with a cubic spline
This is the best way to interpolate rotations. It uses the analog
of a cubic spline, except that the interpolant is confined to the
rotor manifold in a natural way. Alternative methods involving
interpolation of other coordinates on the rotation group or
normalization of interpolated values give bad results. The
results from this method are as natural as any, and are continuous
in first and second derivatives.
The input `R_in` rotors are assumed to be reasonably continuous
(no sign flips), and the input `t` arrays are assumed to be
sorted. No checking is done for either case, and you may get
silently bad results if these conditions are violated.
This function simplifies the calling, compared to `squad_evaluate`
(which takes a set of four quaternions forming the edges of the
"quadrangle", and the normalized time `tau`) and `squad_vectorized`
(which takes the same arguments, but in array form, and efficiently
loops over them).
Parameters
----------
R_in: array of quaternions
A time-series of rotors (unit quaternions) to be interpolated
t_in: array of float
The times corresponding to R_in
t_out: array of float
The times to which R_in should be interpolated
"""
if R_in.size == 0 or t_out.size == 0:
return np.array((), dtype=np.quaternion)
# This list contains an index for each `t_out` such that
# t_in[i-1] <= t_out < t_in[i]
# Note that `side='right'` is much faster in my tests
# i_in_for_out = t_in.searchsorted(t_out, side='left')
# np.clip(i_in_for_out, 0, len(t_in) - 1, out=i_in_for_out)
i_in_for_out = t_in.searchsorted(t_out, side='right')-1
# Now, for each index `i` in `i_in`, we need to compute the
# interpolation "coefficients" (`A_i`, `B_ip1`).
#
# I previously tested an explicit version of the loops below,
# comparing `stride_tricks.as_strided` with explicit
# implementation via `roll` (as seen here). I found that the
# `roll` was significantly more efficient for simple calculations,
# though the difference is probably totally washed out here. In
# any case, it might be useful to test again.
#
A = R_in * np.exp((- np.log((~R_in) * np.roll(R_in, -1))
+ np.log((~np.roll(R_in, 1)) * R_in) * ((np.roll(t_in, -1) - t_in) / (t_in - np.roll(t_in, 1)))
) * 0.25)
B = np.roll(R_in, -1) * np.exp((np.log((~np.roll(R_in, -1)) * np.roll(R_in, -2))
* ((np.roll(t_in, -1) - t_in) / (np.roll(t_in, -2) - np.roll(t_in, -1)))
- np.log((~R_in) * np.roll(R_in, -1))) * -0.25)
# Correct the first and last A time steps, and last two B time steps. We extend R_in with the following wrap-around
# values:
# R_in[0-1] = R_in[0]*(~R_in[1])*R_in[0]
# R_in[n+0] = R_in[-1] * (~R_in[-2]) * R_in[-1]
# R_in[n+1] = R_in[0] * (~R_in[-1]) * R_in[0]
# = R_in[-1] * (~R_in[-2]) * R_in[-1] * (~R_in[-1]) * R_in[-1] * (~R_in[-2]) * R_in[-1]
# = R_in[-1] * (~R_in[-2]) * R_in[-1] * (~R_in[-2]) * R_in[-1]
# A[i] = R_in[i] * np.exp((- np.log((~R_in[i]) * R_in[i+1])
# + np.log((~R_in[i-1]) * R_in[i]) * ((t_in[i+1] - t_in[i]) / (t_in[i] - t_in[i-1]))
# ) * 0.25)
# A[0] = R_in[0] * np.exp((- np.log((~R_in[0]) * R_in[1]) + np.log((~R_in[0])*R_in[1]*(~R_in[0])) * R_in[0]) * 0.25)
# = R_in[0]
A[0] = R_in[0]
# A[-1] = R_in[-1] * np.exp((- np.log((~R_in[-1]) * R_in[n+0])
# + np.log((~R_in[-2]) * R_in[-1]) * ((t_in[n+0] - t_in[-1]) / (t_in[-1] - t_in[-2]))
# ) * 0.25)
# = R_in[-1] * np.exp((- np.log((~R_in[-1]) * R_in[n+0]) + np.log((~R_in[-2]) * R_in[-1])) * 0.25)
# = R_in[-1] * np.exp((- np.log((~R_in[-1]) * R_in[-1] * (~R_in[-2]) * R_in[-1])
# + np.log((~R_in[-2]) * R_in[-1])) * 0.25)
# = R_in[-1] * np.exp((- np.log((~R_in[-2]) * R_in[-1]) + np.log((~R_in[-2]) * R_in[-1])) * 0.25)
# = R_in[-1]
A[-1] = R_in[-1]
# B[i] = R_in[i+1] * np.exp((np.log((~R_in[i+1]) * R_in[i+2]) * ((t_in[i+1] - t_in[i]) / (t_in[i+2] - t_in[i+1]))
# - np.log((~R_in[i]) * R_in[i+1])) * -0.25)
# B[-2] = R_in[-1] * np.exp((np.log((~R_in[-1]) * R_in[0]) * ((t_in[-1] - t_in[-2]) / (t_in[0] - t_in[-1]))
# - np.log((~R_in[-2]) * R_in[-1])) * -0.25)
# = R_in[-1] * np.exp((np.log((~R_in[-1]) * R_in[0]) - np.log((~R_in[-2]) * R_in[-1])) * -0.25)
# = R_in[-1] * np.exp((np.log((~R_in[-1]) * R_in[-1] * (~R_in[-2]) * R_in[-1])
# - np.log((~R_in[-2]) * R_in[-1])) * -0.25)
# = R_in[-1] * np.exp((np.log((~R_in[-2]) * R_in[-1]) - np.log((~R_in[-2]) * R_in[-1])) * -0.25)
# = R_in[-1]
B[-2] = R_in[-1]
# B[-1] = R_in[0]
# B[-1] = R_in[0] * np.exp((np.log((~R_in[0]) * R_in[1]) - np.log((~R_in[-1]) * R_in[0])) * -0.25)
# = R_in[-1] * (~R_in[-2]) * R_in[-1]
# * np.exp((np.log((~(R_in[-1] * (~R_in[-2]) * R_in[-1])) * R_in[-1] * (~R_in[-2]) * R_in[-1] * (~R_in[-2]) * R_in[-1])
# - np.log((~R_in[-1]) * R_in[-1] * (~R_in[-2]) * R_in[-1])) * -0.25)
# = R_in[-1] * (~R_in[-2]) * R_in[-1]
# * np.exp((np.log(((~R_in[-1]) * R_in[-2] * (~R_in[-1])) * R_in[-1] * (~R_in[-2]) * R_in[-1] * (~R_in[-2]) * R_in[-1])
# - np.log((~R_in[-1]) * R_in[-1] * (~R_in[-2]) * R_in[-1])) * -0.25)
# * np.exp((np.log((~R_in[-2]) * R_in[-1])
# - np.log((~R_in[-2]) * R_in[-1])) * -0.25)
B[-1] = R_in[-1] * (~R_in[-2]) * R_in[-1]
# Use the coefficients at the corresponding t_out indices to
# compute the squad interpolant
# R_ip1 = np.array(np.roll(R_in, -1)[i_in_for_out])
# R_ip1[-1] = R_in[-1]*(~R_in[-2])*R_in[-1]
R_ip1 = np.roll(R_in, -1)
R_ip1[-1] = R_in[-1]*(~R_in[-2])*R_in[-1]
R_ip1 = np.array(R_ip1[i_in_for_out])
t_inp1 = np.roll(t_in, -1)
t_inp1[-1] = t_in[-1] + (t_in[-1] - t_in[-2])
tau = (t_out - t_in[i_in_for_out]) / ((t_inp1 - t_in)[i_in_for_out])
# tau = (t_out - t_in[i_in_for_out]) / ((np.roll(t_in, -1) - t_in)[i_in_for_out])
R_out = np.squad_vectorized(tau, R_in[i_in_for_out], A[i_in_for_out], B[i_in_for_out], R_ip1)
return R_out
@njit
def frame_from_angular_velocity_integrand(rfrak, Omega):
import math
from numpy import dot, cross
from .numpy_quaternion import _eps
rfrakMag = math.sqrt(rfrak[0] * rfrak[0] + rfrak[1] * rfrak[1] + rfrak[2] * rfrak[2])
OmegaMag = math.sqrt(Omega[0] * Omega[0] + Omega[1] * Omega[1] + Omega[2] * Omega[2])
# If the matrix is really close to the identity, return
if rfrakMag < _eps * OmegaMag:
return Omega[0] / 2.0, Omega[1] / 2.0, Omega[2] / 2.0
# If the matrix is really close to singular, it's equivalent to the identity, so return
if abs(math.sin(rfrakMag)) < _eps:
return Omega[0] / 2.0, Omega[1] / 2.0, Omega[2] / 2.0
OmegaOver2 = Omega[0] / 2.0, Omega[1] / 2.0, Omega[2] / 2.0
rfrakHat = rfrak[0] / rfrakMag, rfrak[1] / rfrakMag, rfrak[2] / rfrakMag
return ((OmegaOver2 - rfrakHat * dot(rfrakHat, OmegaOver2)) * (rfrakMag / math.tan(rfrakMag))
+ rfrakHat * dot(rfrakHat, OmegaOver2) + cross(OmegaOver2, rfrak))
class appending_array(object):
def __init__(self, shape, dtype=np.float, initial_array=None):
shape = list(shape)
if shape[0] < 4:
shape[0] = 4
self._a = np.empty(shape, dtype=dtype)
self.n = 0
if initial_array is not None:
assert initial_array.dtype == dtype
assert initial_array.shape[1:] == shape[1:]
assert initial_array.shape[0] <= shape[0]
self.n = initial_array.shape[0]
self._a[:self.n, ...] = initial_array[:]
def append(self, row):
self.n += 1
if self.n > self._a.shape[0]:
self._a = np.resize(self._a, (2*self._a.shape[0],)+self._a.shape[1:])
self._a[self.n-1, ...] = row
@property
def a(self):
return self._a[:self.n, ...]
def integrate_angular_velocity(Omega, t0, t1, R0=None, tolerance=1e-12):
"""Compute frame with given angular velocity
Parameters
==========
Omega: tuple or callable
Angular velocity from which to compute frame. Can be
1) a 2-tuple of float arrays (t, v) giving the angular velocity vector at a series of times,
2) a function of time that returns the 3-vector angular velocity, or
3) a function of time and orientation (t, R) that returns the 3-vector angular velocity
In case 1, the angular velocity will be interpolated to the required times. Note that accuracy
is poor in case 1.
t0: float
Initial time
t1: float
Final time
R0: quaternion, optional
Initial frame orientation. Defaults to 1 (the identity orientation).
tolerance: float, optional
Absolute tolerance used in integration. Defaults to 1e-12.
Returns
=======
t: float array
R: quaternion array
"""
import warnings
from scipy.integrate import ode
if R0 is None:
R0 = quaternion.one
input_is_tabulated = False
try:
t_Omega, v = Omega
from scipy.interpolate import InterpolatedUnivariateSpline
Omega_x = InterpolatedUnivariateSpline(t_Omega, v[:, 0])
Omega_y = InterpolatedUnivariateSpline(t_Omega, v[:, 1])
Omega_z = InterpolatedUnivariateSpline(t_Omega, v[:, 2])
def Omega_func(t, R):
return [Omega_x(t), Omega_y(t), Omega_z(t)]
Omega_func(t0, R0)
input_is_tabulated = True
except (TypeError, ValueError):
def Omega_func(t, R):
return Omega(t, R)
try:
Omega_func(t0, R0)
except TypeError:
def Omega_func(t, R):
return Omega(t)
Omega_func(t0, R0)
def RHS(t, y):
R = quaternion.quaternion(*y)
return (0.5 * quaternion.quaternion(0.0, *Omega_func(t, R)) * R).components
y0 = R0.components
if input_is_tabulated:
from scipy.integrate import solve_ivp
t = t_Omega
t_span = [t_Omega[0], t_Omega[-1]]
solution = solve_ivp(RHS, t_span, y0, t_eval=t_Omega, atol=tolerance, rtol=100*np.finfo(float).eps)
R = quaternion.from_float_array(solution.y.T)
else:
solver = ode(RHS)
solver.set_initial_value(y0, t0)
solver.set_integrator('dop853', nsteps=1, atol=tolerance, rtol=0.0)
solver._integrator.iwork[2] = -1 # suppress Fortran-printed warning
t = appending_array((int(t1-t0),))
t.append(solver.t)
R = appending_array((int(t1-t0), 4))
R.append(solver.y)
warnings.filterwarnings("ignore", category=UserWarning)
t_last = solver.t
while solver.t < t1:
solver.integrate(t1, step=True)
if solver.t > t_last:
t.append(solver.t)
R.append(solver.y)
t_last = solver.t
warnings.resetwarnings()
t = t.a
R = quaternion.as_quat_array(R.a)
return t, R
def angular_velocity(R, t):
from scipy.interpolate import InterpolatedUnivariateSpline as spline
R = quaternion.as_float_array(R)
Rdot = np.empty_like(R)
for i in range(4):
Rdot[:, i] = spline(t, R[:, i]).derivative()(t)
R = quaternion.from_float_array(R)
Rdot = quaternion.from_float_array(Rdot)
return np.array([omega.vec for omega in (2*Rdot/R)])
|
moble/quaternion
|
means.py
|
mean_rotor_in_chordal_metric
|
python
|
def mean_rotor_in_chordal_metric(R, t=None):
if not t:
return np.quaternion(*(np.sum(as_float_array(R)))).normalized()
mean = np.empty((4,), dtype=float)
definite_integral(as_float_array(R), t, mean)
return np.quaternion(*mean).normalized()
|
Return rotor that is closest to all R in the least-squares sense
This can be done (quasi-)analytically because of the simplicity of
the chordal metric function. The only approximation is the simple
2nd-order discrete formula for the definite integral of the input
rotor function.
Note that the `t` argument is optional. If it is present, the
times are used to weight the corresponding integral. If it is not
present, a simple sum is used instead (which may be slightly
faster).
|
train
|
https://github.com/moble/quaternion/blob/7a323e81b391d6892e2874073e495e0beb057e85/means.py#L11-L29
|
[
"def _identity_decorator_inner(fn):\n return fn\n"
] |
# Copyright (c) 2017, Michael Boyle
# See LICENSE file for details: <https://github.com/moble/quaternion/blob/master/LICENSE>
from __future__ import division, print_function, absolute_import
import numpy as np
from .calculus import definite_integral
def optimal_alignment_in_chordal_metric(Ra, Rb, t=None):
"""Return Rd such that Rd*Rb is as close to Ra as possible
This function simply encapsulates the mean rotor of Ra/Rb.
As in the `mean_rotor_in_chordal_metric` function, the `t`
argument is optional. If it is present, the times are used to
weight the corresponding integral. If it is not present, a simple
sum is used instead (which may be slightly faster).
"""
return mean_rotor_in_chordal_metric(Ra / Rb, t)
def mean_rotor_in_intrinsic_metric(R, t=None):
raise NotImplementedError()
|
moble/quaternion
|
__init__.py
|
as_float_array
|
python
|
def as_float_array(a):
return np.asarray(a, dtype=np.quaternion).view((np.double, 4))
|
View the quaternion array as an array of floats
This function is fast (of order 1 microsecond) because no data is
copied; the returned quantity is just a "view" of the original.
The output view has one more dimension (of size 4) than the input
array, but is otherwise the same shape.
|
train
|
https://github.com/moble/quaternion/blob/7a323e81b391d6892e2874073e495e0beb057e85/__init__.py#L53-L63
| null |
# Copyright (c) 2017, Michael Boyle
# See LICENSE file for details: <https://github.com/moble/quaternion/blob/master/LICENSE>
from __future__ import division, print_function, absolute_import
import numpy as np
from .numpy_quaternion import (quaternion, _eps,
slerp_evaluate, squad_evaluate,
# slerp_vectorized, squad_vectorized,
# slerp, squad,
)
from .quaternion_time_series import slerp, squad, integrate_angular_velocity, minimal_rotation
from .calculus import derivative, definite_integral, indefinite_integral
from .means import mean_rotor_in_chordal_metric, optimal_alignment_in_chordal_metric
from ._version import __version__
__doc_title__ = "Quaternion dtype for NumPy"
__doc__ = "Adds a quaternion dtype to NumPy."
__all__ = ['quaternion',
'as_quat_array', 'as_spinor_array',
'as_float_array', 'from_float_array',
'as_rotation_matrix', 'from_rotation_matrix',
'as_rotation_vector', 'from_rotation_vector',
'as_euler_angles', 'from_euler_angles',
'as_spherical_coords', 'from_spherical_coords',
'rotate_vectors', 'allclose',
'rotor_intrinsic_distance', 'rotor_chordal_distance',
'rotation_intrinsic_distance', 'rotation_chordal_distance',
'slerp_evaluate', 'squad_evaluate',
'zero', 'one', 'x', 'y', 'z', 'integrate_angular_velocity',
'squad', 'slerp', 'derivative', 'definite_integral', 'indefinite_integral']
if 'quaternion' in np.__dict__:
raise RuntimeError('The NumPy package already has a quaternion type')
np.quaternion = quaternion
np.typeDict['quaternion'] = np.dtype(quaternion)
zero = np.quaternion(0, 0, 0, 0)
one = np.quaternion(1, 0, 0, 0)
x = np.quaternion(0, 1, 0, 0)
y = np.quaternion(0, 0, 1, 0)
z = np.quaternion(0, 0, 0, 1)
rotor_intrinsic_distance = np.rotor_intrinsic_distance
rotor_chordal_distance = np.rotor_chordal_distance
rotation_intrinsic_distance = np.rotation_intrinsic_distance
rotation_chordal_distance = np.rotation_chordal_distance
def as_quat_array(a):
"""View a float array as an array of quaternions
The input array must have a final dimension whose size is
divisible by four (or better yet *is* 4), because successive
indices in that last dimension will be considered successive
components of the output quaternion.
This function is usually fast (of order 1 microsecond) because no
data is copied; the returned quantity is just a "view" of the
original. However, if the input array is not C-contiguous
(basically, as you increment the index into the last dimension of
the array, you just move to the neighboring float in memory), the
data will need to be copied which may be quite slow. Therefore,
you should try to ensure that the input array is in that order.
Slices and transpositions will frequently break that rule.
We will not convert back from a two-spinor array because there is
no unique convention for them, so I don't want to mess with that.
Also, we want to discourage users from the slow, memory-copying
process of swapping columns required for useful definitions of
the two-spinors.
"""
a = np.asarray(a, dtype=np.double)
# fast path
if a.shape == (4,):
return quaternion(a[0], a[1], a[2], a[3])
# view only works if the last axis is C-contiguous
if not a.flags['C_CONTIGUOUS'] or a.strides[-1] != a.itemsize:
a = a.copy(order='C')
try:
av = a.view(np.quaternion)
except ValueError as e:
message = (str(e) + '\n '
+ 'Failed to view input data as a series of quaternions. '
+ 'Please ensure that the last dimension has size divisible by 4.\n '
+ 'Input data has shape {0} and dtype {1}.'.format(a.shape, a.dtype))
raise ValueError(message)
# special case: don't create an axis for a single quaternion, to
# match the output of `as_float_array`
if av.shape[-1] == 1:
av = av.reshape(a.shape[:-1])
return av
def from_float_array(a):
return as_quat_array(a)
def as_spinor_array(a):
"""View a quaternion array as spinors in two-complex representation
This function is relatively slow and scales poorly, because memory
copying is apparently involved -- I think it's due to the
"advanced indexing" required to swap the columns.
"""
a = np.atleast_1d(a)
assert a.dtype == np.dtype(np.quaternion)
# I'm not sure why it has to be so complicated, but all of these steps
# appear to be necessary in this case.
return a.view(np.float).reshape(a.shape + (4,))[..., [0, 3, 2, 1]].ravel().view(np.complex).reshape(a.shape + (2,))
def as_rotation_matrix(q):
"""Convert input quaternion to 3x3 rotation matrix
Parameters
----------
q: quaternion or array of quaternions
The quaternion(s) need not be normalized, but must all be nonzero
Returns
-------
rot: float array
Output shape is q.shape+(3,3). This matrix should multiply (from
the left) a column vector to produce the rotated column vector.
Raises
------
ZeroDivisionError
If any of the input quaternions have norm 0.0.
"""
if q.shape == () and not isinstance(q, np.ndarray): # This is just a single quaternion
n = q.norm()
if n == 0.0:
raise ZeroDivisionError("Input to `as_rotation_matrix({0})` has zero norm".format(q))
elif abs(n-1.0) < _eps: # Input q is basically normalized
return np.array([
[1 - 2*(q.y**2 + q.z**2), 2*(q.x*q.y - q.z*q.w), 2*(q.x*q.z + q.y*q.w)],
[2*(q.x*q.y + q.z*q.w), 1 - 2*(q.x**2 + q.z**2), 2*(q.y*q.z - q.x*q.w)],
[2*(q.x*q.z - q.y*q.w), 2*(q.y*q.z + q.x*q.w), 1 - 2*(q.x**2 + q.y**2)]
])
else: # Input q is not normalized
return np.array([
[1 - 2*(q.y**2 + q.z**2)/n, 2*(q.x*q.y - q.z*q.w)/n, 2*(q.x*q.z + q.y*q.w)/n],
[2*(q.x*q.y + q.z*q.w)/n, 1 - 2*(q.x**2 + q.z**2)/n, 2*(q.y*q.z - q.x*q.w)/n],
[2*(q.x*q.z - q.y*q.w)/n, 2*(q.y*q.z + q.x*q.w)/n, 1 - 2*(q.x**2 + q.y**2)/n]
])
else: # This is an array of quaternions
n = np.norm(q)
if np.any(n == 0.0):
raise ZeroDivisionError("Array input to `as_rotation_matrix` has at least one element with zero norm")
else: # Assume input q is not normalized
m = np.empty(q.shape + (3, 3))
q = as_float_array(q)
m[..., 0, 0] = 1.0 - 2*(q[..., 2]**2 + q[..., 3]**2)/n
m[..., 0, 1] = 2*(q[..., 1]*q[..., 2] - q[..., 3]*q[..., 0])/n
m[..., 0, 2] = 2*(q[..., 1]*q[..., 3] + q[..., 2]*q[..., 0])/n
m[..., 1, 0] = 2*(q[..., 1]*q[..., 2] + q[..., 3]*q[..., 0])/n
m[..., 1, 1] = 1.0 - 2*(q[..., 1]**2 + q[..., 3]**2)/n
m[..., 1, 2] = 2*(q[..., 2]*q[..., 3] - q[..., 1]*q[..., 0])/n
m[..., 2, 0] = 2*(q[..., 1]*q[..., 3] - q[..., 2]*q[..., 0])/n
m[..., 2, 1] = 2*(q[..., 2]*q[..., 3] + q[..., 1]*q[..., 0])/n
m[..., 2, 2] = 1.0 - 2*(q[..., 1]**2 + q[..., 2]**2)/n
return m
def from_rotation_matrix(rot, nonorthogonal=True):
"""Convert input 3x3 rotation matrix to unit quaternion
By default, if scipy.linalg is available, this function uses
Bar-Itzhack's algorithm to allow for non-orthogonal matrices.
[J. Guidance, Vol. 23, No. 6, p. 1085 <http://dx.doi.org/10.2514/2.4654>]
This will almost certainly be quite a bit slower than simpler versions,
though it will be more robust to numerical errors in the rotation matrix.
Also note that Bar-Itzhack uses some pretty weird conventions. The last
component of the quaternion appears to represent the scalar, and the
quaternion itself is conjugated relative to the convention used
throughout this module.
If scipy.linalg is not available or if the optional
`nonorthogonal` parameter is set to `False`, this function falls
back to the possibly faster, but less robust, algorithm of Markley
[J. Guidance, Vol. 31, No. 2, p. 440
<http://dx.doi.org/10.2514/1.31730>].
Parameters
----------
rot: (...Nx3x3) float array
Each 3x3 matrix represents a rotation by multiplying (from the left)
a column vector to produce a rotated column vector. Note that this
input may actually have ndims>3; it is just assumed that the last
two dimensions have size 3, representing the matrix.
nonorthogonal: bool, optional
If scipy.linalg is available, use the more robust algorithm of
Bar-Itzhack. Default value is True.
Returns
-------
q: array of quaternions
Unit quaternions resulting in rotations corresponding to input
rotations. Output shape is rot.shape[:-2].
Raises
------
LinAlgError
If any of the eigenvalue solutions does not converge
"""
try:
from scipy import linalg
except ImportError:
linalg = False
rot = np.array(rot, copy=False)
shape = rot.shape[:-2]
if linalg and nonorthogonal:
from operator import mul
from functools import reduce
K3 = np.empty(shape+(4, 4))
K3[..., 0, 0] = (rot[..., 0, 0] - rot[..., 1, 1] - rot[..., 2, 2])/3.0
K3[..., 0, 1] = (rot[..., 1, 0] + rot[..., 0, 1])/3.0
K3[..., 0, 2] = (rot[..., 2, 0] + rot[..., 0, 2])/3.0
K3[..., 0, 3] = (rot[..., 1, 2] - rot[..., 2, 1])/3.0
K3[..., 1, 0] = K3[..., 0, 1]
K3[..., 1, 1] = (rot[..., 1, 1] - rot[..., 0, 0] - rot[..., 2, 2])/3.0
K3[..., 1, 2] = (rot[..., 2, 1] + rot[..., 1, 2])/3.0
K3[..., 1, 3] = (rot[..., 2, 0] - rot[..., 0, 2])/3.0
K3[..., 2, 0] = K3[..., 0, 2]
K3[..., 2, 1] = K3[..., 1, 2]
K3[..., 2, 2] = (rot[..., 2, 2] - rot[..., 0, 0] - rot[..., 1, 1])/3.0
K3[..., 2, 3] = (rot[..., 0, 1] - rot[..., 1, 0])/3.0
K3[..., 3, 0] = K3[..., 0, 3]
K3[..., 3, 1] = K3[..., 1, 3]
K3[..., 3, 2] = K3[..., 2, 3]
K3[..., 3, 3] = (rot[..., 0, 0] + rot[..., 1, 1] + rot[..., 2, 2])/3.0
if not shape:
q = zero.copy()
eigvals, eigvecs = linalg.eigh(K3.T, eigvals=(3, 3))
q.components[0] = eigvecs[-1]
q.components[1:] = -eigvecs[:-1].flatten()
return q
else:
q = np.empty(shape+(4,), dtype=np.float)
for flat_index in range(reduce(mul, shape)):
multi_index = np.unravel_index(flat_index, shape)
eigvals, eigvecs = linalg.eigh(K3[multi_index], eigvals=(3, 3))
q[multi_index+(0,)] = eigvecs[-1]
q[multi_index+(slice(1,None),)] = -eigvecs[:-1].flatten()
return as_quat_array(q)
else: # No scipy.linalg or not `nonorthogonal`
diagonals = np.empty(shape+(4,))
diagonals[..., 0] = rot[..., 0, 0]
diagonals[..., 1] = rot[..., 1, 1]
diagonals[..., 2] = rot[..., 2, 2]
diagonals[..., 3] = rot[..., 0, 0] + rot[..., 1, 1] + rot[..., 2, 2]
indices = np.argmax(diagonals, axis=-1)
q = diagonals # reuse storage space
indices_i = (indices == 0)
if np.any(indices_i):
if indices_i.shape == ():
indices_i = Ellipsis
rot_i = rot[indices_i, :, :]
q[indices_i, 0] = rot_i[..., 2, 1] - rot_i[..., 1, 2]
q[indices_i, 1] = 1 + rot_i[..., 0, 0] - rot_i[..., 1, 1] - rot_i[..., 2, 2]
q[indices_i, 2] = rot_i[..., 0, 1] + rot_i[..., 1, 0]
q[indices_i, 3] = rot_i[..., 0, 2] + rot_i[..., 2, 0]
indices_i = (indices == 1)
if np.any(indices_i):
if indices_i.shape == ():
indices_i = Ellipsis
rot_i = rot[indices_i, :, :]
q[indices_i, 0] = rot_i[..., 0, 2] - rot_i[..., 2, 0]
q[indices_i, 1] = rot_i[..., 1, 0] + rot_i[..., 0, 1]
q[indices_i, 2] = 1 - rot_i[..., 0, 0] + rot_i[..., 1, 1] - rot_i[..., 2, 2]
q[indices_i, 3] = rot_i[..., 1, 2] + rot_i[..., 2, 1]
indices_i = (indices == 2)
if np.any(indices_i):
if indices_i.shape == ():
indices_i = Ellipsis
rot_i = rot[indices_i, :, :]
q[indices_i, 0] = rot_i[..., 1, 0] - rot_i[..., 0, 1]
q[indices_i, 1] = rot_i[..., 2, 0] + rot_i[..., 0, 2]
q[indices_i, 2] = rot_i[..., 2, 1] + rot_i[..., 1, 2]
q[indices_i, 3] = 1 - rot_i[..., 0, 0] - rot_i[..., 1, 1] + rot_i[..., 2, 2]
indices_i = (indices == 3)
if np.any(indices_i):
if indices_i.shape == ():
indices_i = Ellipsis
rot_i = rot[indices_i, :, :]
q[indices_i, 0] = 1 + rot_i[..., 0, 0] + rot_i[..., 1, 1] + rot_i[..., 2, 2]
q[indices_i, 1] = rot_i[..., 2, 1] - rot_i[..., 1, 2]
q[indices_i, 2] = rot_i[..., 0, 2] - rot_i[..., 2, 0]
q[indices_i, 3] = rot_i[..., 1, 0] - rot_i[..., 0, 1]
q /= np.linalg.norm(q, axis=-1)[..., np.newaxis]
return as_quat_array(q)
def as_rotation_vector(q):
"""Convert input quaternion to the axis-angle representation
Note that if any of the input quaternions has norm zero, no error is
raised, but NaNs will appear in the output.
Parameters
----------
q: quaternion or array of quaternions
The quaternion(s) need not be normalized, but must all be nonzero
Returns
-------
rot: float array
Output shape is q.shape+(3,). Each vector represents the axis of
the rotation, with norm proportional to the angle of the rotation in
radians.
"""
return as_float_array(2*np.log(np.normalized(q)))[..., 1:]
def from_rotation_vector(rot):
"""Convert input 3-vector in axis-angle representation to unit quaternion
Parameters
----------
rot: (Nx3) float array
Each vector represents the axis of the rotation, with norm
proportional to the angle of the rotation in radians.
Returns
-------
q: array of quaternions
Unit quaternions resulting in rotations corresponding to input
rotations. Output shape is rot.shape[:-1].
"""
rot = np.array(rot, copy=False)
quats = np.zeros(rot.shape[:-1]+(4,))
quats[..., 1:] = rot[...]/2
quats = as_quat_array(quats)
return np.exp(quats)
def as_euler_angles(q):
    """Open Pandora's Box

    If somebody is trying to make you use Euler angles, tell them no, and
    walk away, and go and tell your mum.

    You don't want to use Euler angles.  They are awful.  Stay away.  It's
    one thing to convert from Euler angles to quaternions; at least you're
    moving in the right direction.  But to go the other way?!  It's just not
    right.

    Assumes the Euler angles correspond to the quaternion R via

        R = exp(alpha*z/2) * exp(beta*y/2) * exp(gamma*z/2)

    The angles are naturally in radians.

    NOTE: Before opening an issue reporting something "wrong" with this
    function, be sure to read all of the following page, *especially* the
    very last section about opening issues or pull requests.
    <https://github.com/moble/quaternion/wiki/Euler-angles-are-horrible>

    Parameters
    ----------
    q: quaternion or array of quaternions
        The quaternion(s) need not be normalized, but must all be nonzero

    Returns
    -------
    alpha_beta_gamma: float array
        Output shape is q.shape+(3,).  These represent the angles (alpha,
        beta, gamma) in radians, where the normalized input quaternion
        represents `exp(alpha*z/2) * exp(beta*y/2) * exp(gamma*z/2)`.

    Raises
    ------
    AllHell
        ...if you try to actually use Euler angles, when you could have
        been using quaternions like a sensible person.
    """
    # FIX: `np.float` was removed in NumPy 1.24 (it was only an alias for
    # the builtin `float`, i.e. float64); use the builtin directly.
    alpha_beta_gamma = np.empty(q.shape + (3,), dtype=float)
    # `np.norm` is supplied by this package; dividing (w**2 + z**2) by it
    # under the sqrt below implies it is the *squared* magnitude.
    n = np.norm(q)
    q = as_float_array(q)  # components: [..., 0]=w, 1=x, 2=y, 3=z
    alpha_beta_gamma[..., 0] = np.arctan2(q[..., 3], q[..., 0]) + np.arctan2(-q[..., 1], q[..., 2])
    alpha_beta_gamma[..., 1] = 2*np.arccos(np.sqrt((q[..., 0]**2 + q[..., 3]**2)/n))
    alpha_beta_gamma[..., 2] = np.arctan2(q[..., 3], q[..., 0]) - np.arctan2(-q[..., 1], q[..., 2])
    return alpha_beta_gamma
def from_euler_angles(alpha_beta_gamma, beta=None, gamma=None):
    """Improve your life drastically

    Assumes the Euler angles correspond to the quaternion R via

        R = exp(alpha*z/2) * exp(beta*y/2) * exp(gamma*z/2)

    The angles must be given in radians for this to make any sense.

    NOTE: Before opening an issue reporting something "wrong" with this
    function, be sure to read all of the following page, *especially* the
    very last section about opening issues or pull requests.
    <https://github.com/moble/quaternion/wiki/Euler-angles-are-horrible>

    Parameters
    ----------
    alpha_beta_gamma: float or array of floats
        Either an array whose last dimension has size 3, holding the
        (alpha, beta, gamma) radian values for each rotation; or just the
        alpha values, in which case the next two arguments are required.
    beta: None, float, or array of floats
        If given, must broadcast against the first and third arguments.
    gamma: None, float, or array of floats
        If given, must broadcast against the first and second arguments.

    Returns
    -------
    R: quaternion array
        Same shape as the input, minus the final dimension.
    """
    # Accept either the packed (..., 3) form or three separate arrays.
    if gamma is None:
        angles = np.asarray(alpha_beta_gamma, dtype=np.double)
        alpha, beta, gamma = angles[..., 0], angles[..., 1], angles[..., 2]
    else:
        alpha = np.asarray(alpha_beta_gamma, dtype=np.double)
        beta = np.asarray(beta, dtype=np.double)
        gamma = np.asarray(gamma, dtype=np.double)
    # Half-angle terms shared between components.
    cos_b, sin_b = np.cos(beta/2), np.sin(beta/2)
    sum_half = (alpha + gamma) / 2
    diff_half = (alpha - gamma) / 2
    R = np.empty(np.broadcast(alpha, beta, gamma).shape + (4,), dtype=np.double)
    R[..., 0] = cos_b * np.cos(sum_half)    # scalar (w) component
    R[..., 1] = -sin_b * np.sin(diff_half)  # x component
    R[..., 2] = sin_b * np.cos(diff_half)   # y component
    R[..., 3] = cos_b * np.sin(sum_half)    # z component
    return as_quat_array(R)
def as_spherical_coords(q):
    """Return the spherical coordinates corresponding to this quaternion.

    Spherical coordinates carry less information than a quaternion, so
    this conversion is lossy: the returned angles describe the point(s)
    on the sphere to which the input quaternion(s) rotate the z axis.

    Parameters
    ----------
    q: quaternion or array of quaternions
        Need not be normalized, but must be nonzero.

    Returns
    -------
    vartheta_varphi: float array
        Shape is q.shape+(2,): the angles (vartheta, varphi) in radians,
        where the normalized input represents
        `exp(varphi*z/2) * exp(vartheta*y/2)` up to an arbitrary initial
        rotation about `z`.
    """
    # The (beta, alpha) Euler angles are exactly (vartheta, varphi);
    # reversing the first two components gives that ordering.
    angles = as_euler_angles(q)
    return angles[..., 1::-1]
def from_spherical_coords(theta_phi, phi=None):
    """Return the quaternion corresponding to these spherical coordinates.

    Assumes the spherical coordinates correspond to the quaternion R via

        R = exp(phi*z/2) * exp(theta*y/2)

    The angles must be in radians.  The resulting quaternion rotates `z`
    onto the point with the given spherical coordinates, and also rotates
    `x` and `y` onto the usual basis vectors (theta and phi respectively)
    at that point.

    Parameters
    ----------
    theta_phi: float or array of floats
        Either an array whose last dimension has size 2, holding the
        (theta, phi) radian values for each point; or just the theta
        values, in which case the next argument is required.
    phi: None, float, or array of floats
        If given, must broadcast against the first argument.

    Returns
    -------
    R: quaternion array
        If `phi` is not given, same shape as the input minus the last
        dimension; otherwise the broadcast shape of the two inputs.
    """
    # Accept either the packed (..., 2) form or two separate arrays.
    if phi is None:
        tp = np.asarray(theta_phi, dtype=np.double)
        theta, phi = tp[..., 0], tp[..., 1]
    else:
        theta = np.asarray(theta_phi, dtype=np.double)
        phi = np.asarray(phi, dtype=np.double)
    # Half-angle terms shared between components.
    ct, st = np.cos(theta/2), np.sin(theta/2)
    cp, sp = np.cos(phi/2), np.sin(phi/2)
    R = np.empty(np.broadcast(theta, phi).shape + (4,), dtype=np.double)
    R[..., 0] = cp * ct   # scalar (w) component
    R[..., 1] = -sp * st  # x component
    R[..., 2] = cp * st   # y component
    R[..., 3] = sp * ct   # z component
    return as_quat_array(R)
def rotate_vectors(R, v, axis=-1):
    """Rotate vectors by given quaternions

    For simplicity, this function simply converts the input
    quaternion(s) to a matrix, and rotates the input vector(s) by the
    usual matrix multiplication.  However, it should be noted that if
    each input quaternion is only used to rotate a single vector, it
    is more efficient (in terms of operation counts) to use the formula

      v' = v + 2 * r x (s * v + r x v) / m

    where x represents the cross product, s and r are the scalar and
    vector parts of the quaternion, respectively, and m is the sum of
    the squares of the components of the quaternion.

    Parameters
    ==========
    R: quaternion array
        Quaternions by which to rotate the input vectors
    v: float array
        Three-vectors to be rotated.
    axis: int
        Axis of the `v` array to use as the vector dimension.  This
        axis of `v` must have length 3.

    Returns
    =======
    vprime: float array
        The rotated vectors.  This array has shape R.shape+v.shape.
    """
    R = np.asarray(R, dtype=np.quaternion)
    v = np.asarray(v, dtype=float)
    # Validate that `v` really carries 3-vectors along the requested axis.
    if v.ndim < 1 or 3 not in v.shape:
        raise ValueError("Input `v` does not have at least one dimension of length 3")
    if v.shape[axis] != 3:
        raise ValueError("Input `v` axis {0} has length {1}, not 3.".format(axis, v.shape[axis]))
    # One 3x3 matrix per quaternion; shape R.shape+(3,3).
    m = as_rotation_matrix(R)
    # Build the axis lists for einsum's interleaved calling convention:
    # m uses axes [0..m.ndim), v uses fresh labels after those.
    m_axes = list(range(m.ndim))
    v_axes = list(range(m.ndim, m.ndim+v.ndim))
    # Output keeps all of m's batch axes, then v's axes -- except that v's
    # vector axis is replaced by the matrix *row* axis (m_axes[-2]), so the
    # rotated components land where the original components were.
    mv_axes = list(v_axes)
    mv_axes[axis] = m_axes[-2]
    mv_axes = m_axes[:-2] + mv_axes
    # Contract v's vector axis against the matrix *column* axis.
    v_axes[axis] = m_axes[-1]
    return np.einsum(m, m_axes, v, v_axes, mv_axes)
def isclose(a, b, rtol=4*np.finfo(float).eps, atol=0.0, equal_nan=False):
    """
    Returns a boolean array where two arrays are element-wise equal within a
    tolerance.

    This function is essentially a copy of the `numpy.isclose` function,
    with different default tolerances and one minor change necessary to
    deal correctly with quaternions.

    The tolerance values are positive, typically very small numbers.  The
    relative difference (`rtol` * abs(`b`)) and the absolute difference
    `atol` are added together to compare against the absolute difference
    between `a` and `b`.

    Parameters
    ----------
    a, b : array_like
        Input arrays to compare.
    rtol : float
        The relative tolerance parameter (see Notes).
    atol : float
        The absolute tolerance parameter (see Notes).
    equal_nan : bool
        Whether to compare NaN's as equal.  If True, NaN's in `a` will be
        considered equal to NaN's in `b` in the output array.

    Returns
    -------
    y : array_like
        Returns a boolean array of where `a` and `b` are equal within the
        given tolerance.  If both `a` and `b` are scalars, returns a single
        boolean value.

    See Also
    --------
    allclose

    Notes
    -----
    For finite values, isclose uses the following equation to test whether
    two floating point values are equivalent:

        absolute(`a` - `b`) <= (`atol` + `rtol` * absolute(`b`))

    The above equation is not symmetric in `a` and `b`, so that
    `isclose(a, b)` might be different from `isclose(b, a)` in
    some rare cases.
    """
    def within_tol(x, y, atol, rtol):
        # Suppress "invalid value" warnings from comparisons involving NaN.
        with np.errstate(invalid='ignore'):
            result = np.less_equal(abs(x-y), atol + rtol * abs(y))
        # Closure over the *original* inputs: scalar in, plain bool out.
        if np.isscalar(a) and np.isscalar(b):
            result = bool(result)
        return result
    x = np.array(a, copy=False, subok=True, ndmin=1)
    y = np.array(b, copy=False, subok=True, ndmin=1)
    # Make sure y is an inexact type to avoid bad behavior on abs(MIN_INT).
    # This will cause casting of x later.  Also, make sure to allow subclasses
    # (e.g., for numpy.ma).  The quaternion-specific change: if y cannot be
    # promoted against 1.0 (TypeError), fall back to the quaternion dtype.
    try:
        dt = np.result_type(y, 1.)
    except TypeError:
        dt = np.dtype(np.quaternion)
    y = np.array(y, dtype=dt, copy=False, subok=True)
    xfin = np.isfinite(x)
    yfin = np.isfinite(y)
    if np.all(xfin) and np.all(yfin):
        # Fast path: no infinities or NaNs anywhere.
        return within_tol(x, y, atol, rtol)
    else:
        finite = xfin & yfin
        cond = np.zeros_like(finite, subok=True)
        # Because we're using boolean indexing, x & y must be the same shape.
        # Ideally, we'd just do x, y = broadcast_arrays(x, y).  It's in
        # lib.stride_tricks, though, so we can't import it here.
        x = x * np.ones_like(cond)
        y = y * np.ones_like(cond)
        # Avoid subtraction with infinite/nan values...
        cond[finite] = within_tol(x[finite], y[finite], atol, rtol)
        # Check for equality of infinite values...
        cond[~finite] = (x[~finite] == y[~finite])
        if equal_nan:
            # Make NaN == NaN
            both_nan = np.isnan(x) & np.isnan(y)
            cond[both_nan] = both_nan[both_nan]
        if np.isscalar(a) and np.isscalar(b):
            return bool(cond)
        else:
            return cond
def allclose(a, b, rtol=4*np.finfo(float).eps, atol=0.0, equal_nan=False, verbose=False):
    """
    Returns True if two arrays are element-wise equal within a tolerance.

    This function is essentially a wrapper for the `quaternion.isclose`
    function, but returns a single boolean value of True if all elements
    of the output from `quaternion.isclose` are True, and False otherwise.
    When `verbose` is True and the result is False, the non-close value
    pairs are printed along with their indices.

    Note that this function has stricter default tolerances than the
    `numpy.allclose` function, as well as the additional `verbose` option.

    Parameters
    ----------
    a, b : array_like
        Input arrays to compare.
    rtol : float
        The relative tolerance parameter (see Notes).
    atol : float
        The absolute tolerance parameter (see Notes).
    equal_nan : bool
        Whether to compare NaN's as equal.  If True, NaN's in `a` will be
        considered equal to NaN's in `b` in the output array.
    verbose : bool
        If True and the result is False, print every non-close pair.

    Returns
    -------
    allclose : bool
        Returns True if the two arrays are equal within the given
        tolerance; False otherwise.

    See Also
    --------
    isclose, numpy.all, numpy.any, numpy.allclose

    Notes
    -----
    If the following equation is element-wise True, then allclose returns
    True.

        absolute(`a` - `b`) <= (`atol` + `rtol` * absolute(`b`))

    The above equation is not symmetric in `a` and `b`, so that
    `allclose(a, b)` might be different from `allclose(b, a)` in
    some rare cases.
    """
    close = isclose(a, b, rtol=rtol, atol=atol, equal_nan=equal_nan)
    result = np.all(close)
    if verbose and not result:
        print('Non-close values:')
        # Element-wise negation; equivalent to the old `close == False`
        # comparison but idiomatic for boolean arrays.
        for i in np.argwhere(np.logical_not(close)):
            i = tuple(i)
            print('\n x[{0}]={1}\n y[{0}]={2}'.format(i, a[i], b[i]))
    return result
|
moble/quaternion
|
__init__.py
|
as_quat_array
|
python
|
def as_quat_array(a):
    """View a float array as an array of quaternions.

    The input's last dimension must have size divisible by 4; successive
    groups of four floats become the (w, x, y, z) components of each
    output quaternion.  Usually no data is copied (the result is a view),
    but a non-C-contiguous last axis forces a copy.
    """
    a = np.asarray(a, dtype=np.double)
    # fast path: a single flat quadruple becomes one scalar quaternion
    if a.shape == (4,):
        return quaternion(a[0], a[1], a[2], a[3])
    # view only works if the last axis is C-contiguous
    if not a.flags['C_CONTIGUOUS'] or a.strides[-1] != a.itemsize:
        a = a.copy(order='C')
    try:
        av = a.view(np.quaternion)
    except ValueError as e:
        # Re-raise with a hint about the divisible-by-4 requirement.
        message = (str(e) + '\n '
                   + 'Failed to view input data as a series of quaternions. '
                   + 'Please ensure that the last dimension has size divisible by 4.\n '
                   + 'Input data has shape {0} and dtype {1}.'.format(a.shape, a.dtype))
        raise ValueError(message)
    # special case: don't create an axis for a single quaternion, to
    # match the output of `as_float_array`
    if av.shape[-1] == 1:
        av = av.reshape(a.shape[:-1])
    return av
|
View a float array as an array of quaternions
The input array must have a final dimension whose size is
divisible by four (or better yet *is* 4), because successive
indices in that last dimension will be considered successive
components of the output quaternion.
This function is usually fast (of order 1 microsecond) because no
data is copied; the returned quantity is just a "view" of the
original. However, if the input array is not C-contiguous
(basically, as you increment the index into the last dimension of
the array, you just move to the neighboring float in memory), the
data will need to be copied which may be quite slow. Therefore,
you should try to ensure that the input array is in that order.
Slices and transpositions will frequently break that rule.
We will not convert back from a two-spinor array because there is
no unique convention for them, so I don't want to mess with that.
Also, we want to discourage users from the slow, memory-copying
process of swapping columns required for useful definitions of
the two-spinors.
|
train
|
https://github.com/moble/quaternion/blob/7a323e81b391d6892e2874073e495e0beb057e85/__init__.py#L66-L113
| null |
# Copyright (c) 2017, Michael Boyle
# See LICENSE file for details: <https://github.com/moble/quaternion/blob/master/LICENSE>
from __future__ import division, print_function, absolute_import
import numpy as np
from .numpy_quaternion import (quaternion, _eps,
slerp_evaluate, squad_evaluate,
# slerp_vectorized, squad_vectorized,
# slerp, squad,
)
from .quaternion_time_series import slerp, squad, integrate_angular_velocity, minimal_rotation
from .calculus import derivative, definite_integral, indefinite_integral
from .means import mean_rotor_in_chordal_metric, optimal_alignment_in_chordal_metric
from ._version import __version__
__doc_title__ = "Quaternion dtype for NumPy"
__doc__ = "Adds a quaternion dtype to NumPy."

# Public API of the package.
__all__ = ['quaternion',
           'as_quat_array', 'as_spinor_array',
           'as_float_array', 'from_float_array',
           'as_rotation_matrix', 'from_rotation_matrix',
           'as_rotation_vector', 'from_rotation_vector',
           'as_euler_angles', 'from_euler_angles',
           'as_spherical_coords', 'from_spherical_coords',
           'rotate_vectors', 'allclose',
           'rotor_intrinsic_distance', 'rotor_chordal_distance',
           'rotation_intrinsic_distance', 'rotation_chordal_distance',
           'slerp_evaluate', 'squad_evaluate',
           'zero', 'one', 'x', 'y', 'z', 'integrate_angular_velocity',
           'squad', 'slerp', 'derivative', 'definite_integral', 'indefinite_integral']

# Refuse to clobber a pre-existing numpy attribute of the same name.
if 'quaternion' in np.__dict__:
    raise RuntimeError('The NumPy package already has a quaternion type')

# Expose the compiled scalar type on the numpy namespace and register its
# dtype by name.  FIX: `np.typeDict` was a deprecated alias removed in
# NumPy 1.24; `np.sctypeDict` is the long-standing canonical mapping and
# also exists on older NumPy versions.
np.quaternion = quaternion
np.sctypeDict['quaternion'] = np.dtype(quaternion)

# Handy constants: additive identity, multiplicative identity, and the
# three imaginary basis quaternions.
zero = np.quaternion(0, 0, 0, 0)
one = np.quaternion(1, 0, 0, 0)
x = np.quaternion(0, 1, 0, 0)
y = np.quaternion(0, 0, 1, 0)
z = np.quaternion(0, 0, 0, 1)

# Module-level aliases for the distance functions; these `np.*` attributes
# are presumably injected by the compiled extension on import -- they are
# not ordinary NumPy functions.
rotor_intrinsic_distance = np.rotor_intrinsic_distance
rotor_chordal_distance = np.rotor_chordal_distance
rotation_intrinsic_distance = np.rotation_intrinsic_distance
rotation_chordal_distance = np.rotation_chordal_distance
def as_float_array(a):
    """View a quaternion array as an ordinary float array.

    Fast (no data is copied; the result is a view of the original).  The
    output gains one trailing dimension of size 4 holding the components,
    but is otherwise the same shape as the input.
    """
    quat_array = np.asarray(a, dtype=np.quaternion)
    return quat_array.view((np.double, 4))
def from_float_array(a):
    """Convert a float array into quaternions; alias for `as_quat_array`."""
    return as_quat_array(a)
def as_spinor_array(a):
    """View a quaternion array as spinors in two-complex representation.

    This function is relatively slow and scales poorly, because memory
    copying is apparently involved -- the "advanced indexing" required to
    swap the columns forces a copy.
    """
    a = np.atleast_1d(a)
    assert a.dtype == np.dtype(np.quaternion)
    # I'm not sure why it has to be so complicated, but all of these steps
    # appear to be necessary in this case.
    # FIX: `np.float`/`np.complex` were removed in NumPy 1.24; the builtins
    # are the equivalent float64/complex128 dtypes.
    return a.view(float).reshape(a.shape + (4,))[..., [0, 3, 2, 1]].ravel().view(complex).reshape(a.shape + (2,))
def as_rotation_matrix(q):
    """Convert input quaternion to 3x3 rotation matrix

    Parameters
    ----------
    q: quaternion or array of quaternions
        The quaternion(s) need not be normalized, but must all be nonzero

    Returns
    -------
    rot: float array
        Output shape is q.shape+(3,3).  This matrix should multiply (from
        the left) a column vector to produce the rotated column vector.

    Raises
    ------
    ZeroDivisionError
        If any of the input quaternions have norm 0.0.
    """
    if q.shape == () and not isinstance(q, np.ndarray):  # This is just a single quaternion
        # NOTE(review): the quadratic terms below are divided by `n` once,
        # which is correct only if `norm()` is the *squared* magnitude;
        # that is consistent with this package's convention -- confirm.
        n = q.norm()
        if n == 0.0:
            raise ZeroDivisionError("Input to `as_rotation_matrix({0})` has zero norm".format(q))
        elif abs(n-1.0) < _eps:  # Input q is basically normalized
            # Standard rotation matrix from a unit quaternion (w, x, y, z).
            return np.array([
                [1 - 2*(q.y**2 + q.z**2), 2*(q.x*q.y - q.z*q.w), 2*(q.x*q.z + q.y*q.w)],
                [2*(q.x*q.y + q.z*q.w), 1 - 2*(q.x**2 + q.z**2), 2*(q.y*q.z - q.x*q.w)],
                [2*(q.x*q.z - q.y*q.w), 2*(q.y*q.z + q.x*q.w), 1 - 2*(q.x**2 + q.y**2)]
            ])
        else:  # Input q is not normalized
            # Same formula, normalizing the quadratic terms by n.
            return np.array([
                [1 - 2*(q.y**2 + q.z**2)/n, 2*(q.x*q.y - q.z*q.w)/n, 2*(q.x*q.z + q.y*q.w)/n],
                [2*(q.x*q.y + q.z*q.w)/n, 1 - 2*(q.x**2 + q.z**2)/n, 2*(q.y*q.z - q.x*q.w)/n],
                [2*(q.x*q.z - q.y*q.w)/n, 2*(q.y*q.z + q.x*q.w)/n, 1 - 2*(q.x**2 + q.y**2)/n]
            ])
    else:  # This is an array of quaternions
        n = np.norm(q)
        if np.any(n == 0.0):
            raise ZeroDivisionError("Array input to `as_rotation_matrix` has at least one element with zero norm")
        else:  # Assume input q is not normalized
            m = np.empty(q.shape + (3, 3))
            # Components after conversion: q[..., 0]=w, 1=x, 2=y, 3=z.
            q = as_float_array(q)
            m[..., 0, 0] = 1.0 - 2*(q[..., 2]**2 + q[..., 3]**2)/n
            m[..., 0, 1] = 2*(q[..., 1]*q[..., 2] - q[..., 3]*q[..., 0])/n
            m[..., 0, 2] = 2*(q[..., 1]*q[..., 3] + q[..., 2]*q[..., 0])/n
            m[..., 1, 0] = 2*(q[..., 1]*q[..., 2] + q[..., 3]*q[..., 0])/n
            m[..., 1, 1] = 1.0 - 2*(q[..., 1]**2 + q[..., 3]**2)/n
            m[..., 1, 2] = 2*(q[..., 2]*q[..., 3] - q[..., 1]*q[..., 0])/n
            m[..., 2, 0] = 2*(q[..., 1]*q[..., 3] - q[..., 2]*q[..., 0])/n
            m[..., 2, 1] = 2*(q[..., 2]*q[..., 3] + q[..., 1]*q[..., 0])/n
            m[..., 2, 2] = 1.0 - 2*(q[..., 1]**2 + q[..., 2]**2)/n
            return m
def from_rotation_matrix(rot, nonorthogonal=True):
    """Convert input 3x3 rotation matrix to unit quaternion

    By default, if scipy.linalg is available, this function uses
    Bar-Itzhack's algorithm to allow for non-orthogonal matrices.
    [J. Guidance, Vol. 23, No. 6, p. 1085 <http://dx.doi.org/10.2514/2.4654>]
    This will almost certainly be quite a bit slower than simpler versions,
    though it will be more robust to numerical errors in the rotation
    matrix.  Also note that Bar-Itzhack uses some pretty weird conventions.
    The last component of the quaternion appears to represent the scalar,
    and the quaternion itself is conjugated relative to the convention used
    throughout this module.

    If scipy.linalg is not available or if the optional `nonorthogonal`
    parameter is set to `False`, this function falls back to the possibly
    faster, but less robust, algorithm of Markley
    [J. Guidance, Vol. 31, No. 2, p. 440 <http://dx.doi.org/10.2514/1.31730>].

    Parameters
    ----------
    rot: (...Nx3x3) float array
        Each 3x3 matrix represents a rotation by multiplying (from the left)
        a column vector to produce a rotated column vector.  Note that this
        input may actually have ndims>3; it is just assumed that the last
        two dimensions have size 3, representing the matrix.
    nonorthogonal: bool, optional
        If scipy.linalg is available, use the more robust algorithm of
        Bar-Itzhack.  Default value is True.

    Returns
    -------
    q: array of quaternions
        Unit quaternions resulting in rotations corresponding to input
        rotations.  Output shape is rot.shape[:-2].

    Raises
    ------
    LinAlgError
        If any of the eigenvalue solutions does not converge
    """
    try:
        from scipy import linalg
    except ImportError:
        linalg = False

    rot = np.array(rot, copy=False)
    shape = rot.shape[:-2]

    if linalg and nonorthogonal:
        from operator import mul
        from functools import reduce

        # Bar-Itzhack: the optimal quaternion is the eigenvector of the
        # symmetric 4x4 matrix K3 belonging to its largest eigenvalue.
        K3 = np.empty(shape+(4, 4))
        K3[..., 0, 0] = (rot[..., 0, 0] - rot[..., 1, 1] - rot[..., 2, 2])/3.0
        K3[..., 0, 1] = (rot[..., 1, 0] + rot[..., 0, 1])/3.0
        K3[..., 0, 2] = (rot[..., 2, 0] + rot[..., 0, 2])/3.0
        K3[..., 0, 3] = (rot[..., 1, 2] - rot[..., 2, 1])/3.0
        K3[..., 1, 0] = K3[..., 0, 1]
        K3[..., 1, 1] = (rot[..., 1, 1] - rot[..., 0, 0] - rot[..., 2, 2])/3.0
        K3[..., 1, 2] = (rot[..., 2, 1] + rot[..., 1, 2])/3.0
        K3[..., 1, 3] = (rot[..., 2, 0] - rot[..., 0, 2])/3.0
        K3[..., 2, 0] = K3[..., 0, 2]
        K3[..., 2, 1] = K3[..., 1, 2]
        K3[..., 2, 2] = (rot[..., 2, 2] - rot[..., 0, 0] - rot[..., 1, 1])/3.0
        K3[..., 2, 3] = (rot[..., 0, 1] - rot[..., 1, 0])/3.0
        K3[..., 3, 0] = K3[..., 0, 3]
        K3[..., 3, 1] = K3[..., 1, 3]
        K3[..., 3, 2] = K3[..., 2, 3]
        K3[..., 3, 3] = (rot[..., 0, 0] + rot[..., 1, 1] + rot[..., 2, 2])/3.0

        if not shape:
            # Single rotation matrix (K3 is symmetric, so K3.T == K3).
            q = zero.copy()
            # NOTE(review): `eigvals=(3, 3)` selects only the largest
            # eigenpair; this keyword is deprecated in modern SciPy in
            # favor of `subset_by_index` -- confirm supported versions.
            eigvals, eigvecs = linalg.eigh(K3.T, eigvals=(3, 3))
            # Undo Bar-Itzhack's scalar-last, conjugated convention.
            q.components[0] = eigvecs[-1]
            q.components[1:] = -eigvecs[:-1].flatten()
            return q
        else:
            # FIX: `np.float` was removed in NumPy 1.24; the builtin
            # `float` is the equivalent (float64) dtype.
            q = np.empty(shape+(4,), dtype=float)
            for flat_index in range(reduce(mul, shape)):
                multi_index = np.unravel_index(flat_index, shape)
                eigvals, eigvecs = linalg.eigh(K3[multi_index], eigvals=(3, 3))
                q[multi_index+(0,)] = eigvecs[-1]
                q[multi_index+(slice(1,None),)] = -eigvecs[:-1].flatten()
            return as_quat_array(q)

    else:  # No scipy.linalg or not `nonorthogonal`
        # Markley/Shepperd-style method: choose, per matrix, the largest of
        # the four candidate pivots to avoid catastrophic cancellation.
        diagonals = np.empty(shape+(4,))
        diagonals[..., 0] = rot[..., 0, 0]
        diagonals[..., 1] = rot[..., 1, 1]
        diagonals[..., 2] = rot[..., 2, 2]
        diagonals[..., 3] = rot[..., 0, 0] + rot[..., 1, 1] + rot[..., 2, 2]

        indices = np.argmax(diagonals, axis=-1)

        q = diagonals  # reuse storage space

        indices_i = (indices == 0)
        if np.any(indices_i):
            if indices_i.shape == ():
                indices_i = Ellipsis
            rot_i = rot[indices_i, :, :]
            q[indices_i, 0] = rot_i[..., 2, 1] - rot_i[..., 1, 2]
            q[indices_i, 1] = 1 + rot_i[..., 0, 0] - rot_i[..., 1, 1] - rot_i[..., 2, 2]
            q[indices_i, 2] = rot_i[..., 0, 1] + rot_i[..., 1, 0]
            q[indices_i, 3] = rot_i[..., 0, 2] + rot_i[..., 2, 0]

        indices_i = (indices == 1)
        if np.any(indices_i):
            if indices_i.shape == ():
                indices_i = Ellipsis
            rot_i = rot[indices_i, :, :]
            q[indices_i, 0] = rot_i[..., 0, 2] - rot_i[..., 2, 0]
            q[indices_i, 1] = rot_i[..., 1, 0] + rot_i[..., 0, 1]
            q[indices_i, 2] = 1 - rot_i[..., 0, 0] + rot_i[..., 1, 1] - rot_i[..., 2, 2]
            q[indices_i, 3] = rot_i[..., 1, 2] + rot_i[..., 2, 1]

        indices_i = (indices == 2)
        if np.any(indices_i):
            if indices_i.shape == ():
                indices_i = Ellipsis
            rot_i = rot[indices_i, :, :]
            q[indices_i, 0] = rot_i[..., 1, 0] - rot_i[..., 0, 1]
            q[indices_i, 1] = rot_i[..., 2, 0] + rot_i[..., 0, 2]
            q[indices_i, 2] = rot_i[..., 2, 1] + rot_i[..., 1, 2]
            q[indices_i, 3] = 1 - rot_i[..., 0, 0] - rot_i[..., 1, 1] + rot_i[..., 2, 2]

        indices_i = (indices == 3)
        if np.any(indices_i):
            if indices_i.shape == ():
                indices_i = Ellipsis
            rot_i = rot[indices_i, :, :]
            q[indices_i, 0] = 1 + rot_i[..., 0, 0] + rot_i[..., 1, 1] + rot_i[..., 2, 2]
            q[indices_i, 1] = rot_i[..., 2, 1] - rot_i[..., 1, 2]
            q[indices_i, 2] = rot_i[..., 0, 2] - rot_i[..., 2, 0]
            q[indices_i, 3] = rot_i[..., 1, 0] - rot_i[..., 0, 1]

        # Normalize each quadruple before reinterpreting as unit quaternions.
        q /= np.linalg.norm(q, axis=-1)[..., np.newaxis]
        return as_quat_array(q)
def as_rotation_vector(q):
    """Express input quaternion(s) in axis-angle form.

    Each output 3-vector points along the rotation axis, with length equal
    to the rotation angle in radians.  Inputs need not be normalized, but
    no error is raised for zero-norm quaternions; those simply produce
    NaNs in the output.

    Parameters
    ----------
    q: quaternion or array of quaternions

    Returns
    -------
    rot: float array
        Shape is q.shape+(3,).
    """
    # The log of a unit quaternion is pure-imaginary with vector part
    # (angle/2)*axis, so doubling it yields the rotation vector directly.
    log_q = np.log(np.normalized(q))
    return as_float_array(2 * log_q)[..., 1:]
def from_rotation_vector(rot):
    """Convert axis-angle 3-vectors into unit quaternions.

    Parameters
    ----------
    rot: (...x3) float array
        Each vector is the rotation axis scaled by the rotation angle in
        radians.

    Returns
    -------
    q: array of quaternions
        Unit quaternions for the input rotations; shape is rot.shape[:-1].
    """
    vectors = np.array(rot, copy=False)
    # Build pure-imaginary quaternions with vector part rot/2; their
    # exponential is the corresponding unit rotor.
    components = np.zeros(vectors.shape[:-1] + (4,))
    components[..., 1:] = vectors / 2
    return np.exp(as_quat_array(components))
def as_euler_angles(q):
    """Open Pandora's Box

    If somebody is trying to make you use Euler angles, tell them no, and
    walk away, and go and tell your mum.

    You don't want to use Euler angles.  They are awful.  Stay away.  It's
    one thing to convert from Euler angles to quaternions; at least you're
    moving in the right direction.  But to go the other way?!  It's just not
    right.

    Assumes the Euler angles correspond to the quaternion R via

        R = exp(alpha*z/2) * exp(beta*y/2) * exp(gamma*z/2)

    The angles are naturally in radians.

    NOTE: Before opening an issue reporting something "wrong" with this
    function, be sure to read all of the following page, *especially* the
    very last section about opening issues or pull requests.
    <https://github.com/moble/quaternion/wiki/Euler-angles-are-horrible>

    Parameters
    ----------
    q: quaternion or array of quaternions
        The quaternion(s) need not be normalized, but must all be nonzero

    Returns
    -------
    alpha_beta_gamma: float array
        Output shape is q.shape+(3,).  These represent the angles (alpha,
        beta, gamma) in radians, where the normalized input quaternion
        represents `exp(alpha*z/2) * exp(beta*y/2) * exp(gamma*z/2)`.

    Raises
    ------
    AllHell
        ...if you try to actually use Euler angles, when you could have
        been using quaternions like a sensible person.
    """
    # FIX: `np.float` was removed in NumPy 1.24 (it was only an alias for
    # the builtin `float`, i.e. float64); use the builtin directly.
    alpha_beta_gamma = np.empty(q.shape + (3,), dtype=float)
    # `np.norm` is supplied by this package; dividing (w**2 + z**2) by it
    # under the sqrt below implies it is the *squared* magnitude.
    n = np.norm(q)
    q = as_float_array(q)  # components: [..., 0]=w, 1=x, 2=y, 3=z
    alpha_beta_gamma[..., 0] = np.arctan2(q[..., 3], q[..., 0]) + np.arctan2(-q[..., 1], q[..., 2])
    alpha_beta_gamma[..., 1] = 2*np.arccos(np.sqrt((q[..., 0]**2 + q[..., 3]**2)/n))
    alpha_beta_gamma[..., 2] = np.arctan2(q[..., 3], q[..., 0]) - np.arctan2(-q[..., 1], q[..., 2])
    return alpha_beta_gamma
def from_euler_angles(alpha_beta_gamma, beta=None, gamma=None):
    """Improve your life drastically

    Assumes the Euler angles correspond to the quaternion R via

        R = exp(alpha*z/2) * exp(beta*y/2) * exp(gamma*z/2)

    The angles must be given in radians for this to make any sense.

    NOTE: Before opening an issue reporting something "wrong" with this
    function, be sure to read all of the following page, *especially* the
    very last section about opening issues or pull requests.
    <https://github.com/moble/quaternion/wiki/Euler-angles-are-horrible>

    Parameters
    ----------
    alpha_beta_gamma: float or array of floats
        Either an array whose last dimension has size 3, holding the
        (alpha, beta, gamma) radian values for each rotation; or just the
        alpha values, in which case the next two arguments are required.
    beta: None, float, or array of floats
        If given, must broadcast against the first and third arguments.
    gamma: None, float, or array of floats
        If given, must broadcast against the first and second arguments.

    Returns
    -------
    R: quaternion array
        Same shape as the input, minus the final dimension.
    """
    # Accept either the packed (..., 3) form or three separate arrays.
    if gamma is None:
        angles = np.asarray(alpha_beta_gamma, dtype=np.double)
        alpha, beta, gamma = angles[..., 0], angles[..., 1], angles[..., 2]
    else:
        alpha = np.asarray(alpha_beta_gamma, dtype=np.double)
        beta = np.asarray(beta, dtype=np.double)
        gamma = np.asarray(gamma, dtype=np.double)
    # Half-angle terms shared between components.
    cos_b, sin_b = np.cos(beta/2), np.sin(beta/2)
    sum_half = (alpha + gamma) / 2
    diff_half = (alpha - gamma) / 2
    R = np.empty(np.broadcast(alpha, beta, gamma).shape + (4,), dtype=np.double)
    R[..., 0] = cos_b * np.cos(sum_half)    # scalar (w) component
    R[..., 1] = -sin_b * np.sin(diff_half)  # x component
    R[..., 2] = sin_b * np.cos(diff_half)   # y component
    R[..., 3] = cos_b * np.sin(sum_half)    # z component
    return as_quat_array(R)
def as_spherical_coords(q):
    """Return the spherical coordinates corresponding to this quaternion.

    Spherical coordinates carry less information than a quaternion, so
    this conversion is lossy: the returned angles describe the point(s)
    on the sphere to which the input quaternion(s) rotate the z axis.

    Parameters
    ----------
    q: quaternion or array of quaternions
        Need not be normalized, but must be nonzero.

    Returns
    -------
    vartheta_varphi: float array
        Shape is q.shape+(2,): the angles (vartheta, varphi) in radians,
        where the normalized input represents
        `exp(varphi*z/2) * exp(vartheta*y/2)` up to an arbitrary initial
        rotation about `z`.
    """
    # The (beta, alpha) Euler angles are exactly (vartheta, varphi);
    # reversing the first two components gives that ordering.
    angles = as_euler_angles(q)
    return angles[..., 1::-1]
def from_spherical_coords(theta_phi, phi=None):
    """Return the quaternion corresponding to these spherical coordinates

    Assumes the spherical coordinates correspond to the quaternion R via

        R = exp(phi*z/2) * exp(theta*y/2)

    The angles must be in radians for this to make any sense.  Note that
    this quaternion rotates `z` onto the point with the given spherical
    coordinates, but also rotates `x` and `y` onto the usual basis vectors
    (theta and phi, respectively) at that point.

    Parameters
    ----------
    theta_phi: float or array of floats
        Either an array whose last dimension has size 2, holding the
        (theta, phi) values in radians for each point; or just the theta
        values in radians, in which case the next argument must also be
        given.
    phi: None, float, or array of floats
        If given, must broadcast against the first argument.

    Returns
    -------
    R: quaternion array
        If the second argument is not given, the shape is the input shape
        minus its last dimension.  Otherwise the shape results from
        broadcasting the two inputs against each other.

    """
    # Accept either a packed (..., 2) array or two separate angle arrays.
    if phi is None:
        theta_phi = np.asarray(theta_phi, dtype=np.double)
        theta = theta_phi[..., 0]
        phi = theta_phi[..., 1]
    else:
        theta = np.asarray(theta_phi, dtype=np.double)
        phi = np.asarray(phi, dtype=np.double)
    # Half-angle factors for R = exp(phi*z/2) * exp(theta*y/2).
    cos_half_phi = np.cos(phi/2)
    sin_half_phi = np.sin(phi/2)
    cos_half_theta = np.cos(theta/2)
    sin_half_theta = np.sin(theta/2)
    R = np.empty(np.broadcast(theta, phi).shape + (4,), dtype=np.double)
    R[..., 0] = cos_half_phi*cos_half_theta   # scalar component
    R[..., 1] = -sin_half_phi*sin_half_theta  # x component
    R[..., 2] = cos_half_phi*sin_half_theta   # y component
    R[..., 3] = sin_half_phi*cos_half_theta   # z component
    return as_quat_array(R)
def rotate_vectors(R, v, axis=-1):
    """Rotate vectors by given quaternions

    For simplicity, this function simply converts the input
    quaternion(s) to a matrix, and rotates the input vector(s) by the
    usual matrix multiplication.  However, it should be noted that if
    each input quaternion is only used to rotate a single vector, it
    is more efficient (in terms of operation counts) to use the
    formula

        v' = v + 2 * r x (s * v + r x v) / m

    where x represents the cross product, s and r are the scalar and
    vector parts of the quaternion, respectively, and m is the sum of
    the squares of the components of the quaternion.  If you are
    looping over a very large number of quaternions, and just rotating
    a single vector each time, you might want to implement that
    alternative algorithm using numba (or something that doesn't use
    python).

    Parameters
    ==========
    R: quaternion array
        Quaternions by which to rotate the input vectors
    v: float array
        Three-vectors to be rotated.
    axis: int
        Axis of the `v` array to use as the vector dimension.  This
        axis of `v` must have length 3.

    Returns
    =======
    vprime: float array
        The rotated vectors.  This array has shape R.shape+v.shape.

    """
    R = np.asarray(R, dtype=np.quaternion)
    v = np.asarray(v, dtype=float)
    # Validate that `axis` really selects a length-3 vector dimension.
    if v.ndim < 1 or 3 not in v.shape:
        raise ValueError("Input `v` does not have at least one dimension of length 3")
    if v.shape[axis] != 3:
        raise ValueError("Input `v` axis {0} has length {1}, not 3.".format(axis, v.shape[axis]))
    m = as_rotation_matrix(R)
    # Use einsum's list-of-axes calling convention: matrix axes get labels
    # [0, m.ndim); the vector array gets fresh labels [m.ndim, m.ndim+v.ndim).
    m_axes = list(range(m.ndim))
    v_axes = list(range(m.ndim, m.ndim+v.ndim))
    # Output axes: the matrix batch axes first, then v's axes with the
    # chosen vector axis relabeled to the matrix row axis (m_axes[-2]).
    mv_axes = list(v_axes)
    mv_axes[axis] = m_axes[-2]
    mv_axes = m_axes[:-2] + mv_axes
    # The contraction happens over the matrix column axis (m_axes[-1]),
    # which is assigned to v's vector axis here.
    v_axes[axis] = m_axes[-1]
    return np.einsum(m, m_axes, v, v_axes, mv_axes)
def isclose(a, b, rtol=4*np.finfo(float).eps, atol=0.0, equal_nan=False):
    """
    Returns a boolean array where two arrays are element-wise equal within a
    tolerance.

    This function is essentially a copy of the `numpy.isclose` function,
    with different default tolerances and one minor changes necessary to
    deal correctly with quaternions.

    The tolerance values are positive, typically very small numbers.  The
    relative difference (`rtol` * abs(`b`)) and the absolute difference
    `atol` are added together to compare against the absolute difference
    between `a` and `b`.

    Parameters
    ----------
    a, b : array_like
        Input arrays to compare.
    rtol : float
        The relative tolerance parameter (see Notes).
    atol : float
        The absolute tolerance parameter (see Notes).
    equal_nan : bool
        Whether to compare NaN's as equal.  If True, NaN's in `a` will be
        considered equal to NaN's in `b` in the output array.

    Returns
    -------
    y : array_like
        Returns a boolean array of where `a` and `b` are equal within the
        given tolerance.  If both `a` and `b` are scalars, returns a single
        boolean value.

    See Also
    --------
    allclose

    Notes
    -----
    For finite values, isclose uses the following equation to test whether
    two floating point values are equivalent:

        absolute(`a` - `b`) <= (`atol` + `rtol` * absolute(`b`))

    The above equation is not symmetric in `a` and `b`, so that
    `isclose(a, b)` might be different from `isclose(b, a)` in
    some rare cases.

    Examples
    --------
    >>> quaternion.isclose([1e10*quaternion.x, 1e-7*quaternion.y], [1.00001e10*quaternion.x, 1e-8*quaternion.y],
    ...     rtol=1.e-5, atol=1.e-8)
    array([True, False])
    >>> quaternion.isclose([1e10*quaternion.x, 1e-8*quaternion.y], [1.00001e10*quaternion.x, 1e-9*quaternion.y],
    ...     rtol=1.e-5, atol=1.e-8)
    array([True, True])
    >>> quaternion.isclose([1e10*quaternion.x, 1e-8*quaternion.y], [1.0001e10*quaternion.x, 1e-9*quaternion.y],
    ...     rtol=1.e-5, atol=1.e-8)
    array([False, True])
    >>> quaternion.isclose([quaternion.x, np.nan*quaternion.y], [quaternion.x, np.nan*quaternion.y])
    array([True, False])
    >>> quaternion.isclose([quaternion.x, np.nan*quaternion.y], [quaternion.x, np.nan*quaternion.y], equal_nan=True)
    array([True, True])
    """
    def within_tol(x, y, atol, rtol):
        # Core element-wise test: |x - y| <= atol + rtol*|y|.  The 'invalid'
        # warning is suppressed so NaN comparisons quietly produce False.
        with np.errstate(invalid='ignore'):
            result = np.less_equal(abs(x-y), atol + rtol * abs(y))
        # Two scalar inputs should yield a plain bool, matching numpy.isclose.
        if np.isscalar(a) and np.isscalar(b):
            result = bool(result)
        return result
    # NOTE(review): np.array(..., copy=False) raises in NumPy >= 2.0 when a
    # copy is actually required -- confirm the supported NumPy range.
    x = np.array(a, copy=False, subok=True, ndmin=1)
    y = np.array(b, copy=False, subok=True, ndmin=1)
    # Make sure y is an inexact type to avoid bad behavior on abs(MIN_INT).
    # This will cause casting of x later. Also, make sure to allow subclasses
    # (e.g., for numpy.ma).
    try:
        dt = np.result_type(y, 1.)
    except TypeError:
        # result_type cannot promote the quaternion dtype against a float;
        # fall back to the quaternion dtype explicitly (the "minor change"
        # relative to numpy.isclose mentioned in the docstring).
        dt = np.dtype(np.quaternion)
    y = np.array(y, dtype=dt, copy=False, subok=True)
    xfin = np.isfinite(x)
    yfin = np.isfinite(y)
    if np.all(xfin) and np.all(yfin):
        # Fast path: everything finite, compare directly.
        return within_tol(x, y, atol, rtol)
    else:
        finite = xfin & yfin
        cond = np.zeros_like(finite, subok=True)
        # Because we're using boolean indexing, x & y must be the same shape.
        # Ideally, we'd just do x, y = broadcast_arrays(x, y). It's in
        # lib.stride_tricks, though, so we can't import it here.
        x = x * np.ones_like(cond)
        y = y * np.ones_like(cond)
        # Avoid subtraction with infinite/nan values...
        cond[finite] = within_tol(x[finite], y[finite], atol, rtol)
        # Check for equality of infinite values...
        cond[~finite] = (x[~finite] == y[~finite])
        if equal_nan:
            # Make NaN == NaN
            both_nan = np.isnan(x) & np.isnan(y)
            cond[both_nan] = both_nan[both_nan]
        if np.isscalar(a) and np.isscalar(b):
            return bool(cond)
        else:
            return cond
def allclose(a, b, rtol=4*np.finfo(float).eps, atol=0.0, equal_nan=False, verbose=False):
    """
    Returns True if two arrays are element-wise equal within a tolerance.

    This function is essentially a wrapper for the `quaternion.isclose`
    function, returning a single boolean that is True exactly when every
    element of the `quaternion.isclose` output is True.

    Note that this function has stricter default tolerances than the
    `numpy.allclose` function, as well as the additional `verbose` option.

    Parameters
    ----------
    a, b : array_like
        Input arrays to compare.
    rtol : float
        The relative tolerance parameter (see Notes).
    atol : float
        The absolute tolerance parameter (see Notes).
    equal_nan : bool
        Whether to compare NaN's as equal.  If True, NaN's in `a` will be
        considered equal to NaN's in `b`.
    verbose : bool
        If True and the result is False, print each pair of non-close
        elements along with its index.

    Returns
    -------
    allclose : bool
        True if the two arrays are equal within the given tolerance;
        False otherwise.

    See Also
    --------
    isclose, numpy.all, numpy.any, numpy.allclose

    Notes
    -----
    If the following equation is element-wise True, then allclose returns
    True.

        absolute(`a` - `b`) <= (`atol` + `rtol` * absolute(`b`))

    The above equation is not symmetric in `a` and `b`, so that
    `allclose(a, b)` might be different from `allclose(b, a)` in
    some rare cases.

    """
    comparison = isclose(a, b, rtol=rtol, atol=atol, equal_nan=equal_nan)
    all_close = np.all(comparison)
    if verbose and not all_close:
        # Report each failing element by its multi-index.
        print('Non-close values:')
        for index in np.argwhere(comparison == False):
            index = tuple(index)
            print('\n    x[{0}]={1}\n    y[{0}]={2}'.format(index, a[index], b[index]))
    return all_close
|
moble/quaternion
|
__init__.py
|
as_spinor_array
|
python
|
def as_spinor_array(a):
    """View a quaternion array as spinors in two-complex representation

    This function is relatively slow and scales poorly, because memory
    copying is apparently involved -- the "advanced indexing" used to
    swap the columns forces a copy.

    Parameters
    ----------
    a : array of quaternions
        The dtype must be the quaternion dtype (asserted below).

    Returns
    -------
    spinors : complex array
        Shape is a.shape+(2,); each quaternion (w, x, y, z) becomes the
        complex pair (w+i*z, y+i*x), per the column order [0, 3, 2, 1].

    """
    a = np.atleast_1d(a)
    assert a.dtype == np.dtype(np.quaternion)
    # I'm not sure why it has to be so complicated, but all of these steps
    # appear to be necessary in this case.
    # np.float/np.complex were removed in NumPy 1.24; np.float64/np.complex128
    # are the identical concrete dtypes those aliases resolved to.
    return a.view(np.float64).reshape(a.shape + (4,))[..., [0, 3, 2, 1]].ravel().view(np.complex128).reshape(a.shape + (2,))
|
View a quaternion array as spinors in two-complex representation
This function is relatively slow and scales poorly, because memory
copying is apparently involved -- I think it's due to the
"advanced indexing" required to swap the columns.
|
train
|
https://github.com/moble/quaternion/blob/7a323e81b391d6892e2874073e495e0beb057e85/__init__.py#L120-L132
| null |
# Copyright (c) 2017, Michael Boyle
# See LICENSE file for details: <https://github.com/moble/quaternion/blob/master/LICENSE>
from __future__ import division, print_function, absolute_import
import numpy as np
from .numpy_quaternion import (quaternion, _eps,
slerp_evaluate, squad_evaluate,
# slerp_vectorized, squad_vectorized,
# slerp, squad,
)
from .quaternion_time_series import slerp, squad, integrate_angular_velocity, minimal_rotation
from .calculus import derivative, definite_integral, indefinite_integral
from .means import mean_rotor_in_chordal_metric, optimal_alignment_in_chordal_metric
from ._version import __version__
__doc_title__ = "Quaternion dtype for NumPy"
__doc__ = "Adds a quaternion dtype to NumPy."
__all__ = ['quaternion',
'as_quat_array', 'as_spinor_array',
'as_float_array', 'from_float_array',
'as_rotation_matrix', 'from_rotation_matrix',
'as_rotation_vector', 'from_rotation_vector',
'as_euler_angles', 'from_euler_angles',
'as_spherical_coords', 'from_spherical_coords',
'rotate_vectors', 'allclose',
'rotor_intrinsic_distance', 'rotor_chordal_distance',
'rotation_intrinsic_distance', 'rotation_chordal_distance',
'slerp_evaluate', 'squad_evaluate',
'zero', 'one', 'x', 'y', 'z', 'integrate_angular_velocity',
'squad', 'slerp', 'derivative', 'definite_integral', 'indefinite_integral']
# Refuse to proceed if NumPy already exposes a quaternion type (e.g. this
# module was imported twice, or another package registered one first).
if 'quaternion' in np.__dict__:
    raise RuntimeError('The NumPy package already has a quaternion type')
# Attach the scalar type and register its dtype with NumPy's type registry.
np.quaternion = quaternion
# NOTE(review): np.typeDict was removed in NumPy 1.24+ (np.sctypeDict is the
# surviving spelling) -- confirm the supported NumPy range.
np.typeDict['quaternion'] = np.dtype(quaternion)
# Convenient constants: the zero quaternion, the identity, and the three
# imaginary basis quaternions.
zero = np.quaternion(0, 0, 0, 0)
one = np.quaternion(1, 0, 0, 0)
x = np.quaternion(0, 1, 0, 0)
y = np.quaternion(0, 0, 1, 0)
z = np.quaternion(0, 0, 0, 1)
# Re-export distance functions found on the np namespace (presumably attached
# by the compiled extension on import -- TODO confirm).
rotor_intrinsic_distance = np.rotor_intrinsic_distance
rotor_chordal_distance = np.rotor_chordal_distance
rotation_intrinsic_distance = np.rotation_intrinsic_distance
rotation_chordal_distance = np.rotation_chordal_distance
def as_float_array(a):
    """View the quaternion array as an array of floats

    This function is fast (of order 1 microsecond) because no data is
    copied; the returned quantity is just a "view" of the original.

    The output view has one more dimension (of size 4) than the input
    array, but is otherwise the same shape.

    """
    quat_array = np.asarray(a, dtype=np.quaternion)
    # Reinterpret each quaternion's storage as four contiguous doubles.
    return quat_array.view((np.double, 4))
def as_quat_array(a):
    """View a float array as an array of quaternions

    The input array must have a final dimension whose size is
    divisible by four (or better yet *is* 4), because successive
    indices in that last dimension will be considered successive
    components of the output quaternion.

    This function is usually fast (of order 1 microsecond) because no
    data is copied; the returned quantity is just a "view" of the
    original.  However, if the input array is not C-contiguous
    (basically, as you increment the index into the last dimension of
    the array, you just move to the neighboring float in memory), the
    data will need to be copied which may be quite slow.  Therefore,
    you should try to ensure that the input array is in that order.
    Slices and transpositions will frequently break that rule.

    We will not convert back from a two-spinor array because there is
    no unique convention for them, so I don't want to mess with that.
    Also, we want to discourage users from the slow, memory-copying
    process of swapping columns required for useful definitions of
    the two-spinors.

    """
    a = np.asarray(a, dtype=np.double)
    # fast path: a single quaternion's four components.
    if a.shape == (4,):
        return quaternion(a[0], a[1], a[2], a[3])
    # view only works if the last axis is C-contiguous; copy if it isn't.
    if not a.flags['C_CONTIGUOUS'] or a.strides[-1] != a.itemsize:
        a = a.copy(order='C')
    try:
        av = a.view(np.quaternion)
    except ValueError as e:
        # Re-raise with a hint about the most common cause: a last-axis
        # size not divisible by 4.
        message = (str(e) + '\n '
                   + 'Failed to view input data as a series of quaternions. '
                   + 'Please ensure that the last dimension has size divisible by 4.\n '
                   + 'Input data has shape {0} and dtype {1}.'.format(a.shape, a.dtype))
        raise ValueError(message)
    # special case: don't create an axis for a single quaternion, to
    # match the output of `as_float_array`
    if av.shape[-1] == 1:
        av = av.reshape(a.shape[:-1])
    return av
def from_float_array(a):
    """View a float array as quaternions; alias for `as_quat_array`"""
    return as_quat_array(a)
def as_rotation_matrix(q):
    """Convert input quaternion to 3x3 rotation matrix

    Parameters
    ----------
    q: quaternion or array of quaternions
        The quaternion(s) need not be normalized, but must all be nonzero

    Returns
    -------
    rot: float array
        Output shape is q.shape+(3,3).  This matrix should multiply (from
        the left) a column vector to produce the rotated column vector.

    Raises
    ------
    ZeroDivisionError
        If any of the input quaternions have norm 0.0.

    """
    if q.shape == () and not isinstance(q, np.ndarray):  # This is just a single quaternion
        # NOTE(review): the division by n in the unnormalized branch implies
        # norm() here is the *squared* norm -- confirm against the extension.
        n = q.norm()
        if n == 0.0:
            raise ZeroDivisionError("Input to `as_rotation_matrix({0})` has zero norm".format(q))
        elif abs(n-1.0) < _eps:  # Input q is basically normalized
            # Standard unit-quaternion -> matrix formula; no division needed.
            return np.array([
                [1 - 2*(q.y**2 + q.z**2),      2*(q.x*q.y - q.z*q.w),        2*(q.x*q.z + q.y*q.w)],
                [2*(q.x*q.y + q.z*q.w),        1 - 2*(q.x**2 + q.z**2),      2*(q.y*q.z - q.x*q.w)],
                [2*(q.x*q.z - q.y*q.w),        2*(q.y*q.z + q.x*q.w),        1 - 2*(q.x**2 + q.y**2)]
            ])
        else:  # Input q is not normalized
            # Same formula, with each quadratic term divided by n.
            return np.array([
                [1 - 2*(q.y**2 + q.z**2)/n,    2*(q.x*q.y - q.z*q.w)/n,      2*(q.x*q.z + q.y*q.w)/n],
                [2*(q.x*q.y + q.z*q.w)/n,      1 - 2*(q.x**2 + q.z**2)/n,    2*(q.y*q.z - q.x*q.w)/n],
                [2*(q.x*q.z - q.y*q.w)/n,      2*(q.y*q.z + q.x*q.w)/n,      1 - 2*(q.x**2 + q.y**2)/n]
            ])
    else:  # This is an array of quaternions
        n = np.norm(q)
        if np.any(n == 0.0):
            raise ZeroDivisionError("Array input to `as_rotation_matrix` has at least one element with zero norm")
        else:  # Assume input q is not normalized
            # Vectorized form of the unnormalized-branch formula above, with
            # components accessed as floats: [0]=w, [1]=x, [2]=y, [3]=z.
            m = np.empty(q.shape + (3, 3))
            q = as_float_array(q)
            m[..., 0, 0] = 1.0 - 2*(q[..., 2]**2 + q[..., 3]**2)/n
            m[..., 0, 1] = 2*(q[..., 1]*q[..., 2] - q[..., 3]*q[..., 0])/n
            m[..., 0, 2] = 2*(q[..., 1]*q[..., 3] + q[..., 2]*q[..., 0])/n
            m[..., 1, 0] = 2*(q[..., 1]*q[..., 2] + q[..., 3]*q[..., 0])/n
            m[..., 1, 1] = 1.0 - 2*(q[..., 1]**2 + q[..., 3]**2)/n
            m[..., 1, 2] = 2*(q[..., 2]*q[..., 3] - q[..., 1]*q[..., 0])/n
            m[..., 2, 0] = 2*(q[..., 1]*q[..., 3] - q[..., 2]*q[..., 0])/n
            m[..., 2, 1] = 2*(q[..., 2]*q[..., 3] + q[..., 1]*q[..., 0])/n
            m[..., 2, 2] = 1.0 - 2*(q[..., 1]**2 + q[..., 2]**2)/n
            return m
def from_rotation_matrix(rot, nonorthogonal=True):
    """Convert input 3x3 rotation matrix to unit quaternion

    By default, if scipy.linalg is available, this function uses
    Bar-Itzhack's algorithm to allow for non-orthogonal matrices.
    [J. Guidance, Vol. 23, No. 6, p. 1085 <http://dx.doi.org/10.2514/2.4654>]
    This will almost certainly be quite a bit slower than simpler versions,
    though it will be more robust to numerical errors in the rotation matrix.
    Also note that Bar-Itzhack uses some pretty weird conventions.  The last
    component of the quaternion appears to represent the scalar, and the
    quaternion itself is conjugated relative to the convention used
    throughout this module.

    If scipy.linalg is not available or if the optional
    `nonorthogonal` parameter is set to `False`, this function falls
    back to the possibly faster, but less robust, algorithm of Markley
    [J. Guidance, Vol. 31, No. 2, p. 440
    <http://dx.doi.org/10.2514/1.31730>].

    Parameters
    ----------
    rot: (...Nx3x3) float array
        Each 3x3 matrix represents a rotation by multiplying (from the left)
        a column vector to produce a rotated column vector.  Note that this
        input may actually have ndims>3; it is just assumed that the last
        two dimensions have size 3, representing the matrix.
    nonorthogonal: bool, optional
        If scipy.linalg is available, use the more robust algorithm of
        Bar-Itzhack.  Default value is True.

    Returns
    -------
    q: array of quaternions
        Unit quaternions resulting in rotations corresponding to input
        rotations.  Output shape is rot.shape[:-2].

    Raises
    ------
    LinAlgError
        If any of the eigenvalue solutions does not converge

    """
    try:
        from scipy import linalg
    except ImportError:
        linalg = False
    rot = np.array(rot, copy=False)
    shape = rot.shape[:-2]
    if linalg and nonorthogonal:
        from operator import mul
        from functools import reduce
        # Build Bar-Itzhack's symmetric K3 matrix; the eigenvector of its
        # largest eigenvalue encodes the optimal quaternion (in his
        # scalar-last, conjugated convention).
        K3 = np.empty(shape+(4, 4))
        K3[..., 0, 0] = (rot[..., 0, 0] - rot[..., 1, 1] - rot[..., 2, 2])/3.0
        K3[..., 0, 1] = (rot[..., 1, 0] + rot[..., 0, 1])/3.0
        K3[..., 0, 2] = (rot[..., 2, 0] + rot[..., 0, 2])/3.0
        K3[..., 0, 3] = (rot[..., 1, 2] - rot[..., 2, 1])/3.0
        K3[..., 1, 0] = K3[..., 0, 1]
        K3[..., 1, 1] = (rot[..., 1, 1] - rot[..., 0, 0] - rot[..., 2, 2])/3.0
        K3[..., 1, 2] = (rot[..., 2, 1] + rot[..., 1, 2])/3.0
        K3[..., 1, 3] = (rot[..., 2, 0] - rot[..., 0, 2])/3.0
        K3[..., 2, 0] = K3[..., 0, 2]
        K3[..., 2, 1] = K3[..., 1, 2]
        K3[..., 2, 2] = (rot[..., 2, 2] - rot[..., 0, 0] - rot[..., 1, 1])/3.0
        K3[..., 2, 3] = (rot[..., 0, 1] - rot[..., 1, 0])/3.0
        K3[..., 3, 0] = K3[..., 0, 3]
        K3[..., 3, 1] = K3[..., 1, 3]
        K3[..., 3, 2] = K3[..., 2, 3]
        K3[..., 3, 3] = (rot[..., 0, 0] + rot[..., 1, 1] + rot[..., 2, 2])/3.0
        if not shape:
            # Single matrix: solve for the top eigenvector only.
            q = zero.copy()
            # NOTE(review): the `eigvals` keyword was removed from
            # scipy.linalg.eigh in SciPy 1.9 (renamed `subset_by_index`) --
            # confirm the supported SciPy range before upgrading.
            eigvals, eigvecs = linalg.eigh(K3.T, eigvals=(3, 3))
            # Translate Bar-Itzhack's scalar-last, conjugated convention back
            # to this module's scalar-first convention.
            q.components[0] = eigvecs[-1]
            q.components[1:] = -eigvecs[:-1].flatten()
            return q
        else:
            # Array of matrices: loop over the batch, one eigensolve each.
            # FIX: np.float was removed in NumPy 1.24; the builtin float is
            # the identical float64 dtype.
            q = np.empty(shape+(4,), dtype=float)
            for flat_index in range(reduce(mul, shape)):
                multi_index = np.unravel_index(flat_index, shape)
                eigvals, eigvecs = linalg.eigh(K3[multi_index], eigvals=(3, 3))
                q[multi_index+(0,)] = eigvecs[-1]
                q[multi_index+(slice(1,None),)] = -eigvecs[:-1].flatten()
            return as_quat_array(q)
    else:  # No scipy.linalg or not `nonorthogonal`
        # Markley's method: choose, per element, the largest of four
        # diagonal-based quantities to avoid catastrophic cancellation,
        # compute the corresponding unnormalized quaternion, then normalize.
        diagonals = np.empty(shape+(4,))
        diagonals[..., 0] = rot[..., 0, 0]
        diagonals[..., 1] = rot[..., 1, 1]
        diagonals[..., 2] = rot[..., 2, 2]
        diagonals[..., 3] = rot[..., 0, 0] + rot[..., 1, 1] + rot[..., 2, 2]
        indices = np.argmax(diagonals, axis=-1)
        q = diagonals  # reuse storage space
        indices_i = (indices == 0)
        if np.any(indices_i):
            if indices_i.shape == ():
                indices_i = Ellipsis
            rot_i = rot[indices_i, :, :]
            q[indices_i, 0] = rot_i[..., 2, 1] - rot_i[..., 1, 2]
            q[indices_i, 1] = 1 + rot_i[..., 0, 0] - rot_i[..., 1, 1] - rot_i[..., 2, 2]
            q[indices_i, 2] = rot_i[..., 0, 1] + rot_i[..., 1, 0]
            q[indices_i, 3] = rot_i[..., 0, 2] + rot_i[..., 2, 0]
        indices_i = (indices == 1)
        if np.any(indices_i):
            if indices_i.shape == ():
                indices_i = Ellipsis
            rot_i = rot[indices_i, :, :]
            q[indices_i, 0] = rot_i[..., 0, 2] - rot_i[..., 2, 0]
            q[indices_i, 1] = rot_i[..., 1, 0] + rot_i[..., 0, 1]
            q[indices_i, 2] = 1 - rot_i[..., 0, 0] + rot_i[..., 1, 1] - rot_i[..., 2, 2]
            q[indices_i, 3] = rot_i[..., 1, 2] + rot_i[..., 2, 1]
        indices_i = (indices == 2)
        if np.any(indices_i):
            if indices_i.shape == ():
                indices_i = Ellipsis
            rot_i = rot[indices_i, :, :]
            q[indices_i, 0] = rot_i[..., 1, 0] - rot_i[..., 0, 1]
            q[indices_i, 1] = rot_i[..., 2, 0] + rot_i[..., 0, 2]
            q[indices_i, 2] = rot_i[..., 2, 1] + rot_i[..., 1, 2]
            q[indices_i, 3] = 1 - rot_i[..., 0, 0] - rot_i[..., 1, 1] + rot_i[..., 2, 2]
        indices_i = (indices == 3)
        if np.any(indices_i):
            if indices_i.shape == ():
                indices_i = Ellipsis
            rot_i = rot[indices_i, :, :]
            q[indices_i, 0] = 1 + rot_i[..., 0, 0] + rot_i[..., 1, 1] + rot_i[..., 2, 2]
            q[indices_i, 1] = rot_i[..., 2, 1] - rot_i[..., 1, 2]
            q[indices_i, 2] = rot_i[..., 0, 2] - rot_i[..., 2, 0]
            q[indices_i, 3] = rot_i[..., 1, 0] - rot_i[..., 0, 1]
        q /= np.linalg.norm(q, axis=-1)[..., np.newaxis]
        return as_quat_array(q)
def as_rotation_vector(q):
    """Convert input quaternion to the axis-angle representation

    Note that if any of the input quaternions has norm zero, no error is
    raised, but NaNs will appear in the output.

    Parameters
    ----------
    q: quaternion or array of quaternions
        The quaternion(s) need not be normalized, but must all be nonzero

    Returns
    -------
    rot: float array
        Output shape is q.shape+(3,).  Each vector represents the axis of
        the rotation, with norm proportional to the angle of the rotation
        in radians.

    """
    # The log of a unit rotor has zero scalar part and vector part equal to
    # half the rotation vector, so double it and drop the scalar slot.
    unit = np.normalized(q)
    half_rotation = np.log(unit)
    return as_float_array(2*half_rotation)[..., 1:]
def from_rotation_vector(rot):
    """Convert input 3-vector in axis-angle representation to unit quaternion

    Parameters
    ----------
    rot: (Nx3) float array
        Each vector represents the axis of the rotation, with norm
        proportional to the angle of the rotation in radians.

    Returns
    -------
    q: array of quaternions
        Unit quaternions resulting in rotations corresponding to input
        rotations.  Output shape is rot.shape[:-1].

    """
    rot = np.array(rot, copy=False)
    # Place half the rotation vector in the imaginary slots of a pure
    # quaternion, then exponentiate to get the corresponding unit rotor.
    components = np.zeros(rot.shape[:-1]+(4,))
    components[..., 1:] = rot[...]/2
    return np.exp(as_quat_array(components))
def as_euler_angles(q):
    """Open Pandora's Box

    If somebody is trying to make you use Euler angles, tell them no, and
    walk away, and go and tell your mum.

    You don't want to use Euler angles.  They are awful.  Stay away.  It's
    one thing to convert from Euler angles to quaternions; at least you're
    moving in the right direction.  But to go the other way?!  It's just not
    right.

    Assumes the Euler angles correspond to the quaternion R via

        R = exp(alpha*z/2) * exp(beta*y/2) * exp(gamma*z/2)

    The angles are naturally in radians.

    NOTE: Before opening an issue reporting something "wrong" with this
    function, be sure to read all of the following page, *especially* the
    very last section about opening issues or pull requests.
    <https://github.com/moble/quaternion/wiki/Euler-angles-are-horrible>

    Parameters
    ----------
    q: quaternion or array of quaternions
        The quaternion(s) need not be normalized, but must all be nonzero

    Returns
    -------
    alpha_beta_gamma: float array
        Output shape is q.shape+(3,).  These represent the angles (alpha,
        beta, gamma) in radians, where the normalized input quaternion
        represents `exp(alpha*z/2) * exp(beta*y/2) * exp(gamma*z/2)`.

    Raises
    ------
    AllHell
        ...if you try to actually use Euler angles, when you could have
        been using quaternions like a sensible person.

    """
    # FIX: np.float was removed in NumPy 1.24; the builtin float is the
    # identical float64 dtype.
    alpha_beta_gamma = np.empty(q.shape + (3,), dtype=float)
    # NOTE(review): the division inside the arccos implies np.norm returns
    # the *squared* norm for quaternions -- confirm against the extension.
    n = np.norm(q)
    # Work on the raw components: [0]=w, [1]=x, [2]=y, [3]=z.
    q = as_float_array(q)
    alpha_beta_gamma[..., 0] = np.arctan2(q[..., 3], q[..., 0]) + np.arctan2(-q[..., 1], q[..., 2])
    alpha_beta_gamma[..., 1] = 2*np.arccos(np.sqrt((q[..., 0]**2 + q[..., 3]**2)/n))
    alpha_beta_gamma[..., 2] = np.arctan2(q[..., 3], q[..., 0]) - np.arctan2(-q[..., 1], q[..., 2])
    return alpha_beta_gamma
def from_euler_angles(alpha_beta_gamma, beta=None, gamma=None):
    """Improve your life drastically

    Assumes the Euler angles correspond to the quaternion R via

        R = exp(alpha*z/2) * exp(beta*y/2) * exp(gamma*z/2)

    The angles must be in radians for this to make any sense.

    NOTE: Before opening an issue reporting something "wrong" with this
    function, be sure to read all of the following page, *especially* the
    very last section about opening issues or pull requests.
    <https://github.com/moble/quaternion/wiki/Euler-angles-are-horrible>

    Parameters
    ----------
    alpha_beta_gamma: float or array of floats
        Either an array whose last dimension has size 3, holding the
        (alpha, beta, gamma) radian values for each rotation; or just the
        alpha values, in which case the next two arguments must also be
        given.
    beta: None, float, or array of floats
        If given, must broadcast against the first and third arguments.
    gamma: None, float, or array of floats
        If given, must broadcast against the first and second arguments.

    Returns
    -------
    R: quaternion array
        Same shape as the input, except that the last dimension is removed.

    """
    # Accept either a packed (..., 3) array or three separate angle arrays.
    if gamma is None:
        packed = np.asarray(alpha_beta_gamma, dtype=np.double)
        alpha = packed[..., 0]
        beta = packed[..., 1]
        gamma = packed[..., 2]
    else:
        alpha = np.asarray(alpha_beta_gamma, dtype=np.double)
        beta = np.asarray(beta, dtype=np.double)
        gamma = np.asarray(gamma, dtype=np.double)
    # Half-angle factors for R = exp(alpha*z/2) * exp(beta*y/2) * exp(gamma*z/2).
    cos_half_beta = np.cos(beta/2)
    sin_half_beta = np.sin(beta/2)
    cos_half_sum = np.cos((alpha+gamma)/2)
    sin_half_sum = np.sin((alpha+gamma)/2)
    cos_half_diff = np.cos((alpha-gamma)/2)
    sin_half_diff = np.sin((alpha-gamma)/2)
    R = np.empty(np.broadcast(alpha, beta, gamma).shape + (4,), dtype=np.double)
    R[..., 0] = cos_half_beta*cos_half_sum    # scalar component
    R[..., 1] = -sin_half_beta*sin_half_diff  # x component
    R[..., 2] = sin_half_beta*cos_half_diff   # y component
    R[..., 3] = cos_half_beta*sin_half_sum    # z component
    return as_quat_array(R)
def as_spherical_coords(q):
    """Return spherical coordinates (vartheta, varphi) for this quaternion

    Spherical coordinates carry less information than a quaternion, so this
    conversion is necessarily lossy.  The returned angles describe the
    point(s) on the sphere onto which the input quaternion(s) rotate the
    z axis.

    Parameters
    ----------
    q: quaternion or array of quaternions
        The quaternion(s) need not be normalized, but must be nonzero

    Returns
    -------
    vartheta_varphi: float array
        Output shape is q.shape+(2,).  These are the angles (vartheta,
        varphi) in radians, where the normalized input quaternion represents
        `exp(varphi*z/2) * exp(vartheta*y/2)`, up to an arbitrary initial
        rotation about `z`.

    """
    # Euler angles come back as (alpha, beta, gamma); reversing the first
    # two and dropping gamma yields (beta, alpha) == (vartheta, varphi).
    euler = as_euler_angles(q)
    return euler[..., 1::-1]
def from_spherical_coords(theta_phi, phi=None):
    """Return the quaternion corresponding to these spherical coordinates

    Assumes the spherical coordinates correspond to the quaternion R via

        R = exp(phi*z/2) * exp(theta*y/2)

    The angles must be in radians for this to make any sense.  Note that
    this quaternion rotates `z` onto the point with the given spherical
    coordinates, but also rotates `x` and `y` onto the usual basis vectors
    (theta and phi, respectively) at that point.

    Parameters
    ----------
    theta_phi: float or array of floats
        Either an array whose last dimension has size 2, holding the
        (theta, phi) values in radians for each point; or just the theta
        values in radians, in which case the next argument must also be
        given.
    phi: None, float, or array of floats
        If given, must broadcast against the first argument.

    Returns
    -------
    R: quaternion array
        If the second argument is not given, the shape is the input shape
        minus its last dimension.  Otherwise the shape results from
        broadcasting the two inputs against each other.

    """
    # Accept either a packed (..., 2) array or two separate angle arrays.
    if phi is None:
        theta_phi = np.asarray(theta_phi, dtype=np.double)
        theta = theta_phi[..., 0]
        phi = theta_phi[..., 1]
    else:
        theta = np.asarray(theta_phi, dtype=np.double)
        phi = np.asarray(phi, dtype=np.double)
    # Half-angle factors for R = exp(phi*z/2) * exp(theta*y/2).
    cos_half_phi = np.cos(phi/2)
    sin_half_phi = np.sin(phi/2)
    cos_half_theta = np.cos(theta/2)
    sin_half_theta = np.sin(theta/2)
    R = np.empty(np.broadcast(theta, phi).shape + (4,), dtype=np.double)
    R[..., 0] = cos_half_phi*cos_half_theta   # scalar component
    R[..., 1] = -sin_half_phi*sin_half_theta  # x component
    R[..., 2] = cos_half_phi*sin_half_theta   # y component
    R[..., 3] = sin_half_phi*cos_half_theta   # z component
    return as_quat_array(R)
def rotate_vectors(R, v, axis=-1):
    """Rotate vectors by given quaternions

    For simplicity, this function simply converts the input
    quaternion(s) to a matrix, and rotates the input vector(s) by the
    usual matrix multiplication.  However, it should be noted that if
    each input quaternion is only used to rotate a single vector, it
    is more efficient (in terms of operation counts) to use the
    formula

        v' = v + 2 * r x (s * v + r x v) / m

    where x represents the cross product, s and r are the scalar and
    vector parts of the quaternion, respectively, and m is the sum of
    the squares of the components of the quaternion.  If you are
    looping over a very large number of quaternions, and just rotating
    a single vector each time, you might want to implement that
    alternative algorithm using numba (or something that doesn't use
    python).

    Parameters
    ==========
    R: quaternion array
        Quaternions by which to rotate the input vectors
    v: float array
        Three-vectors to be rotated.
    axis: int
        Axis of the `v` array to use as the vector dimension.  This
        axis of `v` must have length 3.

    Returns
    =======
    vprime: float array
        The rotated vectors.  This array has shape R.shape+v.shape.

    """
    R = np.asarray(R, dtype=np.quaternion)
    v = np.asarray(v, dtype=float)
    # Validate that `axis` really selects a length-3 vector dimension.
    if v.ndim < 1 or 3 not in v.shape:
        raise ValueError("Input `v` does not have at least one dimension of length 3")
    if v.shape[axis] != 3:
        raise ValueError("Input `v` axis {0} has length {1}, not 3.".format(axis, v.shape[axis]))
    m = as_rotation_matrix(R)
    # Use einsum's list-of-axes calling convention: matrix axes get labels
    # [0, m.ndim); the vector array gets fresh labels [m.ndim, m.ndim+v.ndim).
    m_axes = list(range(m.ndim))
    v_axes = list(range(m.ndim, m.ndim+v.ndim))
    # Output axes: the matrix batch axes first, then v's axes with the
    # chosen vector axis relabeled to the matrix row axis (m_axes[-2]).
    mv_axes = list(v_axes)
    mv_axes[axis] = m_axes[-2]
    mv_axes = m_axes[:-2] + mv_axes
    # The contraction happens over the matrix column axis (m_axes[-1]),
    # which is assigned to v's vector axis here.
    v_axes[axis] = m_axes[-1]
    return np.einsum(m, m_axes, v, v_axes, mv_axes)
def isclose(a, b, rtol=4*np.finfo(float).eps, atol=0.0, equal_nan=False):
    """
    Returns a boolean array where two arrays are element-wise equal within a
    tolerance.

    This function is essentially a copy of the `numpy.isclose` function,
    with different default tolerances and one minor changes necessary to
    deal correctly with quaternions.

    The tolerance values are positive, typically very small numbers.  The
    relative difference (`rtol` * abs(`b`)) and the absolute difference
    `atol` are added together to compare against the absolute difference
    between `a` and `b`.

    Parameters
    ----------
    a, b : array_like
        Input arrays to compare.
    rtol : float
        The relative tolerance parameter (see Notes).
    atol : float
        The absolute tolerance parameter (see Notes).
    equal_nan : bool
        Whether to compare NaN's as equal.  If True, NaN's in `a` will be
        considered equal to NaN's in `b` in the output array.

    Returns
    -------
    y : array_like
        Returns a boolean array of where `a` and `b` are equal within the
        given tolerance.  If both `a` and `b` are scalars, returns a single
        boolean value.

    See Also
    --------
    allclose

    Notes
    -----
    For finite values, isclose uses the following equation to test whether
    two floating point values are equivalent:

        absolute(`a` - `b`) <= (`atol` + `rtol` * absolute(`b`))

    The above equation is not symmetric in `a` and `b`, so that
    `isclose(a, b)` might be different from `isclose(b, a)` in
    some rare cases.

    Examples
    --------
    >>> quaternion.isclose([1e10*quaternion.x, 1e-7*quaternion.y], [1.00001e10*quaternion.x, 1e-8*quaternion.y],
    ...     rtol=1.e-5, atol=1.e-8)
    array([True, False])
    >>> quaternion.isclose([1e10*quaternion.x, 1e-8*quaternion.y], [1.00001e10*quaternion.x, 1e-9*quaternion.y],
    ...     rtol=1.e-5, atol=1.e-8)
    array([True, True])
    >>> quaternion.isclose([1e10*quaternion.x, 1e-8*quaternion.y], [1.0001e10*quaternion.x, 1e-9*quaternion.y],
    ...     rtol=1.e-5, atol=1.e-8)
    array([False, True])
    >>> quaternion.isclose([quaternion.x, np.nan*quaternion.y], [quaternion.x, np.nan*quaternion.y])
    array([True, False])
    >>> quaternion.isclose([quaternion.x, np.nan*quaternion.y], [quaternion.x, np.nan*quaternion.y], equal_nan=True)
    array([True, True])
    """
    def within_tol(x, y, atol, rtol):
        # Core element-wise test: |x - y| <= atol + rtol*|y|.  The 'invalid'
        # warning is suppressed so NaN comparisons quietly produce False.
        with np.errstate(invalid='ignore'):
            result = np.less_equal(abs(x-y), atol + rtol * abs(y))
        # Two scalar inputs should yield a plain bool, matching numpy.isclose.
        if np.isscalar(a) and np.isscalar(b):
            result = bool(result)
        return result
    # NOTE(review): np.array(..., copy=False) raises in NumPy >= 2.0 when a
    # copy is actually required -- confirm the supported NumPy range.
    x = np.array(a, copy=False, subok=True, ndmin=1)
    y = np.array(b, copy=False, subok=True, ndmin=1)
    # Make sure y is an inexact type to avoid bad behavior on abs(MIN_INT).
    # This will cause casting of x later. Also, make sure to allow subclasses
    # (e.g., for numpy.ma).
    try:
        dt = np.result_type(y, 1.)
    except TypeError:
        # result_type cannot promote the quaternion dtype against a float;
        # fall back to the quaternion dtype explicitly (the "minor change"
        # relative to numpy.isclose mentioned in the docstring).
        dt = np.dtype(np.quaternion)
    y = np.array(y, dtype=dt, copy=False, subok=True)
    xfin = np.isfinite(x)
    yfin = np.isfinite(y)
    if np.all(xfin) and np.all(yfin):
        # Fast path: everything finite, compare directly.
        return within_tol(x, y, atol, rtol)
    else:
        finite = xfin & yfin
        cond = np.zeros_like(finite, subok=True)
        # Because we're using boolean indexing, x & y must be the same shape.
        # Ideally, we'd just do x, y = broadcast_arrays(x, y). It's in
        # lib.stride_tricks, though, so we can't import it here.
        x = x * np.ones_like(cond)
        y = y * np.ones_like(cond)
        # Avoid subtraction with infinite/nan values...
        cond[finite] = within_tol(x[finite], y[finite], atol, rtol)
        # Check for equality of infinite values...
        cond[~finite] = (x[~finite] == y[~finite])
        if equal_nan:
            # Make NaN == NaN
            both_nan = np.isnan(x) & np.isnan(y)
            cond[both_nan] = both_nan[both_nan]
        if np.isscalar(a) and np.isscalar(b):
            return bool(cond)
        else:
            return cond
def allclose(a, b, rtol=4*np.finfo(float).eps, atol=0.0, equal_nan=False, verbose=False):
    """
    Returns True if two arrays are element-wise equal within a tolerance.

    This function is essentially a wrapper for the `quaternion.isclose`
    function, but returns a single boolean value of True if all elements
    of the output from `quaternion.isclose` are True, and False otherwise.

    Note that this function has stricter default tolerances than the
    `numpy.allclose` function, as well as the additional `verbose` option.

    Parameters
    ----------
    a, b : array_like
        Input arrays to compare.
    rtol : float
        The relative tolerance parameter (see Notes).
    atol : float
        The absolute tolerance parameter (see Notes).
    equal_nan : bool
        Whether to compare NaN's as equal.  If True, NaN's in `a` will be
        considered equal to NaN's in `b` in the output array.
    verbose : bool
        If True and the return value is False, print each non-close pair
        of elements along with its index.

    Returns
    -------
    allclose : bool
        Returns True if the two arrays are equal within the given
        tolerance; False otherwise.

    See Also
    --------
    isclose, numpy.all, numpy.any, numpy.allclose

    Notes
    -----
    If the following equation is element-wise True, then allclose returns
    True.

        absolute(`a` - `b`) <= (`atol` + `rtol` * absolute(`b`))

    The above equation is not symmetric in `a` and `b`, so that
    `allclose(a, b)` might be different from `allclose(b, a)` in
    some rare cases.

    """
    close = isclose(a, b, rtol=rtol, atol=atol, equal_nan=equal_nan)
    result = np.all(close)
    if verbose and not result:
        print('Non-close values:')
        # `close` is a boolean array; select the failing elements directly
        # instead of the non-idiomatic `close == False` comparison.
        for i in np.argwhere(np.logical_not(close)):
            i = tuple(i)
            print('\n x[{0}]={1}\n y[{0}]={2}'.format(i, a[i], b[i]))
    return result
|
moble/quaternion
|
__init__.py
|
as_rotation_matrix
|
python
|
def as_rotation_matrix(q):
    """Convert input quaternion to 3x3 rotation matrix

    Parameters
    ----------
    q: quaternion or array of quaternions
        The quaternion(s) need not be normalized, but must all be nonzero

    Returns
    -------
    rot: float array
        Output shape is q.shape+(3,3).  This matrix should multiply (from
        the left) a column vector to produce the rotated column vector.

    Raises
    ------
    ZeroDivisionError
        If any of the input quaternions have norm 0.0.

    """
    if q.shape == () and not isinstance(q, np.ndarray):  # This is just a single quaternion
        # NOTE: the formulas below divide by `n` directly (no square root),
        # so `norm` here is evidently the squared magnitude |q|^2.
        n = q.norm()
        if n == 0.0:
            raise ZeroDivisionError("Input to `as_rotation_matrix({0})` has zero norm".format(q))
        elif abs(n-1.0) < _eps:  # Input q is basically normalized
            # Standard quaternion-to-matrix formula for a unit quaternion.
            return np.array([
                [1 - 2*(q.y**2 + q.z**2), 2*(q.x*q.y - q.z*q.w), 2*(q.x*q.z + q.y*q.w)],
                [2*(q.x*q.y + q.z*q.w), 1 - 2*(q.x**2 + q.z**2), 2*(q.y*q.z - q.x*q.w)],
                [2*(q.x*q.z - q.y*q.w), 2*(q.y*q.z + q.x*q.w), 1 - 2*(q.x**2 + q.y**2)]
            ])
        else:  # Input q is not normalized
            # Same formula, but every quadratic term is divided by |q|^2.
            return np.array([
                [1 - 2*(q.y**2 + q.z**2)/n, 2*(q.x*q.y - q.z*q.w)/n, 2*(q.x*q.z + q.y*q.w)/n],
                [2*(q.x*q.y + q.z*q.w)/n, 1 - 2*(q.x**2 + q.z**2)/n, 2*(q.y*q.z - q.x*q.w)/n],
                [2*(q.x*q.z - q.y*q.w)/n, 2*(q.y*q.z + q.x*q.w)/n, 1 - 2*(q.x**2 + q.y**2)/n]
            ])
    else:  # This is an array of quaternions
        n = np.norm(q)
        if np.any(n == 0.0):
            raise ZeroDivisionError("Array input to `as_rotation_matrix` has at least one element with zero norm")
        else:  # Assume input q is not normalized
            # Fill the 3x3 matrices componentwise from the float view
            # (w, x, y, z) = (q[...,0], q[...,1], q[...,2], q[...,3]),
            # dividing by n to handle unnormalized input.
            m = np.empty(q.shape + (3, 3))
            q = as_float_array(q)
            m[..., 0, 0] = 1.0 - 2*(q[..., 2]**2 + q[..., 3]**2)/n
            m[..., 0, 1] = 2*(q[..., 1]*q[..., 2] - q[..., 3]*q[..., 0])/n
            m[..., 0, 2] = 2*(q[..., 1]*q[..., 3] + q[..., 2]*q[..., 0])/n
            m[..., 1, 0] = 2*(q[..., 1]*q[..., 2] + q[..., 3]*q[..., 0])/n
            m[..., 1, 1] = 1.0 - 2*(q[..., 1]**2 + q[..., 3]**2)/n
            m[..., 1, 2] = 2*(q[..., 2]*q[..., 3] - q[..., 1]*q[..., 0])/n
            m[..., 2, 0] = 2*(q[..., 1]*q[..., 3] - q[..., 2]*q[..., 0])/n
            m[..., 2, 1] = 2*(q[..., 2]*q[..., 3] + q[..., 1]*q[..., 0])/n
            m[..., 2, 2] = 1.0 - 2*(q[..., 1]**2 + q[..., 2]**2)/n
            return m
|
Convert input quaternion to 3x3 rotation matrix
Parameters
----------
q: quaternion or array of quaternions
The quaternion(s) need not be normalized, but must all be nonzero
Returns
-------
rot: float array
Output shape is q.shape+(3,3). This matrix should multiply (from
the left) a column vector to produce the rotated column vector.
Raises
------
ZeroDivisionError
If any of the input quaternions have norm 0.0.
|
train
|
https://github.com/moble/quaternion/blob/7a323e81b391d6892e2874073e495e0beb057e85/__init__.py#L135-L187
|
[
"def as_float_array(a):\n \"\"\"View the quaternion array as an array of floats\n\n This function is fast (of order 1 microsecond) because no data is\n copied; the returned quantity is just a \"view\" of the original.\n\n The output view has one more dimension (of size 4) than the input\n array, but is otherwise the same shape.\n\n \"\"\"\n return np.asarray(a, dtype=np.quaternion).view((np.double, 4))\n"
] |
# Copyright (c) 2017, Michael Boyle
# See LICENSE file for details: <https://github.com/moble/quaternion/blob/master/LICENSE>
from __future__ import division, print_function, absolute_import
import numpy as np
from .numpy_quaternion import (quaternion, _eps,
slerp_evaluate, squad_evaluate,
# slerp_vectorized, squad_vectorized,
# slerp, squad,
)
from .quaternion_time_series import slerp, squad, integrate_angular_velocity, minimal_rotation
from .calculus import derivative, definite_integral, indefinite_integral
from .means import mean_rotor_in_chordal_metric, optimal_alignment_in_chordal_metric
from ._version import __version__
# Human-readable title and module docstring.
__doc_title__ = "Quaternion dtype for NumPy"
__doc__ = "Adds a quaternion dtype to NumPy."
# Public API of the package.
__all__ = ['quaternion',
           'as_quat_array', 'as_spinor_array',
           'as_float_array', 'from_float_array',
           'as_rotation_matrix', 'from_rotation_matrix',
           'as_rotation_vector', 'from_rotation_vector',
           'as_euler_angles', 'from_euler_angles',
           'as_spherical_coords', 'from_spherical_coords',
           'rotate_vectors', 'allclose',
           'rotor_intrinsic_distance', 'rotor_chordal_distance',
           'rotation_intrinsic_distance', 'rotation_chordal_distance',
           'slerp_evaluate', 'squad_evaluate',
           'zero', 'one', 'x', 'y', 'z', 'integrate_angular_velocity',
           'squad', 'slerp', 'derivative', 'definite_integral', 'indefinite_integral']
# Register the quaternion scalar type with NumPy, refusing to clobber an
# existing registration.
if 'quaternion' in np.__dict__:
    raise RuntimeError('The NumPy package already has a quaternion type')
np.quaternion = quaternion
# NOTE(review): np.typeDict was deprecated in NumPy 1.20 and removed in
# 1.24 (np.sctypeDict is the closest replacement) -- confirm the NumPy
# versions this module must support.
np.typeDict['quaternion'] = np.dtype(quaternion)
# Convenience constants: the zero quaternion and the four basis quaternions.
zero = np.quaternion(0, 0, 0, 0)
one = np.quaternion(1, 0, 0, 0)
x = np.quaternion(0, 1, 0, 0)
y = np.quaternion(0, 0, 1, 0)
z = np.quaternion(0, 0, 0, 1)
# Module-level aliases for distance functions; these np attributes are
# presumably attached by the compiled numpy_quaternion extension --
# verify against its initialization code.
rotor_intrinsic_distance = np.rotor_intrinsic_distance
rotor_chordal_distance = np.rotor_chordal_distance
rotation_intrinsic_distance = np.rotation_intrinsic_distance
rotation_chordal_distance = np.rotation_chordal_distance
def as_float_array(a):
    """Reinterpret a quaternion array as an array of floats

    No data is copied, which makes this function fast (of order 1
    microsecond); the returned quantity is simply a "view" of the
    original memory.

    The returned view has one extra trailing dimension (of size 4)
    relative to the input array, but is otherwise the same shape.

    """
    quat_array = np.asarray(a, dtype=np.quaternion)
    return quat_array.view((np.double, 4))
def as_quat_array(a):
    """Reinterpret a float array as an array of quaternions

    The input array must have a final dimension whose size is divisible
    by four (ideally exactly 4), because consecutive entries along that
    last dimension are taken as consecutive components of each output
    quaternion.

    This function is usually fast (of order 1 microsecond) because no
    data is copied; the result is just a "view" of the input.  However,
    if the input is not C-contiguous along its last axis, the data must
    first be copied, which may be quite slow.  Slices and transpositions
    frequently break contiguity, so try to keep the input in C order.

    We deliberately provide no conversion back from a two-spinor array:
    there is no unique convention for those, and we want to discourage
    the slow, memory-copying column swaps that useful two-spinor
    definitions would require.

    """
    floats = np.asarray(a, dtype=np.double)
    # Fast path: a single quaternion supplied as its four components.
    if floats.shape == (4,):
        return quaternion(floats[0], floats[1], floats[2], floats[3])
    # Viewing as quaternions only works when the last axis is C-contiguous.
    last_axis_ok = floats.flags['C_CONTIGUOUS'] and floats.strides[-1] == floats.itemsize
    if not last_axis_ok:
        floats = floats.copy(order='C')
    try:
        quats = floats.view(np.quaternion)
    except ValueError as e:
        message = (str(e) + '\n '
                   + 'Failed to view input data as a series of quaternions. '
                   + 'Please ensure that the last dimension has size divisible by 4.\n '
                   + 'Input data has shape {0} and dtype {1}.'.format(floats.shape, floats.dtype))
        raise ValueError(message)
    # A single quaternion should not keep a trailing axis of length 1,
    # matching the shape convention of `as_float_array`.
    if quats.shape[-1] == 1:
        quats = quats.reshape(floats.shape[:-1])
    return quats
def from_float_array(a):
    """Convert an array of floats to an array of quaternions

    Alias for `as_quat_array`; see that function for the required input
    shape and the zero-copy behavior.
    """
    return as_quat_array(a)
def as_spinor_array(a):
    """View a quaternion array as spinors in two-complex representation

    This function is relatively slow and scales poorly, because memory
    copying is apparently involved -- I think it's due to the
    "advanced indexing" required to swap the columns.

    Parameters
    ----------
    a : quaternion array_like
        Input quaternions; must already have quaternion dtype.

    Returns
    -------
    spinors : complex array
        Output shape is a.shape+(2,).  Given components (w, x, y, z),
        the reordering [0, 3, 2, 1] followed by the complex view yields
        the pairs (w + z*1j, y + x*1j).

    """
    a = np.atleast_1d(a)
    assert a.dtype == np.dtype(np.quaternion)
    # I'm not sure why it has to be so complicated, but all of these steps
    # appear to be necessary in this case.
    # The builtin `float`/`complex` are used here because the `np.float`
    # and `np.complex` aliases (which referred to exactly these builtins)
    # were removed in NumPy 1.24.
    return a.view(float).reshape(a.shape + (4,))[..., [0, 3, 2, 1]].ravel().view(complex).reshape(a.shape + (2,))
def from_rotation_matrix(rot, nonorthogonal=True):
    """Convert input 3x3 rotation matrix to unit quaternion

    By default, if scipy.linalg is available, this function uses
    Bar-Itzhack's algorithm to allow for non-orthogonal matrices.
    [J. Guidance, Vol. 23, No. 6, p. 1085 <http://dx.doi.org/10.2514/2.4654>]
    This will almost certainly be quite a bit slower than simpler versions,
    though it will be more robust to numerical errors in the rotation matrix.
    Also note that Bar-Itzhack uses some pretty weird conventions.  The last
    component of the quaternion appears to represent the scalar, and the
    quaternion itself is conjugated relative to the convention used
    throughout this module.

    If scipy.linalg is not available or if the optional
    `nonorthogonal` parameter is set to `False`, this function falls
    back to the possibly faster, but less robust, algorithm of Markley
    [J. Guidance, Vol. 31, No. 2, p. 440
    <http://dx.doi.org/10.2514/1.31730>].

    Parameters
    ----------
    rot: (...Nx3x3) float array
        Each 3x3 matrix represents a rotation by multiplying (from the left)
        a column vector to produce a rotated column vector.  Note that this
        input may actually have ndims>3; it is just assumed that the last
        two dimensions have size 3, representing the matrix.
    nonorthogonal: bool, optional
        If scipy.linalg is available, use the more robust algorithm of
        Bar-Itzhack.  Default value is True.

    Returns
    -------
    q: array of quaternions
        Unit quaternions resulting in rotations corresponding to input
        rotations.  Output shape is rot.shape[:-2].

    Raises
    ------
    LinAlgError
        If any of the eigenvalue solutions does not converge

    """
    try:
        from scipy import linalg
    except ImportError:
        linalg = False
    # np.asarray keeps the original "copy only when needed" behavior;
    # np.array(..., copy=False) raises under NumPy >= 2.0 whenever a copy
    # is actually required.
    rot = np.asarray(rot)
    shape = rot.shape[:-2]

    if linalg and nonorthogonal:
        from operator import mul
        from functools import reduce

        # Build Bar-Itzhack's symmetric 4x4 matrix K3 for each input; its
        # dominant eigenvector encodes the optimal unit quaternion.
        K3 = np.empty(shape+(4, 4))
        K3[..., 0, 0] = (rot[..., 0, 0] - rot[..., 1, 1] - rot[..., 2, 2])/3.0
        K3[..., 0, 1] = (rot[..., 1, 0] + rot[..., 0, 1])/3.0
        K3[..., 0, 2] = (rot[..., 2, 0] + rot[..., 0, 2])/3.0
        K3[..., 0, 3] = (rot[..., 1, 2] - rot[..., 2, 1])/3.0
        K3[..., 1, 0] = K3[..., 0, 1]
        K3[..., 1, 1] = (rot[..., 1, 1] - rot[..., 0, 0] - rot[..., 2, 2])/3.0
        K3[..., 1, 2] = (rot[..., 2, 1] + rot[..., 1, 2])/3.0
        K3[..., 1, 3] = (rot[..., 2, 0] - rot[..., 0, 2])/3.0
        K3[..., 2, 0] = K3[..., 0, 2]
        K3[..., 2, 1] = K3[..., 1, 2]
        K3[..., 2, 2] = (rot[..., 2, 2] - rot[..., 0, 0] - rot[..., 1, 1])/3.0
        K3[..., 2, 3] = (rot[..., 0, 1] - rot[..., 1, 0])/3.0
        K3[..., 3, 0] = K3[..., 0, 3]
        K3[..., 3, 1] = K3[..., 1, 3]
        K3[..., 3, 2] = K3[..., 2, 3]
        K3[..., 3, 3] = (rot[..., 0, 0] + rot[..., 1, 1] + rot[..., 2, 2])/3.0

        if not shape:
            q = zero.copy()
            # NOTE(review): scipy.linalg.eigh's `eigvals` keyword was
            # replaced by `subset_by_index` (scipy >= 1.5) and later
            # removed -- confirm the supported scipy range.
            eigvals, eigvecs = linalg.eigh(K3.T, eigvals=(3, 3))
            # Bar-Itzhack puts the scalar component last and conjugates
            # relative to this module's convention, hence the reordering
            # and negation below.
            q.components[0] = eigvecs[-1]
            q.components[1:] = -eigvecs[:-1].flatten()
            return q
        else:
            # `float` replaces the `np.float` alias removed in NumPy 1.24.
            q = np.empty(shape+(4,), dtype=float)
            for flat_index in range(reduce(mul, shape)):
                multi_index = np.unravel_index(flat_index, shape)
                eigvals, eigvecs = linalg.eigh(K3[multi_index], eigvals=(3, 3))
                q[multi_index+(0,)] = eigvecs[-1]
                q[multi_index+(slice(1,None),)] = -eigvecs[:-1].flatten()
            return as_quat_array(q)

    else:  # No scipy.linalg or not `nonorthogonal`
        # Markley's method: per matrix, pick the numerically largest of the
        # four candidate denominators to avoid cancellation.
        diagonals = np.empty(shape+(4,))
        diagonals[..., 0] = rot[..., 0, 0]
        diagonals[..., 1] = rot[..., 1, 1]
        diagonals[..., 2] = rot[..., 2, 2]
        diagonals[..., 3] = rot[..., 0, 0] + rot[..., 1, 1] + rot[..., 2, 2]
        indices = np.argmax(diagonals, axis=-1)
        q = diagonals  # reuse storage space
        indices_i = (indices == 0)
        if np.any(indices_i):
            if indices_i.shape == ():
                indices_i = Ellipsis
            rot_i = rot[indices_i, :, :]
            q[indices_i, 0] = rot_i[..., 2, 1] - rot_i[..., 1, 2]
            q[indices_i, 1] = 1 + rot_i[..., 0, 0] - rot_i[..., 1, 1] - rot_i[..., 2, 2]
            q[indices_i, 2] = rot_i[..., 0, 1] + rot_i[..., 1, 0]
            q[indices_i, 3] = rot_i[..., 0, 2] + rot_i[..., 2, 0]
        indices_i = (indices == 1)
        if np.any(indices_i):
            if indices_i.shape == ():
                indices_i = Ellipsis
            rot_i = rot[indices_i, :, :]
            q[indices_i, 0] = rot_i[..., 0, 2] - rot_i[..., 2, 0]
            q[indices_i, 1] = rot_i[..., 1, 0] + rot_i[..., 0, 1]
            q[indices_i, 2] = 1 - rot_i[..., 0, 0] + rot_i[..., 1, 1] - rot_i[..., 2, 2]
            q[indices_i, 3] = rot_i[..., 1, 2] + rot_i[..., 2, 1]
        indices_i = (indices == 2)
        if np.any(indices_i):
            if indices_i.shape == ():
                indices_i = Ellipsis
            rot_i = rot[indices_i, :, :]
            q[indices_i, 0] = rot_i[..., 1, 0] - rot_i[..., 0, 1]
            q[indices_i, 1] = rot_i[..., 2, 0] + rot_i[..., 0, 2]
            q[indices_i, 2] = rot_i[..., 2, 1] + rot_i[..., 1, 2]
            q[indices_i, 3] = 1 - rot_i[..., 0, 0] - rot_i[..., 1, 1] + rot_i[..., 2, 2]
        indices_i = (indices == 3)
        if np.any(indices_i):
            if indices_i.shape == ():
                indices_i = Ellipsis
            rot_i = rot[indices_i, :, :]
            q[indices_i, 0] = 1 + rot_i[..., 0, 0] + rot_i[..., 1, 1] + rot_i[..., 2, 2]
            q[indices_i, 1] = rot_i[..., 2, 1] - rot_i[..., 1, 2]
            q[indices_i, 2] = rot_i[..., 0, 2] - rot_i[..., 2, 0]
            q[indices_i, 3] = rot_i[..., 1, 0] - rot_i[..., 0, 1]
        q /= np.linalg.norm(q, axis=-1)[..., np.newaxis]
        return as_quat_array(q)
def as_rotation_vector(q):
    """Convert input quaternion to the axis-angle representation

    Note that if any of the input quaternions has norm zero, no error is
    raised, but NaNs will appear in the output.

    Parameters
    ----------
    q: quaternion or array of quaternions
        The quaternion(s) need not be normalized, but must all be nonzero

    Returns
    -------
    rot: float array
        Output shape is q.shape+(3,).  Each vector represents the axis of
        the rotation, with norm proportional to the angle of the rotation
        in radians.

    """
    log_q = np.log(np.normalized(q))
    # Keep only the vector part of 2*log(q/|q|); that is the rotation vector.
    return as_float_array(2 * log_q)[..., 1:]
def from_rotation_vector(rot):
    """Convert input 3-vector in axis-angle representation to unit quaternion

    Parameters
    ----------
    rot: (Nx3) float array
        Each vector represents the axis of the rotation, with norm
        proportional to the angle of the rotation in radians.

    Returns
    -------
    q: array of quaternions
        Unit quaternions resulting in rotations corresponding to input
        rotations.  Output shape is rot.shape[:-1].

    """
    # np.asarray keeps the original "copy only when needed" behavior;
    # np.array(..., copy=False) raises under NumPy >= 2.0 whenever a copy
    # is actually required.
    rot = np.asarray(rot)
    # Half the rotation vector is the vector part of the quaternion
    # logarithm; exponentiating yields the corresponding unit quaternion.
    quats = np.zeros(rot.shape[:-1]+(4,))
    quats[..., 1:] = rot[...]/2
    quats = as_quat_array(quats)
    return np.exp(quats)
def as_euler_angles(q):
    """Open Pandora's Box

    If somebody is trying to make you use Euler angles, tell them no, and
    walk away, and go and tell your mum.

    You don't want to use Euler angles.  They are awful.  Stay away.  It's
    one thing to convert from Euler angles to quaternions; at least you're
    moving in the right direction.  But to go the other way?!  It's just not
    right.

    Assumes the Euler angles correspond to the quaternion R via

        R = exp(alpha*z/2) * exp(beta*y/2) * exp(gamma*z/2)

    The angles are naturally in radians.

    NOTE: Before opening an issue reporting something "wrong" with this
    function, be sure to read all of the following page, *especially* the
    very last section about opening issues or pull requests.
    <https://github.com/moble/quaternion/wiki/Euler-angles-are-horrible>

    Parameters
    ----------
    q: quaternion or array of quaternions
        The quaternion(s) need not be normalized, but must all be nonzero

    Returns
    -------
    alpha_beta_gamma: float array
        Output shape is q.shape+(3,).  These represent the angles (alpha,
        beta, gamma) in radians, where the normalized input quaternion
        represents `exp(alpha*z/2) * exp(beta*y/2) * exp(gamma*z/2)`.

    Raises
    ------
    AllHell
        ...if you try to actually use Euler angles, when you could have
        been using quaternions like a sensible person.

    """
    # `float` replaces the `np.float` alias removed in NumPy 1.24 (it was
    # an alias of exactly this builtin).
    alpha_beta_gamma = np.empty(q.shape + (3,), dtype=float)
    n = np.norm(q)
    q = as_float_array(q)
    alpha_beta_gamma[..., 0] = np.arctan2(q[..., 3], q[..., 0]) + np.arctan2(-q[..., 1], q[..., 2])
    alpha_beta_gamma[..., 1] = 2*np.arccos(np.sqrt((q[..., 0]**2 + q[..., 3]**2)/n))
    alpha_beta_gamma[..., 2] = np.arctan2(q[..., 3], q[..., 0]) - np.arctan2(-q[..., 1], q[..., 2])
    return alpha_beta_gamma
def from_euler_angles(alpha_beta_gamma, beta=None, gamma=None):
    """Improve your life drastically

    Assumes the Euler angles correspond to the quaternion R via

        R = exp(alpha*z/2) * exp(beta*y/2) * exp(gamma*z/2)

    The angles naturally must be in radians for this to make any sense.

    NOTE: Before opening an issue reporting something "wrong" with this
    function, be sure to read all of the following page, *especially* the
    very last section about opening issues or pull requests.
    <https://github.com/moble/quaternion/wiki/Euler-angles-are-horrible>

    Parameters
    ----------
    alpha_beta_gamma: float or array of floats
        Either an array whose last dimension has size 3, giving the
        (alpha, beta, gamma) values in radians for each rotation; or just
        the alpha values, in which case the next two arguments must also
        be given.
    beta: None, float, or array of floats
        If given, must broadcast against the first and third arguments.
    gamma: None, float, or array of floats
        If given, must broadcast against the first and second arguments.

    Returns
    -------
    R: quaternion array
        The shape of this array will be the same as the input, except that
        the last dimension will be removed.

    """
    # Unpack the three angles from whichever calling convention was used
    if gamma is None:
        angles = np.asarray(alpha_beta_gamma, dtype=np.double)
        alpha = angles[..., 0]
        beta = angles[..., 1]
        gamma = angles[..., 2]
    else:
        alpha = np.asarray(alpha_beta_gamma, dtype=np.double)
        beta = np.asarray(beta, dtype=np.double)
        gamma = np.asarray(gamma, dtype=np.double)
    # Precompute the half-angle combinations appearing in each component
    half_beta = beta/2
    half_sum = (alpha+gamma)/2
    half_diff = (alpha-gamma)/2
    # Assemble the float components, then view as quaternions
    R = np.empty(np.broadcast(alpha, beta, gamma).shape + (4,), dtype=np.double)
    R[..., 0] = np.cos(half_beta)*np.cos(half_sum)    # scalar quaternion components
    R[..., 1] = -np.sin(half_beta)*np.sin(half_diff)  # x quaternion components
    R[..., 2] = np.sin(half_beta)*np.cos(half_diff)   # y quaternion components
    R[..., 3] = np.cos(half_beta)*np.sin(half_sum)    # z quaternion components
    return as_quat_array(R)
def as_spherical_coords(q):
    """Return the spherical coordinates corresponding to this quaternion

    Spherical coordinates carry less information than a quaternion, so
    this conversion is necessarily lossy.  The returned pair describes
    the point(s) on the sphere to which the input quaternion(s) rotate
    the z axis.

    Parameters
    ----------
    q: quaternion or array of quaternions
        The quaternion(s) need not be normalized, but must be nonzero

    Returns
    -------
    vartheta_varphi: float array
        Output shape is q.shape+(2,).  These represent the angles
        (vartheta, varphi) in radians, where the normalized input
        quaternion represents `exp(varphi*z/2) * exp(vartheta*y/2)`, up
        to an arbitrary inital rotation about `z`.

    """
    # `as_euler_angles` yields (alpha, beta, gamma); the reversed slice
    # picks out (beta, alpha), i.e. (vartheta, varphi), dropping gamma.
    angles = as_euler_angles(q)
    return angles[..., 1::-1]
def from_spherical_coords(theta_phi, phi=None):
    """Return the quaternion corresponding to these spherical coordinates

    Assumes the spherical coordinates correspond to the quaternion R via

        R = exp(phi*z/2) * exp(theta*y/2)

    The angles naturally must be in radians for this to make any sense.

    Note that this quaternion rotates `z` onto the point with the given
    spherical coordinates, but also rotates `x` and `y` onto the usual basis
    vectors (theta and phi, respectively) at that point.

    Parameters
    ----------
    theta_phi: float or array of floats
        Either an array whose last dimension has size 2, giving the
        (theta, phi) values in radians for each point; or just the theta
        values in radians, in which case the next argument must also be
        given.
    phi: None, float, or array of floats
        If given, must broadcast against the first argument.

    Returns
    -------
    R: quaternion array
        If the second argument is not given to this function, the shape
        will be the same as the input shape except for the last dimension,
        which will be removed.  If the second argument is given, this
        output array will have the shape resulting from broadcasting the
        two input arrays against each other.

    """
    # Unpack the two angles from whichever calling convention was used
    if phi is None:
        angles = np.asarray(theta_phi, dtype=np.double)
        theta = angles[..., 0]
        phi = angles[..., 1]
    else:
        theta = np.asarray(theta_phi, dtype=np.double)
        phi = np.asarray(phi, dtype=np.double)
    # Precompute the half angles used by every component
    half_theta = theta/2
    half_phi = phi/2
    # Assemble the float components, then view as quaternions
    R = np.empty(np.broadcast(theta, phi).shape + (4,), dtype=np.double)
    R[..., 0] = np.cos(half_phi)*np.cos(half_theta)   # scalar quaternion components
    R[..., 1] = -np.sin(half_phi)*np.sin(half_theta)  # x quaternion components
    R[..., 2] = np.cos(half_phi)*np.sin(half_theta)   # y quaternion components
    R[..., 3] = np.sin(half_phi)*np.cos(half_theta)   # z quaternion components
    return as_quat_array(R)
def rotate_vectors(R, v, axis=-1):
    """Rotate vectors by given quaternions

    For simplicity, this function simply converts the input
    quaternion(s) to a matrix, and rotates the input vector(s) by the
    usual matrix multiplication.  However, it should be noted that if
    each input quaternion is only used to rotate a single vector, it
    is more efficient (in terms of operation counts) to use the
    formula

      v' = v + 2 * r x (s * v + r x v) / m

    where x represents the cross product, s and r are the scalar and
    vector parts of the quaternion, respectively, and m is the sum of
    the squares of the components of the quaternion.  If you are
    looping over a very large number of quaternions, and just rotating
    a single vector each time, you might want to implement that
    alternative algorithm using numba (or something that doesn't use
    python).

    Parameters
    ==========
    R: quaternion array
        Quaternions by which to rotate the input vectors
    v: float array
        Three-vectors to be rotated.
    axis: int
        Axis of the `v` array to use as the vector dimension.  This
        axis of `v` must have length 3.

    Returns
    =======
    vprime: float array
        The rotated vectors.  This array has shape R.shape+v.shape.

    """
    R = np.asarray(R, dtype=np.quaternion)
    v = np.asarray(v, dtype=float)
    if v.ndim < 1 or 3 not in v.shape:
        raise ValueError("Input `v` does not have at least one dimension of length 3")
    if v.shape[axis] != 3:
        raise ValueError("Input `v` axis {0} has length {1}, not 3.".format(axis, v.shape[axis]))
    m = as_rotation_matrix(R)
    # Label each axis with a distinct integer for einsum's interleaved
    # (operand, axes, operand, axes, output-axes) calling form.
    m_axes = list(range(m.ndim))                 # labels for the matrix axes; last two are (row, col)
    v_axes = list(range(m.ndim, m.ndim+v.ndim))  # fresh labels for v's axes
    mv_axes = list(v_axes)
    # The output's vector axis comes from the matrix row axis...
    mv_axes[axis] = m_axes[-2]
    mv_axes = m_axes[:-2] + mv_axes
    # ...while the matrix column axis is contracted against v's vector axis.
    v_axes[axis] = m_axes[-1]
    return np.einsum(m, m_axes, v, v_axes, mv_axes)
def isclose(a, b, rtol=4*np.finfo(float).eps, atol=0.0, equal_nan=False):
    """
    Returns a boolean array where two arrays are element-wise equal within a
    tolerance.

    This function is essentially a copy of the `numpy.isclose` function,
    with different default tolerances and one minor changes necessary to
    deal correctly with quaternions.

    The tolerance values are positive, typically very small numbers.  The
    relative difference (`rtol` * abs(`b`)) and the absolute difference
    `atol` are added together to compare against the absolute difference
    between `a` and `b`.

    Parameters
    ----------
    a, b : array_like
        Input arrays to compare.
    rtol : float
        The relative tolerance parameter (see Notes).
    atol : float
        The absolute tolerance parameter (see Notes).
    equal_nan : bool
        Whether to compare NaN's as equal.  If True, NaN's in `a` will be
        considered equal to NaN's in `b` in the output array.

    Returns
    -------
    y : array_like
        Returns a boolean array of where `a` and `b` are equal within the
        given tolerance.  If both `a` and `b` are scalars, returns a single
        boolean value.

    See Also
    --------
    allclose

    Notes
    -----
    For finite values, isclose uses the following equation to test whether
    two floating point values are equivalent:

        absolute(`a` - `b`) <= (`atol` + `rtol` * absolute(`b`))

    The above equation is not symmetric in `a` and `b`, so that
    `isclose(a, b)` might be different from `isclose(b, a)` in
    some rare cases.

    Examples
    --------
    >>> quaternion.isclose([1e10*quaternion.x, 1e-7*quaternion.y], [1.00001e10*quaternion.x, 1e-8*quaternion.y],
    ...     rtol=1.e-5, atol=1.e-8)
    array([True, False])
    >>> quaternion.isclose([1e10*quaternion.x, 1e-8*quaternion.y], [1.00001e10*quaternion.x, 1e-9*quaternion.y],
    ...     rtol=1.e-5, atol=1.e-8)
    array([True, True])
    >>> quaternion.isclose([1e10*quaternion.x, 1e-8*quaternion.y], [1.0001e10*quaternion.x, 1e-9*quaternion.y],
    ...     rtol=1.e-5, atol=1.e-8)
    array([False, True])
    >>> quaternion.isclose([quaternion.x, np.nan*quaternion.y], [quaternion.x, np.nan*quaternion.y])
    array([True, False])
    >>> quaternion.isclose([quaternion.x, np.nan*quaternion.y], [quaternion.x, np.nan*quaternion.y], equal_nan=True)
    array([True, True])
    """
    # Closure over the outer `a` and `b`: scalar inputs produce a plain
    # bool rather than a 0-d array.
    def within_tol(x, y, atol, rtol):
        with np.errstate(invalid='ignore'):
            result = np.less_equal(abs(x-y), atol + rtol * abs(y))
        if np.isscalar(a) and np.isscalar(b):
            result = bool(result)
        return result
    # NOTE(review): np.array(..., copy=False) raises under NumPy >= 2.0
    # when a copy is required; consider np.asanyarray -- confirm the
    # supported NumPy range.
    x = np.array(a, copy=False, subok=True, ndmin=1)
    y = np.array(b, copy=False, subok=True, ndmin=1)
    # Make sure y is an inexact type to avoid bad behavior on abs(MIN_INT).
    # This will cause casting of x later.  Also, make sure to allow subclasses
    # (e.g., for numpy.ma).
    try:
        dt = np.result_type(y, 1.)
    except TypeError:
        # result_type cannot combine the quaternion dtype with a float;
        # fall back to the quaternion dtype itself.
        dt = np.dtype(np.quaternion)
    y = np.array(y, dtype=dt, copy=False, subok=True)
    xfin = np.isfinite(x)
    yfin = np.isfinite(y)
    if np.all(xfin) and np.all(yfin):
        return within_tol(x, y, atol, rtol)
    else:
        finite = xfin & yfin
        cond = np.zeros_like(finite, subok=True)
        # Because we're using boolean indexing, x & y must be the same shape.
        # Ideally, we'd just do x, y = broadcast_arrays(x, y).  It's in
        # lib.stride_tricks, though, so we can't import it here.
        x = x * np.ones_like(cond)
        y = y * np.ones_like(cond)
        # Avoid subtraction with infinite/nan values...
        cond[finite] = within_tol(x[finite], y[finite], atol, rtol)
        # Check for equality of infinite values...
        cond[~finite] = (x[~finite] == y[~finite])
        if equal_nan:
            # Make NaN == NaN
            both_nan = np.isnan(x) & np.isnan(y)
            cond[both_nan] = both_nan[both_nan]
        if np.isscalar(a) and np.isscalar(b):
            return bool(cond)
        else:
            return cond
def allclose(a, b, rtol=4*np.finfo(float).eps, atol=0.0, equal_nan=False, verbose=False):
    """
    Returns True if two arrays are element-wise equal within a tolerance.

    This function is essentially a wrapper for the `quaternion.isclose`
    function, but returns a single boolean value of True if all elements
    of the output from `quaternion.isclose` are True, and False otherwise.

    Note that this function has stricter default tolerances than the
    `numpy.allclose` function, as well as the additional `verbose` option.

    Parameters
    ----------
    a, b : array_like
        Input arrays to compare.
    rtol : float
        The relative tolerance parameter (see Notes).
    atol : float
        The absolute tolerance parameter (see Notes).
    equal_nan : bool
        Whether to compare NaN's as equal.  If True, NaN's in `a` will be
        considered equal to NaN's in `b` in the output array.
    verbose : bool
        If True and the return value is False, print each non-close pair
        of elements along with its index.

    Returns
    -------
    allclose : bool
        Returns True if the two arrays are equal within the given
        tolerance; False otherwise.

    See Also
    --------
    isclose, numpy.all, numpy.any, numpy.allclose

    Notes
    -----
    If the following equation is element-wise True, then allclose returns
    True.

        absolute(`a` - `b`) <= (`atol` + `rtol` * absolute(`b`))

    The above equation is not symmetric in `a` and `b`, so that
    `allclose(a, b)` might be different from `allclose(b, a)` in
    some rare cases.

    """
    close = isclose(a, b, rtol=rtol, atol=atol, equal_nan=equal_nan)
    result = np.all(close)
    if verbose and not result:
        print('Non-close values:')
        # `close` is a boolean array; select the failing elements directly
        # instead of the non-idiomatic `close == False` comparison.
        for i in np.argwhere(np.logical_not(close)):
            i = tuple(i)
            print('\n x[{0}]={1}\n y[{0}]={2}'.format(i, a[i], b[i]))
    return result
|
moble/quaternion
|
__init__.py
|
from_rotation_matrix
|
python
|
def from_rotation_matrix(rot, nonorthogonal=True):
try:
from scipy import linalg
except ImportError:
linalg = False
rot = np.array(rot, copy=False)
shape = rot.shape[:-2]
if linalg and nonorthogonal:
from operator import mul
from functools import reduce
K3 = np.empty(shape+(4, 4))
K3[..., 0, 0] = (rot[..., 0, 0] - rot[..., 1, 1] - rot[..., 2, 2])/3.0
K3[..., 0, 1] = (rot[..., 1, 0] + rot[..., 0, 1])/3.0
K3[..., 0, 2] = (rot[..., 2, 0] + rot[..., 0, 2])/3.0
K3[..., 0, 3] = (rot[..., 1, 2] - rot[..., 2, 1])/3.0
K3[..., 1, 0] = K3[..., 0, 1]
K3[..., 1, 1] = (rot[..., 1, 1] - rot[..., 0, 0] - rot[..., 2, 2])/3.0
K3[..., 1, 2] = (rot[..., 2, 1] + rot[..., 1, 2])/3.0
K3[..., 1, 3] = (rot[..., 2, 0] - rot[..., 0, 2])/3.0
K3[..., 2, 0] = K3[..., 0, 2]
K3[..., 2, 1] = K3[..., 1, 2]
K3[..., 2, 2] = (rot[..., 2, 2] - rot[..., 0, 0] - rot[..., 1, 1])/3.0
K3[..., 2, 3] = (rot[..., 0, 1] - rot[..., 1, 0])/3.0
K3[..., 3, 0] = K3[..., 0, 3]
K3[..., 3, 1] = K3[..., 1, 3]
K3[..., 3, 2] = K3[..., 2, 3]
K3[..., 3, 3] = (rot[..., 0, 0] + rot[..., 1, 1] + rot[..., 2, 2])/3.0
if not shape:
q = zero.copy()
eigvals, eigvecs = linalg.eigh(K3.T, eigvals=(3, 3))
q.components[0] = eigvecs[-1]
q.components[1:] = -eigvecs[:-1].flatten()
return q
else:
q = np.empty(shape+(4,), dtype=np.float)
for flat_index in range(reduce(mul, shape)):
multi_index = np.unravel_index(flat_index, shape)
eigvals, eigvecs = linalg.eigh(K3[multi_index], eigvals=(3, 3))
q[multi_index+(0,)] = eigvecs[-1]
q[multi_index+(slice(1,None),)] = -eigvecs[:-1].flatten()
return as_quat_array(q)
else: # No scipy.linalg or not `nonorthogonal`
diagonals = np.empty(shape+(4,))
diagonals[..., 0] = rot[..., 0, 0]
diagonals[..., 1] = rot[..., 1, 1]
diagonals[..., 2] = rot[..., 2, 2]
diagonals[..., 3] = rot[..., 0, 0] + rot[..., 1, 1] + rot[..., 2, 2]
indices = np.argmax(diagonals, axis=-1)
q = diagonals # reuse storage space
indices_i = (indices == 0)
if np.any(indices_i):
if indices_i.shape == ():
indices_i = Ellipsis
rot_i = rot[indices_i, :, :]
q[indices_i, 0] = rot_i[..., 2, 1] - rot_i[..., 1, 2]
q[indices_i, 1] = 1 + rot_i[..., 0, 0] - rot_i[..., 1, 1] - rot_i[..., 2, 2]
q[indices_i, 2] = rot_i[..., 0, 1] + rot_i[..., 1, 0]
q[indices_i, 3] = rot_i[..., 0, 2] + rot_i[..., 2, 0]
indices_i = (indices == 1)
if np.any(indices_i):
if indices_i.shape == ():
indices_i = Ellipsis
rot_i = rot[indices_i, :, :]
q[indices_i, 0] = rot_i[..., 0, 2] - rot_i[..., 2, 0]
q[indices_i, 1] = rot_i[..., 1, 0] + rot_i[..., 0, 1]
q[indices_i, 2] = 1 - rot_i[..., 0, 0] + rot_i[..., 1, 1] - rot_i[..., 2, 2]
q[indices_i, 3] = rot_i[..., 1, 2] + rot_i[..., 2, 1]
indices_i = (indices == 2)
if np.any(indices_i):
if indices_i.shape == ():
indices_i = Ellipsis
rot_i = rot[indices_i, :, :]
q[indices_i, 0] = rot_i[..., 1, 0] - rot_i[..., 0, 1]
q[indices_i, 1] = rot_i[..., 2, 0] + rot_i[..., 0, 2]
q[indices_i, 2] = rot_i[..., 2, 1] + rot_i[..., 1, 2]
q[indices_i, 3] = 1 - rot_i[..., 0, 0] - rot_i[..., 1, 1] + rot_i[..., 2, 2]
indices_i = (indices == 3)
if np.any(indices_i):
if indices_i.shape == ():
indices_i = Ellipsis
rot_i = rot[indices_i, :, :]
q[indices_i, 0] = 1 + rot_i[..., 0, 0] + rot_i[..., 1, 1] + rot_i[..., 2, 2]
q[indices_i, 1] = rot_i[..., 2, 1] - rot_i[..., 1, 2]
q[indices_i, 2] = rot_i[..., 0, 2] - rot_i[..., 2, 0]
q[indices_i, 3] = rot_i[..., 1, 0] - rot_i[..., 0, 1]
q /= np.linalg.norm(q, axis=-1)[..., np.newaxis]
return as_quat_array(q)
|
Convert input 3x3 rotation matrix to unit quaternion
By default, if scipy.linalg is available, this function uses
Bar-Itzhack's algorithm to allow for non-orthogonal matrices.
[J. Guidance, Vol. 23, No. 6, p. 1085 <http://dx.doi.org/10.2514/2.4654>]
This will almost certainly be quite a bit slower than simpler versions,
though it will be more robust to numerical errors in the rotation matrix.
Also note that Bar-Itzhack uses some pretty weird conventions. The last
component of the quaternion appears to represent the scalar, and the
quaternion itself is conjugated relative to the convention used
throughout this module.
If scipy.linalg is not available or if the optional
`nonorthogonal` parameter is set to `False`, this function falls
back to the possibly faster, but less robust, algorithm of Markley
[J. Guidance, Vol. 31, No. 2, p. 440
<http://dx.doi.org/10.2514/1.31730>].
Parameters
----------
rot: (...Nx3x3) float array
Each 3x3 matrix represents a rotation by multiplying (from the left)
a column vector to produce a rotated column vector. Note that this
input may actually have ndims>3; it is just assumed that the last
two dimensions have size 3, representing the matrix.
nonorthogonal: bool, optional
If scipy.linalg is available, use the more robust algorithm of
Bar-Itzhack. Default value is True.
Returns
-------
q: array of quaternions
Unit quaternions resulting in rotations corresponding to input
rotations. Output shape is rot.shape[:-2].
Raises
------
LinAlgError
If any of the eigenvalue solutions does not converge
|
train
|
https://github.com/moble/quaternion/blob/7a323e81b391d6892e2874073e495e0beb057e85/__init__.py#L190-L326
|
[
"def as_quat_array(a):\n \"\"\"View a float array as an array of quaternions\n\n The input array must have a final dimension whose size is\n divisible by four (or better yet *is* 4), because successive\n indices in that last dimension will be considered successive\n components of the output quaternion.\n\n This function is usually fast (of order 1 microsecond) because no\n data is copied; the returned quantity is just a \"view\" of the\n original. However, if the input array is not C-contiguous\n (basically, as you increment the index into the last dimension of\n the array, you just move to the neighboring float in memory), the\n data will need to be copied which may be quite slow. Therefore,\n you should try to ensure that the input array is in that order.\n Slices and transpositions will frequently break that rule.\n\n We will not convert back from a two-spinor array because there is\n no unique convention for them, so I don't want to mess with that.\n Also, we want to discourage users from the slow, memory-copying\n process of swapping columns required for useful definitions of\n the two-spinors.\n\n \"\"\"\n a = np.asarray(a, dtype=np.double)\n\n # fast path\n if a.shape == (4,):\n return quaternion(a[0], a[1], a[2], a[3])\n\n # view only works if the last axis is C-contiguous\n if not a.flags['C_CONTIGUOUS'] or a.strides[-1] != a.itemsize:\n a = a.copy(order='C')\n try:\n av = a.view(np.quaternion)\n except ValueError as e:\n message = (str(e) + '\\n '\n + 'Failed to view input data as a series of quaternions. '\n + 'Please ensure that the last dimension has size divisible by 4.\\n '\n + 'Input data has shape {0} and dtype {1}.'.format(a.shape, a.dtype))\n raise ValueError(message)\n\n # special case: don't create an axis for a single quaternion, to\n # match the output of `as_float_array`\n if av.shape[-1] == 1:\n av = av.reshape(a.shape[:-1])\n\n return av\n"
] |
# Copyright (c) 2017, Michael Boyle
# See LICENSE file for details: <https://github.com/moble/quaternion/blob/master/LICENSE>
from __future__ import division, print_function, absolute_import
import numpy as np
from .numpy_quaternion import (quaternion, _eps,
slerp_evaluate, squad_evaluate,
# slerp_vectorized, squad_vectorized,
# slerp, squad,
)
from .quaternion_time_series import slerp, squad, integrate_angular_velocity, minimal_rotation
from .calculus import derivative, definite_integral, indefinite_integral
from .means import mean_rotor_in_chordal_metric, optimal_alignment_in_chordal_metric
from ._version import __version__
__doc_title__ = "Quaternion dtype for NumPy"
__doc__ = "Adds a quaternion dtype to NumPy."
__all__ = ['quaternion',
'as_quat_array', 'as_spinor_array',
'as_float_array', 'from_float_array',
'as_rotation_matrix', 'from_rotation_matrix',
'as_rotation_vector', 'from_rotation_vector',
'as_euler_angles', 'from_euler_angles',
'as_spherical_coords', 'from_spherical_coords',
'rotate_vectors', 'allclose',
'rotor_intrinsic_distance', 'rotor_chordal_distance',
'rotation_intrinsic_distance', 'rotation_chordal_distance',
'slerp_evaluate', 'squad_evaluate',
'zero', 'one', 'x', 'y', 'z', 'integrate_angular_velocity',
'squad', 'slerp', 'derivative', 'definite_integral', 'indefinite_integral']
if 'quaternion' in np.__dict__:
raise RuntimeError('The NumPy package already has a quaternion type')
np.quaternion = quaternion
np.typeDict['quaternion'] = np.dtype(quaternion)
zero = np.quaternion(0, 0, 0, 0)
one = np.quaternion(1, 0, 0, 0)
x = np.quaternion(0, 1, 0, 0)
y = np.quaternion(0, 0, 1, 0)
z = np.quaternion(0, 0, 0, 1)
rotor_intrinsic_distance = np.rotor_intrinsic_distance
rotor_chordal_distance = np.rotor_chordal_distance
rotation_intrinsic_distance = np.rotation_intrinsic_distance
rotation_chordal_distance = np.rotation_chordal_distance
def as_float_array(a):
"""View the quaternion array as an array of floats
This function is fast (of order 1 microsecond) because no data is
copied; the returned quantity is just a "view" of the original.
The output view has one more dimension (of size 4) than the input
array, but is otherwise the same shape.
"""
return np.asarray(a, dtype=np.quaternion).view((np.double, 4))
def as_quat_array(a):
"""View a float array as an array of quaternions
The input array must have a final dimension whose size is
divisible by four (or better yet *is* 4), because successive
indices in that last dimension will be considered successive
components of the output quaternion.
This function is usually fast (of order 1 microsecond) because no
data is copied; the returned quantity is just a "view" of the
original. However, if the input array is not C-contiguous
(basically, as you increment the index into the last dimension of
the array, you just move to the neighboring float in memory), the
data will need to be copied which may be quite slow. Therefore,
you should try to ensure that the input array is in that order.
Slices and transpositions will frequently break that rule.
We will not convert back from a two-spinor array because there is
no unique convention for them, so I don't want to mess with that.
Also, we want to discourage users from the slow, memory-copying
process of swapping columns required for useful definitions of
the two-spinors.
"""
a = np.asarray(a, dtype=np.double)
# fast path
if a.shape == (4,):
return quaternion(a[0], a[1], a[2], a[3])
# view only works if the last axis is C-contiguous
if not a.flags['C_CONTIGUOUS'] or a.strides[-1] != a.itemsize:
a = a.copy(order='C')
try:
av = a.view(np.quaternion)
except ValueError as e:
message = (str(e) + '\n '
+ 'Failed to view input data as a series of quaternions. '
+ 'Please ensure that the last dimension has size divisible by 4.\n '
+ 'Input data has shape {0} and dtype {1}.'.format(a.shape, a.dtype))
raise ValueError(message)
# special case: don't create an axis for a single quaternion, to
# match the output of `as_float_array`
if av.shape[-1] == 1:
av = av.reshape(a.shape[:-1])
return av
def from_float_array(a):
return as_quat_array(a)
def as_spinor_array(a):
"""View a quaternion array as spinors in two-complex representation
This function is relatively slow and scales poorly, because memory
copying is apparently involved -- I think it's due to the
"advanced indexing" required to swap the columns.
"""
a = np.atleast_1d(a)
assert a.dtype == np.dtype(np.quaternion)
# I'm not sure why it has to be so complicated, but all of these steps
# appear to be necessary in this case.
return a.view(np.float).reshape(a.shape + (4,))[..., [0, 3, 2, 1]].ravel().view(np.complex).reshape(a.shape + (2,))
def as_rotation_matrix(q):
"""Convert input quaternion to 3x3 rotation matrix
Parameters
----------
q: quaternion or array of quaternions
The quaternion(s) need not be normalized, but must all be nonzero
Returns
-------
rot: float array
Output shape is q.shape+(3,3). This matrix should multiply (from
the left) a column vector to produce the rotated column vector.
Raises
------
ZeroDivisionError
If any of the input quaternions have norm 0.0.
"""
if q.shape == () and not isinstance(q, np.ndarray): # This is just a single quaternion
n = q.norm()
if n == 0.0:
raise ZeroDivisionError("Input to `as_rotation_matrix({0})` has zero norm".format(q))
elif abs(n-1.0) < _eps: # Input q is basically normalized
return np.array([
[1 - 2*(q.y**2 + q.z**2), 2*(q.x*q.y - q.z*q.w), 2*(q.x*q.z + q.y*q.w)],
[2*(q.x*q.y + q.z*q.w), 1 - 2*(q.x**2 + q.z**2), 2*(q.y*q.z - q.x*q.w)],
[2*(q.x*q.z - q.y*q.w), 2*(q.y*q.z + q.x*q.w), 1 - 2*(q.x**2 + q.y**2)]
])
else: # Input q is not normalized
return np.array([
[1 - 2*(q.y**2 + q.z**2)/n, 2*(q.x*q.y - q.z*q.w)/n, 2*(q.x*q.z + q.y*q.w)/n],
[2*(q.x*q.y + q.z*q.w)/n, 1 - 2*(q.x**2 + q.z**2)/n, 2*(q.y*q.z - q.x*q.w)/n],
[2*(q.x*q.z - q.y*q.w)/n, 2*(q.y*q.z + q.x*q.w)/n, 1 - 2*(q.x**2 + q.y**2)/n]
])
else: # This is an array of quaternions
n = np.norm(q)
if np.any(n == 0.0):
raise ZeroDivisionError("Array input to `as_rotation_matrix` has at least one element with zero norm")
else: # Assume input q is not normalized
m = np.empty(q.shape + (3, 3))
q = as_float_array(q)
m[..., 0, 0] = 1.0 - 2*(q[..., 2]**2 + q[..., 3]**2)/n
m[..., 0, 1] = 2*(q[..., 1]*q[..., 2] - q[..., 3]*q[..., 0])/n
m[..., 0, 2] = 2*(q[..., 1]*q[..., 3] + q[..., 2]*q[..., 0])/n
m[..., 1, 0] = 2*(q[..., 1]*q[..., 2] + q[..., 3]*q[..., 0])/n
m[..., 1, 1] = 1.0 - 2*(q[..., 1]**2 + q[..., 3]**2)/n
m[..., 1, 2] = 2*(q[..., 2]*q[..., 3] - q[..., 1]*q[..., 0])/n
m[..., 2, 0] = 2*(q[..., 1]*q[..., 3] - q[..., 2]*q[..., 0])/n
m[..., 2, 1] = 2*(q[..., 2]*q[..., 3] + q[..., 1]*q[..., 0])/n
m[..., 2, 2] = 1.0 - 2*(q[..., 1]**2 + q[..., 2]**2)/n
return m
def as_rotation_vector(q):
"""Convert input quaternion to the axis-angle representation
Note that if any of the input quaternions has norm zero, no error is
raised, but NaNs will appear in the output.
Parameters
----------
q: quaternion or array of quaternions
The quaternion(s) need not be normalized, but must all be nonzero
Returns
-------
rot: float array
Output shape is q.shape+(3,). Each vector represents the axis of
the rotation, with norm proportional to the angle of the rotation in
radians.
"""
return as_float_array(2*np.log(np.normalized(q)))[..., 1:]
def from_rotation_vector(rot):
"""Convert input 3-vector in axis-angle representation to unit quaternion
Parameters
----------
rot: (Nx3) float array
Each vector represents the axis of the rotation, with norm
proportional to the angle of the rotation in radians.
Returns
-------
q: array of quaternions
Unit quaternions resulting in rotations corresponding to input
rotations. Output shape is rot.shape[:-1].
"""
rot = np.array(rot, copy=False)
quats = np.zeros(rot.shape[:-1]+(4,))
quats[..., 1:] = rot[...]/2
quats = as_quat_array(quats)
return np.exp(quats)
def as_euler_angles(q):
"""Open Pandora's Box
If somebody is trying to make you use Euler angles, tell them no, and
walk away, and go and tell your mum.
You don't want to use Euler angles. They are awful. Stay away. It's
one thing to convert from Euler angles to quaternions; at least you're
moving in the right direction. But to go the other way?! It's just not
right.
Assumes the Euler angles correspond to the quaternion R via
R = exp(alpha*z/2) * exp(beta*y/2) * exp(gamma*z/2)
The angles are naturally in radians.
NOTE: Before opening an issue reporting something "wrong" with this
function, be sure to read all of the following page, *especially* the
very last section about opening issues or pull requests.
<https://github.com/moble/quaternion/wiki/Euler-angles-are-horrible>
Parameters
----------
q: quaternion or array of quaternions
The quaternion(s) need not be normalized, but must all be nonzero
Returns
-------
alpha_beta_gamma: float array
Output shape is q.shape+(3,). These represent the angles (alpha,
beta, gamma) in radians, where the normalized input quaternion
represents `exp(alpha*z/2) * exp(beta*y/2) * exp(gamma*z/2)`.
Raises
------
AllHell
...if you try to actually use Euler angles, when you could have
been using quaternions like a sensible person.
"""
alpha_beta_gamma = np.empty(q.shape + (3,), dtype=np.float)
n = np.norm(q)
q = as_float_array(q)
alpha_beta_gamma[..., 0] = np.arctan2(q[..., 3], q[..., 0]) + np.arctan2(-q[..., 1], q[..., 2])
alpha_beta_gamma[..., 1] = 2*np.arccos(np.sqrt((q[..., 0]**2 + q[..., 3]**2)/n))
alpha_beta_gamma[..., 2] = np.arctan2(q[..., 3], q[..., 0]) - np.arctan2(-q[..., 1], q[..., 2])
return alpha_beta_gamma
def from_euler_angles(alpha_beta_gamma, beta=None, gamma=None):
"""Improve your life drastically
Assumes the Euler angles correspond to the quaternion R via
R = exp(alpha*z/2) * exp(beta*y/2) * exp(gamma*z/2)
The angles naturally must be in radians for this to make any sense.
NOTE: Before opening an issue reporting something "wrong" with this
function, be sure to read all of the following page, *especially* the
very last section about opening issues or pull requests.
<https://github.com/moble/quaternion/wiki/Euler-angles-are-horrible>
Parameters
----------
alpha_beta_gamma: float or array of floats
This argument may either contain an array with last dimension of
size 3, where those three elements describe the (alpha, beta, gamma)
radian values for each rotation; or it may contain just the alpha
values, in which case the next two arguments must also be given.
beta: None, float, or array of floats
If this array is given, it must be able to broadcast against the
first and third arguments.
gamma: None, float, or array of floats
If this array is given, it must be able to broadcast against the
first and second arguments.
Returns
-------
R: quaternion array
The shape of this array will be the same as the input, except that
the last dimension will be removed.
"""
# Figure out the input angles from either type of input
if gamma is None:
alpha_beta_gamma = np.asarray(alpha_beta_gamma, dtype=np.double)
alpha = alpha_beta_gamma[..., 0]
beta = alpha_beta_gamma[..., 1]
gamma = alpha_beta_gamma[..., 2]
else:
alpha = np.asarray(alpha_beta_gamma, dtype=np.double)
beta = np.asarray(beta, dtype=np.double)
gamma = np.asarray(gamma, dtype=np.double)
# Set up the output array
R = np.empty(np.broadcast(alpha, beta, gamma).shape + (4,), dtype=np.double)
# Compute the actual values of the quaternion components
R[..., 0] = np.cos(beta/2)*np.cos((alpha+gamma)/2) # scalar quaternion components
R[..., 1] = -np.sin(beta/2)*np.sin((alpha-gamma)/2) # x quaternion components
R[..., 2] = np.sin(beta/2)*np.cos((alpha-gamma)/2) # y quaternion components
R[..., 3] = np.cos(beta/2)*np.sin((alpha+gamma)/2) # z quaternion components
return as_quat_array(R)
def as_spherical_coords(q):
"""Return the spherical coordinates corresponding to this quaternion
Obviously, spherical coordinates do not contain as much information as a
quaternion, so this function does lose some information. However, the
returned spherical coordinates will represent the point(s) on the sphere
to which the input quaternion(s) rotate the z axis.
Parameters
----------
q: quaternion or array of quaternions
The quaternion(s) need not be normalized, but must be nonzero
Returns
-------
vartheta_varphi: float array
Output shape is q.shape+(2,). These represent the angles (vartheta,
varphi) in radians, where the normalized input quaternion represents
`exp(varphi*z/2) * exp(vartheta*y/2)`, up to an arbitrary inital
rotation about `z`.
"""
return as_euler_angles(q)[..., 1::-1]
def from_spherical_coords(theta_phi, phi=None):
"""Return the quaternion corresponding to these spherical coordinates
Assumes the spherical coordinates correspond to the quaternion R via
R = exp(phi*z/2) * exp(theta*y/2)
The angles naturally must be in radians for this to make any sense.
Note that this quaternion rotates `z` onto the point with the given
spherical coordinates, but also rotates `x` and `y` onto the usual basis
vectors (theta and phi, respectively) at that point.
Parameters
----------
theta_phi: float or array of floats
This argument may either contain an array with last dimension of
size 2, where those two elements describe the (theta, phi) values in
radians for each point; or it may contain just the theta values in
radians, in which case the next argument must also be given.
phi: None, float, or array of floats
If this array is given, it must be able to broadcast against the
first argument.
Returns
-------
R: quaternion array
If the second argument is not given to this function, the shape
will be the same as the input shape except for the last dimension,
which will be removed. If the second argument is given, this
output array will have the shape resulting from broadcasting the
two input arrays against each other.
"""
# Figure out the input angles from either type of input
if phi is None:
theta_phi = np.asarray(theta_phi, dtype=np.double)
theta = theta_phi[..., 0]
phi = theta_phi[..., 1]
else:
theta = np.asarray(theta_phi, dtype=np.double)
phi = np.asarray(phi, dtype=np.double)
# Set up the output array
R = np.empty(np.broadcast(theta, phi).shape + (4,), dtype=np.double)
# Compute the actual values of the quaternion components
R[..., 0] = np.cos(phi/2)*np.cos(theta/2) # scalar quaternion components
R[..., 1] = -np.sin(phi/2)*np.sin(theta/2) # x quaternion components
R[..., 2] = np.cos(phi/2)*np.sin(theta/2) # y quaternion components
R[..., 3] = np.sin(phi/2)*np.cos(theta/2) # z quaternion components
return as_quat_array(R)
def rotate_vectors(R, v, axis=-1):
"""Rotate vectors by given quaternions
For simplicity, this function simply converts the input
quaternion(s) to a matrix, and rotates the input vector(s) by the
usual matrix multiplication. However, it should be noted that if
each input quaternion is only used to rotate a single vector, it
is more efficient (in terms of operation counts) to use the
formula
v' = v + 2 * r x (s * v + r x v) / m
where x represents the cross product, s and r are the scalar and
vector parts of the quaternion, respectively, and m is the sum of
the squares of the components of the quaternion. If you are
looping over a very large number of quaternions, and just rotating
a single vector each time, you might want to implement that
alternative algorithm using numba (or something that doesn't use
python).
Parameters
==========
R: quaternion array
Quaternions by which to rotate the input vectors
v: float array
Three-vectors to be rotated.
axis: int
Axis of the `v` array to use as the vector dimension. This
axis of `v` must have length 3.
Returns
=======
vprime: float array
The rotated vectors. This array has shape R.shape+v.shape.
"""
R = np.asarray(R, dtype=np.quaternion)
v = np.asarray(v, dtype=float)
if v.ndim < 1 or 3 not in v.shape:
raise ValueError("Input `v` does not have at least one dimension of length 3")
if v.shape[axis] != 3:
raise ValueError("Input `v` axis {0} has length {1}, not 3.".format(axis, v.shape[axis]))
m = as_rotation_matrix(R)
m_axes = list(range(m.ndim))
v_axes = list(range(m.ndim, m.ndim+v.ndim))
mv_axes = list(v_axes)
mv_axes[axis] = m_axes[-2]
mv_axes = m_axes[:-2] + mv_axes
v_axes[axis] = m_axes[-1]
return np.einsum(m, m_axes, v, v_axes, mv_axes)
def isclose(a, b, rtol=4*np.finfo(float).eps, atol=0.0, equal_nan=False):
"""
Returns a boolean array where two arrays are element-wise equal within a
tolerance.
This function is essentially a copy of the `numpy.isclose` function,
with different default tolerances and one minor changes necessary to
deal correctly with quaternions.
The tolerance values are positive, typically very small numbers. The
relative difference (`rtol` * abs(`b`)) and the absolute difference
`atol` are added together to compare against the absolute difference
between `a` and `b`.
Parameters
----------
a, b : array_like
Input arrays to compare.
rtol : float
The relative tolerance parameter (see Notes).
atol : float
The absolute tolerance parameter (see Notes).
equal_nan : bool
Whether to compare NaN's as equal. If True, NaN's in `a` will be
considered equal to NaN's in `b` in the output array.
Returns
-------
y : array_like
Returns a boolean array of where `a` and `b` are equal within the
given tolerance. If both `a` and `b` are scalars, returns a single
boolean value.
See Also
--------
allclose
Notes
-----
For finite values, isclose uses the following equation to test whether
two floating point values are equivalent:
absolute(`a` - `b`) <= (`atol` + `rtol` * absolute(`b`))
The above equation is not symmetric in `a` and `b`, so that
`isclose(a, b)` might be different from `isclose(b, a)` in
some rare cases.
Examples
--------
>>> quaternion.isclose([1e10*quaternion.x, 1e-7*quaternion.y], [1.00001e10*quaternion.x, 1e-8*quaternion.y],
... rtol=1.e-5, atol=1.e-8)
array([True, False])
>>> quaternion.isclose([1e10*quaternion.x, 1e-8*quaternion.y], [1.00001e10*quaternion.x, 1e-9*quaternion.y],
... rtol=1.e-5, atol=1.e-8)
array([True, True])
>>> quaternion.isclose([1e10*quaternion.x, 1e-8*quaternion.y], [1.0001e10*quaternion.x, 1e-9*quaternion.y],
... rtol=1.e-5, atol=1.e-8)
array([False, True])
>>> quaternion.isclose([quaternion.x, np.nan*quaternion.y], [quaternion.x, np.nan*quaternion.y])
array([True, False])
>>> quaternion.isclose([quaternion.x, np.nan*quaternion.y], [quaternion.x, np.nan*quaternion.y], equal_nan=True)
array([True, True])
"""
def within_tol(x, y, atol, rtol):
with np.errstate(invalid='ignore'):
result = np.less_equal(abs(x-y), atol + rtol * abs(y))
if np.isscalar(a) and np.isscalar(b):
result = bool(result)
return result
x = np.array(a, copy=False, subok=True, ndmin=1)
y = np.array(b, copy=False, subok=True, ndmin=1)
# Make sure y is an inexact type to avoid bad behavior on abs(MIN_INT).
# This will cause casting of x later. Also, make sure to allow subclasses
# (e.g., for numpy.ma).
try:
dt = np.result_type(y, 1.)
except TypeError:
dt = np.dtype(np.quaternion)
y = np.array(y, dtype=dt, copy=False, subok=True)
xfin = np.isfinite(x)
yfin = np.isfinite(y)
if np.all(xfin) and np.all(yfin):
return within_tol(x, y, atol, rtol)
else:
finite = xfin & yfin
cond = np.zeros_like(finite, subok=True)
# Because we're using boolean indexing, x & y must be the same shape.
# Ideally, we'd just do x, y = broadcast_arrays(x, y). It's in
# lib.stride_tricks, though, so we can't import it here.
x = x * np.ones_like(cond)
y = y * np.ones_like(cond)
# Avoid subtraction with infinite/nan values...
cond[finite] = within_tol(x[finite], y[finite], atol, rtol)
# Check for equality of infinite values...
cond[~finite] = (x[~finite] == y[~finite])
if equal_nan:
# Make NaN == NaN
both_nan = np.isnan(x) & np.isnan(y)
cond[both_nan] = both_nan[both_nan]
if np.isscalar(a) and np.isscalar(b):
return bool(cond)
else:
return cond
def allclose(a, b, rtol=4*np.finfo(float).eps, atol=0.0, equal_nan=False, verbose=False):
"""
Returns True if two arrays are element-wise equal within a tolerance.
This function is essentially a wrapper for the `quaternion.isclose`
function, but returns a single boolean value of True if all elements
of the output from `quaternion.isclose` are True, and False otherwise.
This function also adds the option.
Note that this function has stricter tolerances than the
`numpy.allclose` function, as well as the additional `verbose` option.
Parameters
----------
a, b : array_like
Input arrays to compare.
rtol : float
The relative tolerance parameter (see Notes).
atol : float
The absolute tolerance parameter (see Notes).
equal_nan : bool
Whether to compare NaN's as equal. If True, NaN's in `a` will be
considered equal to NaN's in `b` in the output array.
verbose : bool
If the return value is False,
Returns
-------
allclose : bool
Returns True if the two arrays are equal within the given
tolerance; False otherwise.
See Also
--------
isclose, numpy.all, numpy.any, numpy.allclose
Returns
-------
allclose : bool
Returns True if the two arrays are equal within the given
tolerance; False otherwise.
Notes
-----
If the following equation is element-wise True, then allclose returns
True.
absolute(`a` - `b`) <= (`atol` + `rtol` * absolute(`b`))
The above equation is not symmetric in `a` and `b`, so that
`allclose(a, b)` might be different from `allclose(b, a)` in
some rare cases.
"""
close = isclose(a, b, rtol=rtol, atol=atol, equal_nan=equal_nan)
result = np.all(close)
if verbose and not result:
print('Non-close values:')
for i in np.argwhere(close == False):
i = tuple(i)
print('\n x[{0}]={1}\n y[{0}]={2}'.format(i, a[i], b[i]))
return result
|
moble/quaternion
|
__init__.py
|
from_rotation_vector
|
python
|
def from_rotation_vector(rot):
rot = np.array(rot, copy=False)
quats = np.zeros(rot.shape[:-1]+(4,))
quats[..., 1:] = rot[...]/2
quats = as_quat_array(quats)
return np.exp(quats)
|
Convert input 3-vector in axis-angle representation to unit quaternion
Parameters
----------
rot: (Nx3) float array
Each vector represents the axis of the rotation, with norm
proportional to the angle of the rotation in radians.
Returns
-------
q: array of quaternions
Unit quaternions resulting in rotations corresponding to input
rotations. Output shape is rot.shape[:-1].
|
train
|
https://github.com/moble/quaternion/blob/7a323e81b391d6892e2874073e495e0beb057e85/__init__.py#L351-L371
|
[
"def as_quat_array(a):\n \"\"\"View a float array as an array of quaternions\n\n The input array must have a final dimension whose size is\n divisible by four (or better yet *is* 4), because successive\n indices in that last dimension will be considered successive\n components of the output quaternion.\n\n This function is usually fast (of order 1 microsecond) because no\n data is copied; the returned quantity is just a \"view\" of the\n original. However, if the input array is not C-contiguous\n (basically, as you increment the index into the last dimension of\n the array, you just move to the neighboring float in memory), the\n data will need to be copied which may be quite slow. Therefore,\n you should try to ensure that the input array is in that order.\n Slices and transpositions will frequently break that rule.\n\n We will not convert back from a two-spinor array because there is\n no unique convention for them, so I don't want to mess with that.\n Also, we want to discourage users from the slow, memory-copying\n process of swapping columns required for useful definitions of\n the two-spinors.\n\n \"\"\"\n a = np.asarray(a, dtype=np.double)\n\n # fast path\n if a.shape == (4,):\n return quaternion(a[0], a[1], a[2], a[3])\n\n # view only works if the last axis is C-contiguous\n if not a.flags['C_CONTIGUOUS'] or a.strides[-1] != a.itemsize:\n a = a.copy(order='C')\n try:\n av = a.view(np.quaternion)\n except ValueError as e:\n message = (str(e) + '\\n '\n + 'Failed to view input data as a series of quaternions. '\n + 'Please ensure that the last dimension has size divisible by 4.\\n '\n + 'Input data has shape {0} and dtype {1}.'.format(a.shape, a.dtype))\n raise ValueError(message)\n\n # special case: don't create an axis for a single quaternion, to\n # match the output of `as_float_array`\n if av.shape[-1] == 1:\n av = av.reshape(a.shape[:-1])\n\n return av\n"
] |
# Copyright (c) 2017, Michael Boyle
# See LICENSE file for details: <https://github.com/moble/quaternion/blob/master/LICENSE>
from __future__ import division, print_function, absolute_import
import numpy as np
from .numpy_quaternion import (quaternion, _eps,
slerp_evaluate, squad_evaluate,
# slerp_vectorized, squad_vectorized,
# slerp, squad,
)
from .quaternion_time_series import slerp, squad, integrate_angular_velocity, minimal_rotation
from .calculus import derivative, definite_integral, indefinite_integral
from .means import mean_rotor_in_chordal_metric, optimal_alignment_in_chordal_metric
from ._version import __version__
# Human-readable title; the short description below becomes the module docstring.
__doc_title__ = "Quaternion dtype for NumPy"
__doc__ = "Adds a quaternion dtype to NumPy."
# Public API of the package; names not listed here are internal.
__all__ = ['quaternion',
           'as_quat_array', 'as_spinor_array',
           'as_float_array', 'from_float_array',
           'as_rotation_matrix', 'from_rotation_matrix',
           'as_rotation_vector', 'from_rotation_vector',
           'as_euler_angles', 'from_euler_angles',
           'as_spherical_coords', 'from_spherical_coords',
           'rotate_vectors', 'allclose',
           'rotor_intrinsic_distance', 'rotor_chordal_distance',
           'rotation_intrinsic_distance', 'rotation_chordal_distance',
           'slerp_evaluate', 'squad_evaluate',
           'zero', 'one', 'x', 'y', 'z', 'integrate_angular_velocity',
           'squad', 'slerp', 'derivative', 'definite_integral', 'indefinite_integral']
# Refuse to load if some other package has already put a `quaternion`
# attribute on NumPy -- the registration below would silently clash with it.
if 'quaternion' in np.__dict__:
    raise RuntimeError('The NumPy package already has a quaternion type')
# Expose the new scalar type and its dtype through the numpy namespace.
# NOTE(review): `np.typeDict` is a deprecated alias of `np.sctypeDict` in
# recent NumPy releases -- confirm the minimum supported NumPy version.
np.quaternion = quaternion
np.typeDict['quaternion'] = np.dtype(quaternion)
# Handy constants: additive zero, multiplicative identity, and the three
# basis imaginary units.
zero = np.quaternion(0, 0, 0, 0)
one = np.quaternion(1, 0, 0, 0)
x = np.quaternion(0, 1, 0, 0)
y = np.quaternion(0, 0, 1, 0)
z = np.quaternion(0, 0, 0, 1)
# Re-export the distance functions found on the numpy namespace
# (presumably registered there by the compiled extension -- confirm).
rotor_intrinsic_distance = np.rotor_intrinsic_distance
rotor_chordal_distance = np.rotor_chordal_distance
rotation_intrinsic_distance = np.rotation_intrinsic_distance
rotation_chordal_distance = np.rotation_chordal_distance
def as_float_array(a):
    """View the quaternion array as an array of floats
    No data is copied -- the result is merely a "view" of the input --
    so this is fast (of order 1 microsecond).
    The output carries one extra trailing dimension of size 4 (the
    quaternion components) but is otherwise the same shape as the input.
    """
    quats = np.asarray(a, dtype=np.quaternion)
    return quats.view((np.double, 4))
def as_quat_array(a):
    """View a float array as an array of quaternions
    The last dimension of the input must have a size divisible by four
    (ideally exactly 4); consecutive floats along it become the
    consecutive components of each output quaternion.
    Normally no data is copied, so this is fast (of order 1
    microsecond): the result is just a "view" of the input.  If the
    input is not C-contiguous, however, it must first be copied, which
    may be quite slow; slices and transpositions frequently break
    contiguity, so try to pass contiguous data.
    No conversion from two-spinor arrays is offered, because there is
    no unique convention for them, and the column swapping they would
    require is a slow, memory-copying operation anyway.
    """
    a = np.asarray(a, dtype=np.double)
    # Fast path: a bare 4-vector is a single quaternion; skip the view
    # machinery entirely.
    if a.shape == (4,):
        return quaternion(a[0], a[1], a[2], a[3])
    # The reinterpreting view below requires the last axis to be
    # C-contiguous; make a contiguous copy otherwise.
    contiguous = a.flags['C_CONTIGUOUS'] and a.strides[-1] == a.itemsize
    if not contiguous:
        a = a.copy(order='C')
    try:
        quats = a.view(np.quaternion)
    except ValueError as e:
        message = (str(e) + '\n '
                   + 'Failed to view input data as a series of quaternions. '
                   + 'Please ensure that the last dimension has size divisible by 4.\n '
                   + 'Input data has shape {0} and dtype {1}.'.format(a.shape, a.dtype))
        raise ValueError(message)
    # Drop a spurious trailing axis of length 1 (one quaternion per
    # row), matching the shape convention of `as_float_array`.
    if quats.shape[-1] == 1:
        quats = quats.reshape(a.shape[:-1])
    return quats
def from_float_array(a):
    """Convert a float array to a quaternion array (alias of `as_quat_array`)."""
    return as_quat_array(a)
def as_spinor_array(a):
    """View a quaternion array as spinors in two-complex representation
    This function is relatively slow and scales poorly, because memory
    copying is apparently involved -- I think it's due to the
    "advanced indexing" required to swap the columns.
    Parameters
    ----------
    a : quaternion array
        Input quaternions; a scalar input is promoted to a 1-d array.
    Returns
    -------
    spinors : complex array
        Output shape is a.shape+(2,); the float components are paired
        into complex values as (w + z*1j, y + x*1j), per the column
        permutation below.
    """
    a = np.atleast_1d(a)
    assert a.dtype == np.dtype(np.quaternion)
    # Reinterpret as floats, permute the components to (w, z, y, x) so
    # consecutive pairs form the desired complex numbers, then
    # reinterpret as complex.  The builtins `float`/`complex` replace
    # the `np.float`/`np.complex` aliases removed in NumPy 1.24.
    return a.view(float).reshape(a.shape + (4,))[..., [0, 3, 2, 1]].ravel().view(complex).reshape(a.shape + (2,))
def as_rotation_matrix(q):
    """Convert input quaternion to 3x3 rotation matrix
    Parameters
    ----------
    q: quaternion or array of quaternions
        The quaternion(s) need not be normalized, but must all be nonzero
    Returns
    -------
    rot: float array
        Output shape is q.shape+(3,3). This matrix should multiply (from
        the left) a column vector to produce the rotated column vector.
    Raises
    ------
    ZeroDivisionError
        If any of the input quaternions have norm 0.0.
    """
    # NOTE(review): the `norm` used below appears to be the *squared*
    # magnitude -- the matrix entries divide products of two components
    # by n, which is only correct for the squared norm.  Confirm against
    # the package's norm convention.
    if q.shape == () and not isinstance(q, np.ndarray): # This is just a single quaternion
        n = q.norm()
        if n == 0.0:
            raise ZeroDivisionError("Input to `as_rotation_matrix({0})` has zero norm".format(q))
        elif abs(n-1.0) < _eps: # Input q is basically normalized
            # Unit-quaternion rotation matrix; the division by n is skipped.
            return np.array([
                [1 - 2*(q.y**2 + q.z**2), 2*(q.x*q.y - q.z*q.w), 2*(q.x*q.z + q.y*q.w)],
                [2*(q.x*q.y + q.z*q.w), 1 - 2*(q.x**2 + q.z**2), 2*(q.y*q.z - q.x*q.w)],
                [2*(q.x*q.z - q.y*q.w), 2*(q.y*q.z + q.x*q.w), 1 - 2*(q.x**2 + q.y**2)]
            ])
        else: # Input q is not normalized
            # Same formula with each quadratic term normalized by n.
            return np.array([
                [1 - 2*(q.y**2 + q.z**2)/n, 2*(q.x*q.y - q.z*q.w)/n, 2*(q.x*q.z + q.y*q.w)/n],
                [2*(q.x*q.y + q.z*q.w)/n, 1 - 2*(q.x**2 + q.z**2)/n, 2*(q.y*q.z - q.x*q.w)/n],
                [2*(q.x*q.z - q.y*q.w)/n, 2*(q.y*q.z + q.x*q.w)/n, 1 - 2*(q.x**2 + q.y**2)/n]
            ])
    else: # This is an array of quaternions
        n = np.norm(q)
        if np.any(n == 0.0):
            raise ZeroDivisionError("Array input to `as_rotation_matrix` has at least one element with zero norm")
        else: # Assume input q is not normalized
            # Build matrices component-wise from the float view; last-axis
            # indices 0..3 of `q` are the (w, x, y, z) components.
            m = np.empty(q.shape + (3, 3))
            q = as_float_array(q)
            m[..., 0, 0] = 1.0 - 2*(q[..., 2]**2 + q[..., 3]**2)/n
            m[..., 0, 1] = 2*(q[..., 1]*q[..., 2] - q[..., 3]*q[..., 0])/n
            m[..., 0, 2] = 2*(q[..., 1]*q[..., 3] + q[..., 2]*q[..., 0])/n
            m[..., 1, 0] = 2*(q[..., 1]*q[..., 2] + q[..., 3]*q[..., 0])/n
            m[..., 1, 1] = 1.0 - 2*(q[..., 1]**2 + q[..., 3]**2)/n
            m[..., 1, 2] = 2*(q[..., 2]*q[..., 3] - q[..., 1]*q[..., 0])/n
            m[..., 2, 0] = 2*(q[..., 1]*q[..., 3] - q[..., 2]*q[..., 0])/n
            m[..., 2, 1] = 2*(q[..., 2]*q[..., 3] + q[..., 1]*q[..., 0])/n
            m[..., 2, 2] = 1.0 - 2*(q[..., 1]**2 + q[..., 2]**2)/n
            return m
def from_rotation_matrix(rot, nonorthogonal=True):
    """Convert input 3x3 rotation matrix to unit quaternion
    By default, if scipy.linalg is available, this function uses
    Bar-Itzhack's algorithm to allow for non-orthogonal matrices.
    [J. Guidance, Vol. 23, No. 6, p. 1085 <http://dx.doi.org/10.2514/2.4654>]
    This will almost certainly be quite a bit slower than simpler versions,
    though it will be more robust to numerical errors in the rotation matrix.
    Also note that Bar-Itzhack uses some pretty weird conventions. The last
    component of the quaternion appears to represent the scalar, and the
    quaternion itself is conjugated relative to the convention used
    throughout this module.
    If scipy.linalg is not available or if the optional
    `nonorthogonal` parameter is set to `False`, this function falls
    back to the possibly faster, but less robust, algorithm of Markley
    [J. Guidance, Vol. 31, No. 2, p. 440
    <http://dx.doi.org/10.2514/1.31730>].
    Parameters
    ----------
    rot: (...Nx3x3) float array
        Each 3x3 matrix represents a rotation by multiplying (from the left)
        a column vector to produce a rotated column vector. Note that this
        input may actually have ndims>3; it is just assumed that the last
        two dimensions have size 3, representing the matrix.
    nonorthogonal: bool, optional
        If scipy.linalg is available, use the more robust algorithm of
        Bar-Itzhack. Default value is True.
    Returns
    -------
    q: array of quaternions
        Unit quaternions resulting in rotations corresponding to input
        rotations. Output shape is rot.shape[:-2].
    Raises
    ------
    LinAlgError
        If any of the eigenvalue solutions does not converge
    """
    try:
        from scipy import linalg
    except ImportError:
        linalg = False
    rot = np.array(rot, copy=False)
    shape = rot.shape[:-2]
    if linalg and nonorthogonal:
        from operator import mul
        from functools import reduce
        # Bar-Itzhack: build the symmetric 4x4 K3 matrix whose dominant
        # eigenvector encodes the optimal quaternion for each matrix.
        K3 = np.empty(shape+(4, 4))
        K3[..., 0, 0] = (rot[..., 0, 0] - rot[..., 1, 1] - rot[..., 2, 2])/3.0
        K3[..., 0, 1] = (rot[..., 1, 0] + rot[..., 0, 1])/3.0
        K3[..., 0, 2] = (rot[..., 2, 0] + rot[..., 0, 2])/3.0
        K3[..., 0, 3] = (rot[..., 1, 2] - rot[..., 2, 1])/3.0
        K3[..., 1, 0] = K3[..., 0, 1]
        K3[..., 1, 1] = (rot[..., 1, 1] - rot[..., 0, 0] - rot[..., 2, 2])/3.0
        K3[..., 1, 2] = (rot[..., 2, 1] + rot[..., 1, 2])/3.0
        K3[..., 1, 3] = (rot[..., 2, 0] - rot[..., 0, 2])/3.0
        K3[..., 2, 0] = K3[..., 0, 2]
        K3[..., 2, 1] = K3[..., 1, 2]
        K3[..., 2, 2] = (rot[..., 2, 2] - rot[..., 0, 0] - rot[..., 1, 1])/3.0
        K3[..., 2, 3] = (rot[..., 0, 1] - rot[..., 1, 0])/3.0
        K3[..., 3, 0] = K3[..., 0, 3]
        K3[..., 3, 1] = K3[..., 1, 3]
        K3[..., 3, 2] = K3[..., 2, 3]
        K3[..., 3, 3] = (rot[..., 0, 0] + rot[..., 1, 1] + rot[..., 2, 2])/3.0
        if not shape:
            # Single matrix: take the eigenvector of the largest
            # eigenvalue and convert from Bar-Itzhack's conventions
            # (scalar component last, conjugated) to this module's.
            q = zero.copy()
            eigvals, eigvecs = linalg.eigh(K3.T, eigvals=(3, 3))
            q.components[0] = eigvecs[-1]
            q.components[1:] = -eigvecs[:-1].flatten()
            return q
        else:
            # The builtin `float` replaces the `np.float` alias, which
            # was removed in NumPy 1.24 (it always meant float64 here).
            q = np.empty(shape+(4,), dtype=float)
            for flat_index in range(reduce(mul, shape)):
                multi_index = np.unravel_index(flat_index, shape)
                eigvals, eigvecs = linalg.eigh(K3[multi_index], eigvals=(3, 3))
                q[multi_index+(0,)] = eigvecs[-1]
                q[multi_index+(slice(1,None),)] = -eigvecs[:-1].flatten()
            return as_quat_array(q)
    else: # No scipy.linalg or not `nonorthogonal`
        # Markley's method: per matrix, pick the largest of four
        # diagonal-based candidates (to avoid cancellation), then fill
        # the quaternion components from the corresponding entries.
        diagonals = np.empty(shape+(4,))
        diagonals[..., 0] = rot[..., 0, 0]
        diagonals[..., 1] = rot[..., 1, 1]
        diagonals[..., 2] = rot[..., 2, 2]
        diagonals[..., 3] = rot[..., 0, 0] + rot[..., 1, 1] + rot[..., 2, 2]
        indices = np.argmax(diagonals, axis=-1)
        q = diagonals  # reuse storage space
        indices_i = (indices == 0)
        if np.any(indices_i):
            if indices_i.shape == ():
                indices_i = Ellipsis
            rot_i = rot[indices_i, :, :]
            q[indices_i, 0] = rot_i[..., 2, 1] - rot_i[..., 1, 2]
            q[indices_i, 1] = 1 + rot_i[..., 0, 0] - rot_i[..., 1, 1] - rot_i[..., 2, 2]
            q[indices_i, 2] = rot_i[..., 0, 1] + rot_i[..., 1, 0]
            q[indices_i, 3] = rot_i[..., 0, 2] + rot_i[..., 2, 0]
        indices_i = (indices == 1)
        if np.any(indices_i):
            if indices_i.shape == ():
                indices_i = Ellipsis
            rot_i = rot[indices_i, :, :]
            q[indices_i, 0] = rot_i[..., 0, 2] - rot_i[..., 2, 0]
            q[indices_i, 1] = rot_i[..., 1, 0] + rot_i[..., 0, 1]
            q[indices_i, 2] = 1 - rot_i[..., 0, 0] + rot_i[..., 1, 1] - rot_i[..., 2, 2]
            q[indices_i, 3] = rot_i[..., 1, 2] + rot_i[..., 2, 1]
        indices_i = (indices == 2)
        if np.any(indices_i):
            if indices_i.shape == ():
                indices_i = Ellipsis
            rot_i = rot[indices_i, :, :]
            q[indices_i, 0] = rot_i[..., 1, 0] - rot_i[..., 0, 1]
            q[indices_i, 1] = rot_i[..., 2, 0] + rot_i[..., 0, 2]
            q[indices_i, 2] = rot_i[..., 2, 1] + rot_i[..., 1, 2]
            q[indices_i, 3] = 1 - rot_i[..., 0, 0] - rot_i[..., 1, 1] + rot_i[..., 2, 2]
        indices_i = (indices == 3)
        if np.any(indices_i):
            if indices_i.shape == ():
                indices_i = Ellipsis
            rot_i = rot[indices_i, :, :]
            q[indices_i, 0] = 1 + rot_i[..., 0, 0] + rot_i[..., 1, 1] + rot_i[..., 2, 2]
            q[indices_i, 1] = rot_i[..., 2, 1] - rot_i[..., 1, 2]
            q[indices_i, 2] = rot_i[..., 0, 2] - rot_i[..., 2, 0]
            q[indices_i, 3] = rot_i[..., 1, 0] - rot_i[..., 0, 1]
        # Normalize each candidate to a unit quaternion.
        q /= np.linalg.norm(q, axis=-1)[..., np.newaxis]
        return as_quat_array(q)
def as_rotation_vector(q):
    """Convert input quaternion to the axis-angle representation
    No error is raised for zero-norm inputs; NaNs simply appear in the
    output instead.
    Parameters
    ----------
    q: quaternion or array of quaternions
        The quaternion(s) need not be normalized, but must all be nonzero
    Returns
    -------
    rot: float array
        Output shape is q.shape+(3,).  Each vector points along the
        rotation axis, with norm proportional to the rotation angle in
        radians.
    """
    # Twice the log of the normalized quaternion has the rotation vector
    # in its vector part; drop the scalar component.
    log_q = np.log(np.normalized(q))
    return as_float_array(2*log_q)[..., 1:]
def as_euler_angles(q):
    """Open Pandora's Box
    If somebody is trying to make you use Euler angles, tell them no, and
    walk away, and go and tell your mum.
    You don't want to use Euler angles. They are awful. Stay away. It's
    one thing to convert from Euler angles to quaternions; at least you're
    moving in the right direction. But to go the other way?! It's just not
    right.
    Assumes the Euler angles correspond to the quaternion R via
        R = exp(alpha*z/2) * exp(beta*y/2) * exp(gamma*z/2)
    The angles are naturally in radians.
    NOTE: Before opening an issue reporting something "wrong" with this
    function, be sure to read all of the following page, *especially* the
    very last section about opening issues or pull requests.
    <https://github.com/moble/quaternion/wiki/Euler-angles-are-horrible>
    Parameters
    ----------
    q: quaternion or array of quaternions
        The quaternion(s) need not be normalized, but must all be nonzero
    Returns
    -------
    alpha_beta_gamma: float array
        Output shape is q.shape+(3,). These represent the angles (alpha,
        beta, gamma) in radians, where the normalized input quaternion
        represents `exp(alpha*z/2) * exp(beta*y/2) * exp(gamma*z/2)`.
    Raises
    ------
    AllHell
        ...if you try to actually use Euler angles, when you could have
        been using quaternions like a sensible person.
    """
    # The builtin `float` replaces the `np.float` alias, which was
    # removed in NumPy 1.24 (it always meant float64 here).
    alpha_beta_gamma = np.empty(q.shape + (3,), dtype=float)
    # NOTE(review): `np.norm` here appears to be the squared magnitude,
    # matching the division inside the arccos below -- confirm against
    # the package's norm convention.
    n = np.norm(q)
    q = as_float_array(q)
    alpha_beta_gamma[..., 0] = np.arctan2(q[..., 3], q[..., 0]) + np.arctan2(-q[..., 1], q[..., 2])
    alpha_beta_gamma[..., 1] = 2*np.arccos(np.sqrt((q[..., 0]**2 + q[..., 3]**2)/n))
    alpha_beta_gamma[..., 2] = np.arctan2(q[..., 3], q[..., 0]) - np.arctan2(-q[..., 1], q[..., 2])
    return alpha_beta_gamma
def from_euler_angles(alpha_beta_gamma, beta=None, gamma=None):
    """Improve your life drastically
    The Euler angles are assumed to map onto the quaternion R via
        R = exp(alpha*z/2) * exp(beta*y/2) * exp(gamma*z/2)
    with all angles in radians.
    NOTE: Before opening an issue reporting something "wrong" with this
    function, be sure to read all of the following page, *especially* the
    very last section about opening issues or pull requests.
    <https://github.com/moble/quaternion/wiki/Euler-angles-are-horrible>
    Parameters
    ----------
    alpha_beta_gamma: float or array of floats
        Either an array whose last dimension has size 3, holding the
        (alpha, beta, gamma) radian values of each rotation; or just the
        alpha values, in which case the next two arguments must also be
        given.
    beta: None, float, or array of floats
        If given, must broadcast against the first and third arguments.
    gamma: None, float, or array of floats
        If given, must broadcast against the first and second arguments.
    Returns
    -------
    R: quaternion array
        Same shape as the (broadcast) input, except that the last
        dimension is removed in the packed-argument case.
    """
    if gamma is None:
        # Packed form: the last axis holds (alpha, beta, gamma).
        packed = np.asarray(alpha_beta_gamma, dtype=np.double)
        alpha = packed[..., 0]
        beta = packed[..., 1]
        gamma = packed[..., 2]
    else:
        alpha = np.asarray(alpha_beta_gamma, dtype=np.double)
        beta = np.asarray(beta, dtype=np.double)
        gamma = np.asarray(gamma, dtype=np.double)
    # Half-angle combinations appearing in the component formulas.
    half_sum = (alpha+gamma)/2
    half_diff = (alpha-gamma)/2
    cb = np.cos(beta/2)
    sb = np.sin(beta/2)
    R = np.empty(np.broadcast(alpha, beta, gamma).shape + (4,), dtype=np.double)
    R[..., 0] = cb*np.cos(half_sum)    # scalar (w) components
    R[..., 1] = -sb*np.sin(half_diff)  # x components
    R[..., 2] = sb*np.cos(half_diff)   # y components
    R[..., 3] = cb*np.sin(half_sum)    # z components
    return as_quat_array(R)
def as_spherical_coords(q):
    """Return the spherical coordinates corresponding to this quaternion
    Spherical coordinates carry less information than a quaternion, so
    some information is necessarily lost here.  The returned coordinates
    are those of the point(s) on the sphere onto which the input
    quaternion(s) rotate the z axis.
    Parameters
    ----------
    q: quaternion or array of quaternions
        The quaternion(s) need not be normalized, but must be nonzero
    Returns
    -------
    vartheta_varphi: float array
        Output shape is q.shape+(2,), holding the angles (vartheta,
        varphi) in radians; the normalized input represents
        `exp(varphi*z/2) * exp(vartheta*y/2)`, up to an arbitrary
        initial rotation about `z`.
    """
    # Euler angles come back as (alpha, beta, gamma); the spherical
    # coordinates are (beta, alpha), i.e. the first two, reversed.
    angles = as_euler_angles(q)
    return angles[..., 1::-1]
def from_spherical_coords(theta_phi, phi=None):
    """Return the quaternion corresponding to these spherical coordinates
    The spherical coordinates are assumed to map onto the quaternion via
        R = exp(phi*z/2) * exp(theta*y/2)
    with both angles in radians.  This quaternion rotates `z` onto the
    point with the given spherical coordinates, and also rotates `x` and
    `y` onto the usual basis vectors (theta and phi, respectively) at
    that point.
    Parameters
    ----------
    theta_phi: float or array of floats
        Either an array whose last dimension has size 2, holding the
        (theta, phi) radian values of each point; or just the theta
        values in radians, in which case the next argument must also be
        given.
    phi: None, float, or array of floats
        If given, must broadcast against the first argument.
    Returns
    -------
    R: quaternion array
        With packed input, the input shape minus the last dimension;
        with two arguments, the broadcast shape of the two inputs.
    """
    if phi is None:
        # Packed form: the last axis holds (theta, phi).
        packed = np.asarray(theta_phi, dtype=np.double)
        theta = packed[..., 0]
        phi = packed[..., 1]
    else:
        theta = np.asarray(theta_phi, dtype=np.double)
        phi = np.asarray(phi, dtype=np.double)
    ct = np.cos(theta/2)
    st = np.sin(theta/2)
    cp = np.cos(phi/2)
    sp = np.sin(phi/2)
    R = np.empty(np.broadcast(theta, phi).shape + (4,), dtype=np.double)
    R[..., 0] = cp*ct   # scalar (w) components
    R[..., 1] = -sp*st  # x components
    R[..., 2] = cp*st   # y components
    R[..., 3] = sp*ct   # z components
    return as_quat_array(R)
def rotate_vectors(R, v, axis=-1):
    """Rotate vectors by given quaternions
    For simplicity, this function simply converts the input
    quaternion(s) to a matrix, and rotates the input vector(s) by the
    usual matrix multiplication. However, it should be noted that if
    each input quaternion is only used to rotate a single vector, it
    is more efficient (in terms of operation counts) to use the
    formula
        v' = v + 2 * r x (s * v + r x v) / m
    where x represents the cross product, s and r are the scalar and
    vector parts of the quaternion, respectively, and m is the sum of
    the squares of the components of the quaternion. If you are
    looping over a very large number of quaternions, and just rotating
    a single vector each time, you might want to implement that
    alternative algorithm using numba (or something that doesn't use
    python).
    Parameters
    ==========
    R: quaternion array
        Quaternions by which to rotate the input vectors
    v: float array
        Three-vectors to be rotated.
    axis: int
        Axis of the `v` array to use as the vector dimension. This
        axis of `v` must have length 3.
    Returns
    =======
    vprime: float array
        The rotated vectors. This array has shape R.shape+v.shape.
    """
    R = np.asarray(R, dtype=np.quaternion)
    v = np.asarray(v, dtype=float)
    if v.ndim < 1 or 3 not in v.shape:
        raise ValueError("Input `v` does not have at least one dimension of length 3")
    if v.shape[axis] != 3:
        raise ValueError("Input `v` axis {0} has length {1}, not 3.".format(axis, v.shape[axis]))
    # Convert each quaternion to a 3x3 matrix; m has shape R.shape+(3,3).
    m = as_rotation_matrix(R)
    # Label the axes for einsum's integer-index interface: m's axes get
    # labels 0..m.ndim-1 (the last two being the 3x3 matrix), and v's
    # axes get fresh labels after those.
    m_axes = list(range(m.ndim))
    v_axes = list(range(m.ndim, m.ndim+v.ndim))
    # Output axes: m's leading (quaternion) axes, then v's axes with the
    # vector axis replaced by the matrix row axis, so the rotated
    # components land where the input components were.
    mv_axes = list(v_axes)
    mv_axes[axis] = m_axes[-2]
    mv_axes = m_axes[:-2] + mv_axes
    # Contract the matrix column axis against v's vector axis.
    v_axes[axis] = m_axes[-1]
    return np.einsum(m, m_axes, v, v_axes, mv_axes)
def isclose(a, b, rtol=4*np.finfo(float).eps, atol=0.0, equal_nan=False):
    """
    Return a boolean array marking where two arrays are element-wise
    equal within a tolerance.
    Essentially a copy of the `numpy.isclose` function, with tighter
    default tolerances and one minor change needed to deal correctly
    with quaternions.
    The tolerance values are positive, typically very small numbers.
    The relative difference (`rtol` * abs(`b`)) and the absolute
    difference `atol` are added together and compared against the
    absolute difference between `a` and `b`.
    Parameters
    ----------
    a, b : array_like
        Input arrays to compare.
    rtol : float
        The relative tolerance parameter (see Notes).
    atol : float
        The absolute tolerance parameter (see Notes).
    equal_nan : bool
        If True, NaN's in `a` are considered equal to NaN's in `b`.
    Returns
    -------
    y : array_like
        Boolean array of where `a` and `b` are equal within the given
        tolerance; a single bool if both inputs are scalars.
    See Also
    --------
    allclose
    Notes
    -----
    The test performed for finite values is
        absolute(`a` - `b`) <= (`atol` + `rtol` * absolute(`b`))
    which is not symmetric in `a` and `b`, so `isclose(a, b)` might
    differ from `isclose(b, a)` in some rare cases.
    """
    def _within_tol(lhs, rhs, atol, rtol):
        # Elementwise tolerance test; invalid-value warnings suppressed.
        with np.errstate(invalid='ignore'):
            ok = np.less_equal(abs(lhs-rhs), atol + rtol * abs(rhs))
        if np.isscalar(a) and np.isscalar(b):
            ok = bool(ok)
        return ok

    x = np.array(a, copy=False, subok=True, ndmin=1)
    y = np.array(b, copy=False, subok=True, ndmin=1)
    # Cast y to an inexact type to avoid bad behavior on abs(MIN_INT);
    # x is cast later.  Subclasses (e.g. numpy.ma) are preserved.
    try:
        dt = np.result_type(y, 1.)
    except TypeError:
        dt = np.dtype(np.quaternion)
    y = np.array(y, dtype=dt, copy=False, subok=True)

    xfin = np.isfinite(x)
    yfin = np.isfinite(y)
    if np.all(xfin) and np.all(yfin):
        return _within_tol(x, y, atol, rtol)
    finite = xfin & yfin
    cond = np.zeros_like(finite, subok=True)
    # Boolean indexing needs x and y broadcast to a common shape; the
    # multiplication below does it without importing broadcast_arrays.
    x = x * np.ones_like(cond)
    y = y * np.ones_like(cond)
    # Compare finite entries normally; avoid subtracting inf/nan values.
    cond[finite] = _within_tol(x[finite], y[finite], atol, rtol)
    # Infinities (and NaNs, so far) must match exactly.
    cond[~finite] = (x[~finite] == y[~finite])
    if equal_nan:
        # Optionally declare NaN == NaN.
        both_nan = np.isnan(x) & np.isnan(y)
        cond[both_nan] = both_nan[both_nan]
    if np.isscalar(a) and np.isscalar(b):
        return bool(cond)
    return cond
def allclose(a, b, rtol=4*np.finfo(float).eps, atol=0.0, equal_nan=False, verbose=False):
    """
    Returns True if two arrays are element-wise equal within a tolerance.
    This function is essentially a wrapper for the `quaternion.isclose`
    function, but returns a single boolean value of True if all elements
    of the output from `quaternion.isclose` are True, and False otherwise.
    Note that this function has stricter default tolerances than the
    `numpy.allclose` function, as well as the additional `verbose` option.
    Parameters
    ----------
    a, b : array_like
        Input arrays to compare.
    rtol : float
        The relative tolerance parameter (see Notes).
    atol : float
        The absolute tolerance parameter (see Notes).
    equal_nan : bool
        Whether to compare NaN's as equal.  If True, NaN's in `a` will be
        considered equal to NaN's in `b` in the output array.
    verbose : bool
        If True and the result is False, print each pair of elements
        that failed the tolerance check.
    Returns
    -------
    allclose : bool
        Returns True if the two arrays are equal within the given
        tolerance; False otherwise.
    See Also
    --------
    isclose, numpy.all, numpy.any, numpy.allclose
    Notes
    -----
    If the following equation is element-wise True, then allclose returns
    True.
        absolute(`a` - `b`) <= (`atol` + `rtol` * absolute(`b`))
    The above equation is not symmetric in `a` and `b`, so that
    `allclose(a, b)` might be different from `allclose(b, a)` in
    some rare cases.
    """
    close = isclose(a, b, rtol=rtol, atol=atol, equal_nan=equal_nan)
    result = np.all(close)
    if verbose and not result:
        # Report the index and values of every non-close element pair.
        # (`np.logical_not` rather than `== False` -- same result, and it
        # also behaves correctly when `close` is a plain Python bool.)
        print('Non-close values:')
        for i in np.argwhere(np.logical_not(close)):
            i = tuple(i)
            print('\n x[{0}]={1}\n y[{0}]={2}'.format(i, a[i], b[i]))
    return result
|
moble/quaternion
|
__init__.py
|
as_euler_angles
|
python
|
def as_euler_angles(q):
alpha_beta_gamma = np.empty(q.shape + (3,), dtype=np.float)
n = np.norm(q)
q = as_float_array(q)
alpha_beta_gamma[..., 0] = np.arctan2(q[..., 3], q[..., 0]) + np.arctan2(-q[..., 1], q[..., 2])
alpha_beta_gamma[..., 1] = 2*np.arccos(np.sqrt((q[..., 0]**2 + q[..., 3]**2)/n))
alpha_beta_gamma[..., 2] = np.arctan2(q[..., 3], q[..., 0]) - np.arctan2(-q[..., 1], q[..., 2])
return alpha_beta_gamma
|
Open Pandora's Box
If somebody is trying to make you use Euler angles, tell them no, and
walk away, and go and tell your mum.
You don't want to use Euler angles. They are awful. Stay away. It's
one thing to convert from Euler angles to quaternions; at least you're
moving in the right direction. But to go the other way?! It's just not
right.
Assumes the Euler angles correspond to the quaternion R via
R = exp(alpha*z/2) * exp(beta*y/2) * exp(gamma*z/2)
The angles are naturally in radians.
NOTE: Before opening an issue reporting something "wrong" with this
function, be sure to read all of the following page, *especially* the
very last section about opening issues or pull requests.
<https://github.com/moble/quaternion/wiki/Euler-angles-are-horrible>
Parameters
----------
q: quaternion or array of quaternions
The quaternion(s) need not be normalized, but must all be nonzero
Returns
-------
alpha_beta_gamma: float array
Output shape is q.shape+(3,). These represent the angles (alpha,
beta, gamma) in radians, where the normalized input quaternion
represents `exp(alpha*z/2) * exp(beta*y/2) * exp(gamma*z/2)`.
Raises
------
AllHell
...if you try to actually use Euler angles, when you could have
been using quaternions like a sensible person.
|
train
|
https://github.com/moble/quaternion/blob/7a323e81b391d6892e2874073e495e0beb057e85/__init__.py#L374-L421
|
[
"def as_float_array(a):\n \"\"\"View the quaternion array as an array of floats\n\n This function is fast (of order 1 microsecond) because no data is\n copied; the returned quantity is just a \"view\" of the original.\n\n The output view has one more dimension (of size 4) than the input\n array, but is otherwise the same shape.\n\n \"\"\"\n return np.asarray(a, dtype=np.quaternion).view((np.double, 4))\n"
] |
# Copyright (c) 2017, Michael Boyle
# See LICENSE file for details: <https://github.com/moble/quaternion/blob/master/LICENSE>
from __future__ import division, print_function, absolute_import
import numpy as np
from .numpy_quaternion import (quaternion, _eps,
slerp_evaluate, squad_evaluate,
# slerp_vectorized, squad_vectorized,
# slerp, squad,
)
from .quaternion_time_series import slerp, squad, integrate_angular_velocity, minimal_rotation
from .calculus import derivative, definite_integral, indefinite_integral
from .means import mean_rotor_in_chordal_metric, optimal_alignment_in_chordal_metric
from ._version import __version__
# Human-readable title; the short description below becomes the module docstring.
__doc_title__ = "Quaternion dtype for NumPy"
__doc__ = "Adds a quaternion dtype to NumPy."
# Public API of the package; names not listed here are internal.
__all__ = ['quaternion',
           'as_quat_array', 'as_spinor_array',
           'as_float_array', 'from_float_array',
           'as_rotation_matrix', 'from_rotation_matrix',
           'as_rotation_vector', 'from_rotation_vector',
           'as_euler_angles', 'from_euler_angles',
           'as_spherical_coords', 'from_spherical_coords',
           'rotate_vectors', 'allclose',
           'rotor_intrinsic_distance', 'rotor_chordal_distance',
           'rotation_intrinsic_distance', 'rotation_chordal_distance',
           'slerp_evaluate', 'squad_evaluate',
           'zero', 'one', 'x', 'y', 'z', 'integrate_angular_velocity',
           'squad', 'slerp', 'derivative', 'definite_integral', 'indefinite_integral']
# Refuse to load if some other package has already put a `quaternion`
# attribute on NumPy -- the registration below would silently clash with it.
if 'quaternion' in np.__dict__:
    raise RuntimeError('The NumPy package already has a quaternion type')
# Expose the new scalar type and its dtype through the numpy namespace.
# NOTE(review): `np.typeDict` is a deprecated alias of `np.sctypeDict` in
# recent NumPy releases -- confirm the minimum supported NumPy version.
np.quaternion = quaternion
np.typeDict['quaternion'] = np.dtype(quaternion)
# Handy constants: additive zero, multiplicative identity, and the three
# basis imaginary units.
zero = np.quaternion(0, 0, 0, 0)
one = np.quaternion(1, 0, 0, 0)
x = np.quaternion(0, 1, 0, 0)
y = np.quaternion(0, 0, 1, 0)
z = np.quaternion(0, 0, 0, 1)
# Re-export the distance functions found on the numpy namespace
# (presumably registered there by the compiled extension -- confirm).
rotor_intrinsic_distance = np.rotor_intrinsic_distance
rotor_chordal_distance = np.rotor_chordal_distance
rotation_intrinsic_distance = np.rotation_intrinsic_distance
rotation_chordal_distance = np.rotation_chordal_distance
def as_float_array(a):
    """View the quaternion array as an array of floats
    This function is fast (of order 1 microsecond) because no data is
    copied; the returned quantity is just a "view" of the original.
    The output view has one more dimension (of size 4) than the input
    array, but is otherwise the same shape.
    """
    # Reinterpret the quaternion memory as groups of four doubles.
    return np.asarray(a, dtype=np.quaternion).view((np.double, 4))
def as_quat_array(a):
    """View a float array as an array of quaternions
    The input array must have a final dimension whose size is
    divisible by four (or better yet *is* 4), because successive
    indices in that last dimension will be considered successive
    components of the output quaternion.
    This function is usually fast (of order 1 microsecond) because no
    data is copied; the returned quantity is just a "view" of the
    original. However, if the input array is not C-contiguous
    (basically, as you increment the index into the last dimension of
    the array, you just move to the neighboring float in memory), the
    data will need to be copied which may be quite slow. Therefore,
    you should try to ensure that the input array is in that order.
    Slices and transpositions will frequently break that rule.
    We will not convert back from a two-spinor array because there is
    no unique convention for them, so I don't want to mess with that.
    Also, we want to discourage users from the slow, memory-copying
    process of swapping columns required for useful definitions of
    the two-spinors.
    """
    a = np.asarray(a, dtype=np.double)
    # fast path: a bare 4-vector is a single quaternion, so no view
    # machinery is needed
    if a.shape == (4,):
        return quaternion(a[0], a[1], a[2], a[3])
    # view only works if the last axis is C-contiguous; otherwise make a
    # contiguous copy first
    if not a.flags['C_CONTIGUOUS'] or a.strides[-1] != a.itemsize:
        a = a.copy(order='C')
    try:
        av = a.view(np.quaternion)
    except ValueError as e:
        # Re-raise with a more helpful message about the expected shape.
        message = (str(e) + '\n '
                   + 'Failed to view input data as a series of quaternions. '
                   + 'Please ensure that the last dimension has size divisible by 4.\n '
                   + 'Input data has shape {0} and dtype {1}.'.format(a.shape, a.dtype))
        raise ValueError(message)
    # special case: don't create an axis for a single quaternion, to
    # match the output of `as_float_array`
    if av.shape[-1] == 1:
        av = av.reshape(a.shape[:-1])
    return av
def from_float_array(a):
    """Convert a float array to a quaternion array (alias of `as_quat_array`)."""
    return as_quat_array(a)
def as_spinor_array(a):
    """View a quaternion array as spinors in two-complex representation
    This function is relatively slow and scales poorly, because memory
    copying is apparently involved -- I think it's due to the
    "advanced indexing" required to swap the columns.
    Parameters
    ----------
    a : quaternion array
        Input quaternions; a scalar input is promoted to a 1-d array.
    Returns
    -------
    spinors : complex array
        Output shape is a.shape+(2,); the float components are paired
        into complex values as (w + z*1j, y + x*1j), per the column
        permutation below.
    """
    a = np.atleast_1d(a)
    assert a.dtype == np.dtype(np.quaternion)
    # Reinterpret as floats, permute the components to (w, z, y, x) so
    # consecutive pairs form the desired complex numbers, then
    # reinterpret as complex.  The builtins `float`/`complex` replace
    # the `np.float`/`np.complex` aliases removed in NumPy 1.24.
    return a.view(float).reshape(a.shape + (4,))[..., [0, 3, 2, 1]].ravel().view(complex).reshape(a.shape + (2,))
def as_rotation_matrix(q):
    """Convert input quaternion to 3x3 rotation matrix

    Parameters
    ----------
    q: quaternion or array of quaternions
        The quaternion(s) need not be normalized, but must all be nonzero

    Returns
    -------
    rot: float array
        Output shape is q.shape+(3,3).  This matrix should multiply (from
        the left) a column vector to produce the rotated column vector.

    Raises
    ------
    ZeroDivisionError
        If any of the input quaternions have norm 0.0.

    """
    if q.shape == () and not isinstance(q, np.ndarray):  # This is just a single quaternion
        # NOTE(review): the unnormalized formulas below divide by `n`
        # exactly where the rotation matrix requires the *squared*
        # magnitude, which suggests `q.norm()` returns the squared norm
        # in this package -- confirm against numpy_quaternion.
        n = q.norm()
        if n == 0.0:
            raise ZeroDivisionError("Input to `as_rotation_matrix({0})` has zero norm".format(q))
        elif abs(n-1.0) < _eps:  # Input q is basically normalized
            # Skip all the divisions when the norm is within machine
            # epsilon of 1.
            return np.array([
                [1 - 2*(q.y**2 + q.z**2), 2*(q.x*q.y - q.z*q.w), 2*(q.x*q.z + q.y*q.w)],
                [2*(q.x*q.y + q.z*q.w), 1 - 2*(q.x**2 + q.z**2), 2*(q.y*q.z - q.x*q.w)],
                [2*(q.x*q.z - q.y*q.w), 2*(q.y*q.z + q.x*q.w), 1 - 2*(q.x**2 + q.y**2)]
            ])
        else:  # Input q is not normalized
            return np.array([
                [1 - 2*(q.y**2 + q.z**2)/n, 2*(q.x*q.y - q.z*q.w)/n, 2*(q.x*q.z + q.y*q.w)/n],
                [2*(q.x*q.y + q.z*q.w)/n, 1 - 2*(q.x**2 + q.z**2)/n, 2*(q.y*q.z - q.x*q.w)/n],
                [2*(q.x*q.z - q.y*q.w)/n, 2*(q.y*q.z + q.x*q.w)/n, 1 - 2*(q.x**2 + q.y**2)/n]
            ])
    else:  # This is an array of quaternions
        n = np.norm(q)
        if np.any(n == 0.0):
            raise ZeroDivisionError("Array input to `as_rotation_matrix` has at least one element with zero norm")
        else:  # Assume input q is not normalized
            # One 3x3 matrix per input quaternion.
            m = np.empty(q.shape + (3, 3))
            # View the components as floats: q[..., 0:4] == (w, x, y, z).
            q = as_float_array(q)
            m[..., 0, 0] = 1.0 - 2*(q[..., 2]**2 + q[..., 3]**2)/n
            m[..., 0, 1] = 2*(q[..., 1]*q[..., 2] - q[..., 3]*q[..., 0])/n
            m[..., 0, 2] = 2*(q[..., 1]*q[..., 3] + q[..., 2]*q[..., 0])/n
            m[..., 1, 0] = 2*(q[..., 1]*q[..., 2] + q[..., 3]*q[..., 0])/n
            m[..., 1, 1] = 1.0 - 2*(q[..., 1]**2 + q[..., 3]**2)/n
            m[..., 1, 2] = 2*(q[..., 2]*q[..., 3] - q[..., 1]*q[..., 0])/n
            m[..., 2, 0] = 2*(q[..., 1]*q[..., 3] - q[..., 2]*q[..., 0])/n
            m[..., 2, 1] = 2*(q[..., 2]*q[..., 3] + q[..., 1]*q[..., 0])/n
            m[..., 2, 2] = 1.0 - 2*(q[..., 1]**2 + q[..., 2]**2)/n
            return m
def from_rotation_matrix(rot, nonorthogonal=True):
    """Convert input 3x3 rotation matrix to unit quaternion

    By default, if scipy.linalg is available, this function uses
    Bar-Itzhack's algorithm to allow for non-orthogonal matrices.
    [J. Guidance, Vol. 23, No. 6, p. 1085 <http://dx.doi.org/10.2514/2.4654>]
    This will almost certainly be quite a bit slower than simpler versions,
    though it will be more robust to numerical errors in the rotation matrix.
    Also note that Bar-Itzhack uses some pretty weird conventions.  The last
    component of the quaternion appears to represent the scalar, and the
    quaternion itself is conjugated relative to the convention used
    throughout this module.

    If scipy.linalg is not available or if the optional `nonorthogonal`
    parameter is set to `False`, this function falls back to the possibly
    faster, but less robust, algorithm of Markley
    [J. Guidance, Vol. 31, No. 2, p. 440 <http://dx.doi.org/10.2514/1.31730>].

    Parameters
    ----------
    rot: (...Nx3x3) float array
        Each 3x3 matrix represents a rotation by multiplying (from the left)
        a column vector to produce a rotated column vector.  Note that this
        input may actually have ndims>3; it is just assumed that the last
        two dimensions have size 3, representing the matrix.
    nonorthogonal: bool, optional
        If scipy.linalg is available, use the more robust algorithm of
        Bar-Itzhack.  Default value is True.

    Returns
    -------
    q: array of quaternions
        Unit quaternions resulting in rotations corresponding to input
        rotations.  Output shape is rot.shape[:-2].

    Raises
    ------
    LinAlgError
        If any of the eigenvalue solutions does not converge

    """
    try:
        from scipy import linalg
    except ImportError:
        linalg = False
    rot = np.array(rot, copy=False)
    shape = rot.shape[:-2]
    if linalg and nonorthogonal:
        from operator import mul
        from functools import reduce
        # Bar-Itzhack's K3 matrix: the optimal quaternion is the eigenvector
        # associated with its largest eigenvalue.
        K3 = np.empty(shape+(4, 4))
        K3[..., 0, 0] = (rot[..., 0, 0] - rot[..., 1, 1] - rot[..., 2, 2])/3.0
        K3[..., 0, 1] = (rot[..., 1, 0] + rot[..., 0, 1])/3.0
        K3[..., 0, 2] = (rot[..., 2, 0] + rot[..., 0, 2])/3.0
        K3[..., 0, 3] = (rot[..., 1, 2] - rot[..., 2, 1])/3.0
        K3[..., 1, 0] = K3[..., 0, 1]
        K3[..., 1, 1] = (rot[..., 1, 1] - rot[..., 0, 0] - rot[..., 2, 2])/3.0
        K3[..., 1, 2] = (rot[..., 2, 1] + rot[..., 1, 2])/3.0
        K3[..., 1, 3] = (rot[..., 2, 0] - rot[..., 0, 2])/3.0
        K3[..., 2, 0] = K3[..., 0, 2]
        K3[..., 2, 1] = K3[..., 1, 2]
        K3[..., 2, 2] = (rot[..., 2, 2] - rot[..., 0, 0] - rot[..., 1, 1])/3.0
        K3[..., 2, 3] = (rot[..., 0, 1] - rot[..., 1, 0])/3.0
        K3[..., 3, 0] = K3[..., 0, 3]
        K3[..., 3, 1] = K3[..., 1, 3]
        K3[..., 3, 2] = K3[..., 2, 3]
        K3[..., 3, 3] = (rot[..., 0, 0] + rot[..., 1, 1] + rot[..., 2, 2])/3.0
        if not shape:
            q = zero.copy()
            # `eigvals=(3, 3)` requests only the largest eigenpair.
            # NOTE(review): this keyword is deprecated in recent scipy in
            # favor of `subset_by_index`; confirm the supported scipy range.
            eigvals, eigvecs = linalg.eigh(K3.T, eigvals=(3, 3))
            # Undo Bar-Itzhack's conventions: scalar component is stored
            # last, and the quaternion is conjugated.
            q.components[0] = eigvecs[-1]
            q.components[1:] = -eigvecs[:-1].flatten()
            return q
        else:
            # BUG FIX: `np.float` was removed from numpy (deprecated 1.20,
            # removed 1.24); the builtin `float` is the same float64 dtype.
            q = np.empty(shape+(4,), dtype=float)
            for flat_index in range(reduce(mul, shape)):
                multi_index = np.unravel_index(flat_index, shape)
                eigvals, eigvecs = linalg.eigh(K3[multi_index], eigvals=(3, 3))
                q[multi_index+(0,)] = eigvecs[-1]
                q[multi_index+(slice(1,None),)] = -eigvecs[:-1].flatten()
            return as_quat_array(q)
    else:  # No scipy.linalg or not `nonorthogonal`
        # Markley's method: per matrix, pick whichever of the four candidate
        # formulations has the largest diagonal "denominator", to keep the
        # arithmetic well conditioned.
        diagonals = np.empty(shape+(4,))
        diagonals[..., 0] = rot[..., 0, 0]
        diagonals[..., 1] = rot[..., 1, 1]
        diagonals[..., 2] = rot[..., 2, 2]
        diagonals[..., 3] = rot[..., 0, 0] + rot[..., 1, 1] + rot[..., 2, 2]
        indices = np.argmax(diagonals, axis=-1)
        q = diagonals  # reuse storage space
        indices_i = (indices == 0)
        if np.any(indices_i):
            if indices_i.shape == ():
                indices_i = Ellipsis  # scalar case: index the whole array
            rot_i = rot[indices_i, :, :]
            q[indices_i, 0] = rot_i[..., 2, 1] - rot_i[..., 1, 2]
            q[indices_i, 1] = 1 + rot_i[..., 0, 0] - rot_i[..., 1, 1] - rot_i[..., 2, 2]
            q[indices_i, 2] = rot_i[..., 0, 1] + rot_i[..., 1, 0]
            q[indices_i, 3] = rot_i[..., 0, 2] + rot_i[..., 2, 0]
        indices_i = (indices == 1)
        if np.any(indices_i):
            if indices_i.shape == ():
                indices_i = Ellipsis
            rot_i = rot[indices_i, :, :]
            q[indices_i, 0] = rot_i[..., 0, 2] - rot_i[..., 2, 0]
            q[indices_i, 1] = rot_i[..., 1, 0] + rot_i[..., 0, 1]
            q[indices_i, 2] = 1 - rot_i[..., 0, 0] + rot_i[..., 1, 1] - rot_i[..., 2, 2]
            q[indices_i, 3] = rot_i[..., 1, 2] + rot_i[..., 2, 1]
        indices_i = (indices == 2)
        if np.any(indices_i):
            if indices_i.shape == ():
                indices_i = Ellipsis
            rot_i = rot[indices_i, :, :]
            q[indices_i, 0] = rot_i[..., 1, 0] - rot_i[..., 0, 1]
            q[indices_i, 1] = rot_i[..., 2, 0] + rot_i[..., 0, 2]
            q[indices_i, 2] = rot_i[..., 2, 1] + rot_i[..., 1, 2]
            q[indices_i, 3] = 1 - rot_i[..., 0, 0] - rot_i[..., 1, 1] + rot_i[..., 2, 2]
        indices_i = (indices == 3)
        if np.any(indices_i):
            if indices_i.shape == ():
                indices_i = Ellipsis
            rot_i = rot[indices_i, :, :]
            q[indices_i, 0] = 1 + rot_i[..., 0, 0] + rot_i[..., 1, 1] + rot_i[..., 2, 2]
            q[indices_i, 1] = rot_i[..., 2, 1] - rot_i[..., 1, 2]
            q[indices_i, 2] = rot_i[..., 0, 2] - rot_i[..., 2, 0]
            q[indices_i, 3] = rot_i[..., 1, 0] - rot_i[..., 0, 1]
        # Normalize each candidate to a unit quaternion.
        q /= np.linalg.norm(q, axis=-1)[..., np.newaxis]
        return as_quat_array(q)
def as_rotation_vector(q):
    """Convert input quaternion to the axis-angle representation

    Note that if any of the input quaternions has norm zero, no error is
    raised, but NaNs will appear in the output.

    Parameters
    ----------
    q: quaternion or array of quaternions
        The quaternion(s) need not be normalized, but must all be nonzero

    Returns
    -------
    rot: float array
        Output shape is q.shape+(3,).  Each vector represents the axis of
        the rotation, with norm proportional to the angle of the rotation
        in radians.

    """
    # Normalize, take the quaternion logarithm, and double it; the rotation
    # vector is the vector part, so component 0 is dropped.
    unit = np.normalized(q)
    doubled_log = as_float_array(2*np.log(unit))
    return doubled_log[..., 1:]
def from_rotation_vector(rot):
    """Convert input 3-vector in axis-angle representation to unit quaternion

    Parameters
    ----------
    rot: (Nx3) float array
        Each vector represents the axis of the rotation, with norm
        proportional to the angle of the rotation in radians.

    Returns
    -------
    q: array of quaternions
        Unit quaternions resulting in rotations corresponding to input
        rotations.  Output shape is rot.shape[:-1].

    """
    rot = np.array(rot, copy=False)
    # Place half of each rotation vector in the vector part of a
    # pure-imaginary quaternion, then exponentiate to get the rotor.
    pure = np.zeros(rot.shape[:-1] + (4,))
    pure[..., 1:] = rot[...] / 2
    return np.exp(as_quat_array(pure))
def from_euler_angles(alpha_beta_gamma, beta=None, gamma=None):
    """Construct quaternions from Euler angles

    Assumes the Euler angles correspond to the quaternion R via

        R = exp(alpha*z/2) * exp(beta*y/2) * exp(gamma*z/2)

    The angles must be given in radians.

    NOTE: Euler angles are full of conventions and pitfalls; before
    reporting a problem with this function, read
    <https://github.com/moble/quaternion/wiki/Euler-angles-are-horrible>

    Parameters
    ----------
    alpha_beta_gamma: float or array of floats
        Either an array whose last dimension has size 3, holding the
        (alpha, beta, gamma) radian values for each rotation, or just the
        alpha values, in which case `beta` and `gamma` must also be given.
    beta: None, float, or array of floats
        If given, must broadcast against the first and third arguments.
    gamma: None, float, or array of floats
        If given, must broadcast against the first and second arguments.

    Returns
    -------
    R: quaternion array
        Same shape as the input, with the last dimension removed.

    """
    # Accept either a packed (..., 3) array or three separate angle arrays
    if gamma is None:
        alpha_beta_gamma = np.asarray(alpha_beta_gamma, dtype=np.double)
        alpha = alpha_beta_gamma[..., 0]
        beta = alpha_beta_gamma[..., 1]
        gamma = alpha_beta_gamma[..., 2]
    else:
        alpha = np.asarray(alpha_beta_gamma, dtype=np.double)
        beta = np.asarray(beta, dtype=np.double)
        gamma = np.asarray(gamma, dtype=np.double)
    # Half-angle combinations shared by all four components
    half_beta = beta/2
    half_sum = (alpha+gamma)/2
    half_diff = (alpha-gamma)/2
    # Fill the four float components, then reinterpret as quaternions
    R = np.empty(np.broadcast(alpha, beta, gamma).shape + (4,), dtype=np.double)
    R[..., 0] = np.cos(half_beta)*np.cos(half_sum)    # scalar component
    R[..., 1] = -np.sin(half_beta)*np.sin(half_diff)  # x component
    R[..., 2] = np.sin(half_beta)*np.cos(half_diff)   # y component
    R[..., 3] = np.cos(half_beta)*np.sin(half_sum)    # z component
    return as_quat_array(R)
def as_spherical_coords(q):
    """Return the spherical coordinates corresponding to this quaternion

    Spherical coordinates carry less information than a quaternion, so this
    conversion is lossy: the returned angles describe the point(s) on the
    sphere to which the input quaternion(s) rotate the z axis.

    Parameters
    ----------
    q: quaternion or array of quaternions
        The quaternion(s) need not be normalized, but must be nonzero

    Returns
    -------
    vartheta_varphi: float array
        Output shape is q.shape+(2,).  These represent the angles
        (vartheta, varphi) in radians, where the normalized input
        quaternion represents `exp(varphi*z/2) * exp(vartheta*y/2)`, up to
        an arbitrary initial rotation about `z`.

    """
    # Euler angles are (alpha, beta, gamma); taking elements 1 and 0 in
    # reverse order yields (beta, alpha) == (vartheta, varphi).
    euler = as_euler_angles(q)
    return euler[..., 1::-1]
def from_spherical_coords(theta_phi, phi=None):
    """Return the quaternion corresponding to these spherical coordinates

    Assumes the spherical coordinates correspond to the quaternion R via

        R = exp(phi*z/2) * exp(theta*y/2)

    The angles must be given in radians.  Note that this quaternion rotates
    `z` onto the point with the given spherical coordinates, but also
    rotates `x` and `y` onto the usual basis vectors (theta and phi,
    respectively) at that point.

    Parameters
    ----------
    theta_phi: float or array of floats
        Either an array whose last dimension has size 2, holding the
        (theta, phi) radian values for each point, or just the theta
        values, in which case `phi` must also be given.
    phi: None, float, or array of floats
        If given, must broadcast against the first argument.

    Returns
    -------
    R: quaternion array
        If `phi` is not given, the shape is the input shape with the last
        dimension removed; otherwise it is the broadcast of the two inputs.

    """
    # Accept either a packed (..., 2) array or two separate angle arrays
    if phi is None:
        theta_phi = np.asarray(theta_phi, dtype=np.double)
        theta = theta_phi[..., 0]
        phi = theta_phi[..., 1]
    else:
        theta = np.asarray(theta_phi, dtype=np.double)
        phi = np.asarray(phi, dtype=np.double)
    # Half angles shared by all four components
    half_theta = theta/2
    half_phi = phi/2
    # Fill the four float components, then reinterpret as quaternions
    R = np.empty(np.broadcast(theta, phi).shape + (4,), dtype=np.double)
    R[..., 0] = np.cos(half_phi)*np.cos(half_theta)   # scalar component
    R[..., 1] = -np.sin(half_phi)*np.sin(half_theta)  # x component
    R[..., 2] = np.cos(half_phi)*np.sin(half_theta)   # y component
    R[..., 3] = np.sin(half_phi)*np.cos(half_theta)   # z component
    return as_quat_array(R)
def rotate_vectors(R, v, axis=-1):
    """Rotate vectors by given quaternions

    For simplicity, this function simply converts the input
    quaternion(s) to a matrix, and rotates the input vector(s) by the
    usual matrix multiplication.  However, it should be noted that if
    each input quaternion is only used to rotate a single vector, it
    is more efficient (in terms of operation counts) to use the
    formula

      v' = v + 2 * r x (s * v + r x v) / m

    where x represents the cross product, s and r are the scalar and
    vector parts of the quaternion, respectively, and m is the sum of
    the squares of the components of the quaternion.  If you are
    looping over a very large number of quaternions, and just rotating
    a single vector each time, you might want to implement that
    alternative algorithm using numba (or something that doesn't use
    python).

    Parameters
    ==========
    R: quaternion array
        Quaternions by which to rotate the input vectors
    v: float array
        Three-vectors to be rotated.
    axis: int
        Axis of the `v` array to use as the vector dimension.  This
        axis of `v` must have length 3.

    Returns
    =======
    vprime: float array
        The rotated vectors.  This array has shape R.shape+v.shape.

    """
    R = np.asarray(R, dtype=np.quaternion)
    v = np.asarray(v, dtype=float)
    if v.ndim < 1 or 3 not in v.shape:
        raise ValueError("Input `v` does not have at least one dimension of length 3")
    if v.shape[axis] != 3:
        raise ValueError("Input `v` axis {0} has length {1}, not 3.".format(axis, v.shape[axis]))
    m = as_rotation_matrix(R)
    # Integer axis labels for einsum's interleaved calling convention:
    # the matrices use labels [0 .. m.ndim-1], the vectors use fresh labels
    # [m.ndim ..], and the output replaces the vector's 3-axis with the
    # matrix row label while the matrix column label is contracted away.
    m_axes = list(range(m.ndim))
    v_axes = list(range(m.ndim, m.ndim+v.ndim))
    mv_axes = list(v_axes)
    mv_axes[axis] = m_axes[-2]  # output carries the matrix row axis here
    mv_axes = m_axes[:-2] + mv_axes
    v_axes[axis] = m_axes[-1]  # contract vector axis with matrix columns
    return np.einsum(m, m_axes, v, v_axes, mv_axes)
def isclose(a, b, rtol=4*np.finfo(float).eps, atol=0.0, equal_nan=False):
    """
    Returns a boolean array where two arrays are element-wise equal within a
    tolerance.

    This function is essentially a copy of the `numpy.isclose` function,
    with different default tolerances and minor changes necessary to deal
    correctly with quaternions.

    The tolerance values are positive, typically very small numbers.  The
    relative difference (`rtol` * abs(`b`)) and the absolute difference
    `atol` are added together to compare against the absolute difference
    between `a` and `b`.

    Parameters
    ----------
    a, b : array_like
        Input arrays to compare.
    rtol : float
        The relative tolerance parameter (see Notes).
    atol : float
        The absolute tolerance parameter (see Notes).
    equal_nan : bool
        Whether to compare NaN's as equal.  If True, NaN's in `a` will be
        considered equal to NaN's in `b` in the output array.

    Returns
    -------
    y : array_like
        Returns a boolean array of where `a` and `b` are equal within the
        given tolerance.  If both `a` and `b` are scalars, returns a single
        boolean value.

    See Also
    --------
    allclose

    Notes
    -----
    For finite values, isclose uses the following equation to test whether
    two floating point values are equivalent:

        absolute(`a` - `b`) <= (`atol` + `rtol` * absolute(`b`))

    The above equation is not symmetric in `a` and `b`, so that
    `isclose(a, b)` might be different from `isclose(b, a)` in
    some rare cases.

    Examples
    --------
    >>> quaternion.isclose([1e10*quaternion.x, 1e-7*quaternion.y], [1.00001e10*quaternion.x, 1e-8*quaternion.y],
    ... rtol=1.e-5, atol=1.e-8)
    array([True, False])
    >>> quaternion.isclose([1e10*quaternion.x, 1e-8*quaternion.y], [1.00001e10*quaternion.x, 1e-9*quaternion.y],
    ... rtol=1.e-5, atol=1.e-8)
    array([True, True])
    >>> quaternion.isclose([1e10*quaternion.x, 1e-8*quaternion.y], [1.0001e10*quaternion.x, 1e-9*quaternion.y],
    ... rtol=1.e-5, atol=1.e-8)
    array([False, True])
    >>> quaternion.isclose([quaternion.x, np.nan*quaternion.y], [quaternion.x, np.nan*quaternion.y])
    array([True, False])
    >>> quaternion.isclose([quaternion.x, np.nan*quaternion.y], [quaternion.x, np.nan*quaternion.y], equal_nan=True)
    array([True, True])
    """
    def within_tol(x, y, atol, rtol):
        # Elementwise |x - y| <= atol + rtol*|y|.  Invalid-value warnings
        # (e.g. from inf - inf) are suppressed; callers handle non-finite
        # entries separately below.
        with np.errstate(invalid='ignore'):
            result = np.less_equal(abs(x-y), atol + rtol * abs(y))
        if np.isscalar(a) and np.isscalar(b):
            result = bool(result)
        return result
    # NOTE(review): `copy=False` changes meaning in numpy 2.0 (it raises
    # when a copy is required); confirm against the supported numpy range.
    x = np.array(a, copy=False, subok=True, ndmin=1)
    y = np.array(b, copy=False, subok=True, ndmin=1)
    # Make sure y is an inexact type to avoid bad behavior on abs(MIN_INT).
    # This will cause casting of x later. Also, make sure to allow subclasses
    # (e.g., for numpy.ma).
    try:
        dt = np.result_type(y, 1.)
    except TypeError:
        # `result_type` cannot promote the quaternion dtype; use it directly.
        dt = np.dtype(np.quaternion)
    y = np.array(y, dtype=dt, copy=False, subok=True)
    xfin = np.isfinite(x)
    yfin = np.isfinite(y)
    if np.all(xfin) and np.all(yfin):
        # Fast path: everything finite, a single vectorized comparison.
        return within_tol(x, y, atol, rtol)
    else:
        finite = xfin & yfin
        cond = np.zeros_like(finite, subok=True)
        # Because we're using boolean indexing, x & y must be the same shape.
        # Ideally, we'd just do x, y = broadcast_arrays(x, y). It's in
        # lib.stride_tricks, though, so we can't import it here.
        x = x * np.ones_like(cond)
        y = y * np.ones_like(cond)
        # Avoid subtraction with infinite/nan values...
        cond[finite] = within_tol(x[finite], y[finite], atol, rtol)
        # Check for equality of infinite values...
        cond[~finite] = (x[~finite] == y[~finite])
        if equal_nan:
            # Make NaN == NaN
            both_nan = np.isnan(x) & np.isnan(y)
            cond[both_nan] = both_nan[both_nan]
        if np.isscalar(a) and np.isscalar(b):
            return bool(cond)
        else:
            return cond
def allclose(a, b, rtol=4*np.finfo(float).eps, atol=0.0, equal_nan=False, verbose=False):
    """
    Returns True if two arrays are element-wise equal within a tolerance.

    This function is essentially a wrapper for the `quaternion.isclose`
    function, but returns a single boolean value of True if all elements
    of the output from `quaternion.isclose` are True, and False otherwise.

    Note that this function has stricter default tolerances than the
    `numpy.allclose` function, as well as the additional `verbose` option.

    Parameters
    ----------
    a, b : array_like
        Input arrays to compare.
    rtol : float
        The relative tolerance parameter (see `isclose`).
    atol : float
        The absolute tolerance parameter (see `isclose`).
    equal_nan : bool
        Whether to compare NaN's as equal.  If True, NaN's in `a` will be
        considered equal to NaN's in `b` in the output array.
    verbose : bool
        If True and the return value is False, print every pair of
        non-close elements along with their indices.

    Returns
    -------
    allclose : bool
        Returns True if the two arrays are equal within the given
        tolerance; False otherwise.

    See Also
    --------
    isclose, numpy.all, numpy.any, numpy.allclose

    Notes
    -----
    If the following equation is element-wise True, then allclose returns
    True.

        absolute(`a` - `b`) <= (`atol` + `rtol` * absolute(`b`))

    The above equation is not symmetric in `a` and `b`, so that
    `allclose(a, b)` might be different from `allclose(b, a)` in
    some rare cases.

    """
    close = isclose(a, b, rtol=rtol, atol=atol, equal_nan=equal_nan)
    result = np.all(close)
    if verbose and not result:
        # Report each failing element; `argwhere` yields one index tuple
        # per non-close entry.
        print('Non-close values:')
        for i in np.argwhere(close == False):
            i = tuple(i)
            print('\n x[{0}]={1}\n y[{0}]={2}'.format(i, a[i], b[i]))
    return result
|
moble/quaternion
|
__init__.py
|
from_euler_angles
|
python
|
def from_euler_angles(alpha_beta_gamma, beta=None, gamma=None):
# Figure out the input angles from either type of input
if gamma is None:
alpha_beta_gamma = np.asarray(alpha_beta_gamma, dtype=np.double)
alpha = alpha_beta_gamma[..., 0]
beta = alpha_beta_gamma[..., 1]
gamma = alpha_beta_gamma[..., 2]
else:
alpha = np.asarray(alpha_beta_gamma, dtype=np.double)
beta = np.asarray(beta, dtype=np.double)
gamma = np.asarray(gamma, dtype=np.double)
# Set up the output array
R = np.empty(np.broadcast(alpha, beta, gamma).shape + (4,), dtype=np.double)
# Compute the actual values of the quaternion components
R[..., 0] = np.cos(beta/2)*np.cos((alpha+gamma)/2) # scalar quaternion components
R[..., 1] = -np.sin(beta/2)*np.sin((alpha-gamma)/2) # x quaternion components
R[..., 2] = np.sin(beta/2)*np.cos((alpha-gamma)/2) # y quaternion components
R[..., 3] = np.cos(beta/2)*np.sin((alpha+gamma)/2) # z quaternion components
return as_quat_array(R)
|
Improve your life drastically
Assumes the Euler angles correspond to the quaternion R via
R = exp(alpha*z/2) * exp(beta*y/2) * exp(gamma*z/2)
The angles naturally must be in radians for this to make any sense.
NOTE: Before opening an issue reporting something "wrong" with this
function, be sure to read all of the following page, *especially* the
very last section about opening issues or pull requests.
<https://github.com/moble/quaternion/wiki/Euler-angles-are-horrible>
Parameters
----------
alpha_beta_gamma: float or array of floats
This argument may either contain an array with last dimension of
size 3, where those three elements describe the (alpha, beta, gamma)
radian values for each rotation; or it may contain just the alpha
values, in which case the next two arguments must also be given.
beta: None, float, or array of floats
If this array is given, it must be able to broadcast against the
first and third arguments.
gamma: None, float, or array of floats
If this array is given, it must be able to broadcast against the
first and second arguments.
Returns
-------
R: quaternion array
The shape of this array will be the same as the input, except that
the last dimension will be removed.
|
train
|
https://github.com/moble/quaternion/blob/7a323e81b391d6892e2874073e495e0beb057e85/__init__.py#L424-L479
|
[
"def as_quat_array(a):\n \"\"\"View a float array as an array of quaternions\n\n The input array must have a final dimension whose size is\n divisible by four (or better yet *is* 4), because successive\n indices in that last dimension will be considered successive\n components of the output quaternion.\n\n This function is usually fast (of order 1 microsecond) because no\n data is copied; the returned quantity is just a \"view\" of the\n original. However, if the input array is not C-contiguous\n (basically, as you increment the index into the last dimension of\n the array, you just move to the neighboring float in memory), the\n data will need to be copied which may be quite slow. Therefore,\n you should try to ensure that the input array is in that order.\n Slices and transpositions will frequently break that rule.\n\n We will not convert back from a two-spinor array because there is\n no unique convention for them, so I don't want to mess with that.\n Also, we want to discourage users from the slow, memory-copying\n process of swapping columns required for useful definitions of\n the two-spinors.\n\n \"\"\"\n a = np.asarray(a, dtype=np.double)\n\n # fast path\n if a.shape == (4,):\n return quaternion(a[0], a[1], a[2], a[3])\n\n # view only works if the last axis is C-contiguous\n if not a.flags['C_CONTIGUOUS'] or a.strides[-1] != a.itemsize:\n a = a.copy(order='C')\n try:\n av = a.view(np.quaternion)\n except ValueError as e:\n message = (str(e) + '\\n '\n + 'Failed to view input data as a series of quaternions. '\n + 'Please ensure that the last dimension has size divisible by 4.\\n '\n + 'Input data has shape {0} and dtype {1}.'.format(a.shape, a.dtype))\n raise ValueError(message)\n\n # special case: don't create an axis for a single quaternion, to\n # match the output of `as_float_array`\n if av.shape[-1] == 1:\n av = av.reshape(a.shape[:-1])\n\n return av\n"
] |
# Copyright (c) 2017, Michael Boyle
# See LICENSE file for details: <https://github.com/moble/quaternion/blob/master/LICENSE>
from __future__ import division, print_function, absolute_import
import numpy as np
from .numpy_quaternion import (quaternion, _eps,
slerp_evaluate, squad_evaluate,
# slerp_vectorized, squad_vectorized,
# slerp, squad,
)
from .quaternion_time_series import slerp, squad, integrate_angular_velocity, minimal_rotation
from .calculus import derivative, definite_integral, indefinite_integral
from .means import mean_rotor_in_chordal_metric, optimal_alignment_in_chordal_metric
from ._version import __version__
__doc_title__ = "Quaternion dtype for NumPy"
__doc__ = "Adds a quaternion dtype to NumPy."
__all__ = ['quaternion',
'as_quat_array', 'as_spinor_array',
'as_float_array', 'from_float_array',
'as_rotation_matrix', 'from_rotation_matrix',
'as_rotation_vector', 'from_rotation_vector',
'as_euler_angles', 'from_euler_angles',
'as_spherical_coords', 'from_spherical_coords',
'rotate_vectors', 'allclose',
'rotor_intrinsic_distance', 'rotor_chordal_distance',
'rotation_intrinsic_distance', 'rotation_chordal_distance',
'slerp_evaluate', 'squad_evaluate',
'zero', 'one', 'x', 'y', 'z', 'integrate_angular_velocity',
'squad', 'slerp', 'derivative', 'definite_integral', 'indefinite_integral']
# Register the quaternion scalar type on the numpy namespace so that
# `np.quaternion` and `np.dtype(quaternion)` both work.
if 'quaternion' in np.__dict__:
    raise RuntimeError('The NumPy package already has a quaternion type')
np.quaternion = quaternion
# NOTE(review): `np.typeDict` was deprecated and removed in numpy 1.24
# (replaced by `np.sctypeDict`); confirm the numpy versions this module
# must support.
np.typeDict['quaternion'] = np.dtype(quaternion)
# Convenience constants: additive identity, multiplicative identity, and
# the three imaginary basis quaternions.
zero = np.quaternion(0, 0, 0, 0)
one = np.quaternion(1, 0, 0, 0)
x = np.quaternion(0, 1, 0, 0)
y = np.quaternion(0, 0, 1, 0)
z = np.quaternion(0, 0, 0, 1)
# Re-export the distance functions found on the numpy namespace --
# presumably attached by the compiled extension imported above; confirm.
rotor_intrinsic_distance = np.rotor_intrinsic_distance
rotor_chordal_distance = np.rotor_chordal_distance
rotation_intrinsic_distance = np.rotation_intrinsic_distance
rotation_chordal_distance = np.rotation_chordal_distance
def as_float_array(a):
    """Reinterpret a quaternion array as an array of floats

    No data is copied; the result is just a view of the original.  The
    output has one extra trailing dimension (of size 4) holding the
    components of each quaternion, but is otherwise the same shape.

    """
    quat_array = np.asarray(a, dtype=np.quaternion)
    return quat_array.view((np.double, 4))
def as_quat_array(a):
    """View a float array as an array of quaternions

    The last dimension of the input must have a size divisible by four
    (ideally exactly 4); successive entries along it become successive
    components of the output quaternions.

    This is usually fast (no data is copied; the result is a view of the
    original), but if the input's last axis is not C-contiguous, a copy
    must be made first, which may be slow.  Slices and transpositions
    frequently break contiguity, so try to pass arrays in that order.

    We will not convert back from a two-spinor array because there is no
    unique convention for them, so we don't want to mess with that, and
    we want to discourage the slow, memory-copying column swaps that
    useful two-spinor definitions require.

    """
    a = np.asarray(a, dtype=np.double)
    # Single quaternion: construct it directly rather than taking a view.
    if a.shape == (4,):
        return quaternion(a[0], a[1], a[2], a[3])
    # A dtype view requires the last axis to be C-contiguous; copy if not.
    contiguous = a.flags['C_CONTIGUOUS'] and a.strides[-1] == a.itemsize
    if not contiguous:
        a = a.copy(order='C')
    try:
        qview = a.view(np.quaternion)
    except ValueError as e:
        message = (str(e) + '\n '
                   + 'Failed to view input data as a series of quaternions. '
                   + 'Please ensure that the last dimension has size divisible by 4.\n '
                   + 'Input data has shape {0} and dtype {1}.'.format(a.shape, a.dtype))
        raise ValueError(message)
    # Match `as_float_array`: a lone quaternion comes back without a
    # trailing length-1 axis.
    if qview.shape[-1] == 1:
        qview = qview.reshape(a.shape[:-1])
    return qview
def from_float_array(a):
    """Interpret a float array as quaternions; alias for `as_quat_array`."""
    return as_quat_array(a)
def as_spinor_array(a):
    """View a quaternion array as spinors in two-complex representation

    Each quaternion (w, x, y, z) is mapped to the complex pair
    (w + z*1j, y + x*1j): the float columns are reordered to
    (w, z, y, x) and adjacent pairs are reinterpreted as complex values.

    This function is relatively slow and scales poorly, because memory
    copying is apparently involved -- I think it's due to the
    "advanced indexing" required to swap the columns.

    """
    a = np.atleast_1d(a)
    assert a.dtype == np.dtype(np.quaternion)
    # BUG FIX: `np.float` and `np.complex` were removed from numpy
    # (deprecated in 1.20, removed in 1.24).  The builtins `float` and
    # `complex` yield the identical float64/complex128 dtypes here.
    return a.view(float).reshape(a.shape + (4,))[..., [0, 3, 2, 1]].ravel().view(complex).reshape(a.shape + (2,))
def as_rotation_matrix(q):
    """Convert input quaternion to 3x3 rotation matrix

    Parameters
    ----------
    q: quaternion or array of quaternions
        The quaternion(s) need not be normalized, but must all be nonzero

    Returns
    -------
    rot: float array
        Output shape is q.shape+(3,3).  This matrix should multiply (from
        the left) a column vector to produce the rotated column vector.

    Raises
    ------
    ZeroDivisionError
        If any of the input quaternions have norm 0.0.

    """
    if q.shape == () and not isinstance(q, np.ndarray):  # This is just a single quaternion
        # NOTE(review): the unnormalized formulas below divide by `n`
        # exactly where the rotation matrix requires the *squared*
        # magnitude, which suggests `q.norm()` returns the squared norm
        # in this package -- confirm against numpy_quaternion.
        n = q.norm()
        if n == 0.0:
            raise ZeroDivisionError("Input to `as_rotation_matrix({0})` has zero norm".format(q))
        elif abs(n-1.0) < _eps:  # Input q is basically normalized
            # Skip all the divisions when the norm is within machine
            # epsilon of 1.
            return np.array([
                [1 - 2*(q.y**2 + q.z**2), 2*(q.x*q.y - q.z*q.w), 2*(q.x*q.z + q.y*q.w)],
                [2*(q.x*q.y + q.z*q.w), 1 - 2*(q.x**2 + q.z**2), 2*(q.y*q.z - q.x*q.w)],
                [2*(q.x*q.z - q.y*q.w), 2*(q.y*q.z + q.x*q.w), 1 - 2*(q.x**2 + q.y**2)]
            ])
        else:  # Input q is not normalized
            return np.array([
                [1 - 2*(q.y**2 + q.z**2)/n, 2*(q.x*q.y - q.z*q.w)/n, 2*(q.x*q.z + q.y*q.w)/n],
                [2*(q.x*q.y + q.z*q.w)/n, 1 - 2*(q.x**2 + q.z**2)/n, 2*(q.y*q.z - q.x*q.w)/n],
                [2*(q.x*q.z - q.y*q.w)/n, 2*(q.y*q.z + q.x*q.w)/n, 1 - 2*(q.x**2 + q.y**2)/n]
            ])
    else:  # This is an array of quaternions
        n = np.norm(q)
        if np.any(n == 0.0):
            raise ZeroDivisionError("Array input to `as_rotation_matrix` has at least one element with zero norm")
        else:  # Assume input q is not normalized
            # One 3x3 matrix per input quaternion.
            m = np.empty(q.shape + (3, 3))
            # View the components as floats: q[..., 0:4] == (w, x, y, z).
            q = as_float_array(q)
            m[..., 0, 0] = 1.0 - 2*(q[..., 2]**2 + q[..., 3]**2)/n
            m[..., 0, 1] = 2*(q[..., 1]*q[..., 2] - q[..., 3]*q[..., 0])/n
            m[..., 0, 2] = 2*(q[..., 1]*q[..., 3] + q[..., 2]*q[..., 0])/n
            m[..., 1, 0] = 2*(q[..., 1]*q[..., 2] + q[..., 3]*q[..., 0])/n
            m[..., 1, 1] = 1.0 - 2*(q[..., 1]**2 + q[..., 3]**2)/n
            m[..., 1, 2] = 2*(q[..., 2]*q[..., 3] - q[..., 1]*q[..., 0])/n
            m[..., 2, 0] = 2*(q[..., 1]*q[..., 3] - q[..., 2]*q[..., 0])/n
            m[..., 2, 1] = 2*(q[..., 2]*q[..., 3] + q[..., 1]*q[..., 0])/n
            m[..., 2, 2] = 1.0 - 2*(q[..., 1]**2 + q[..., 2]**2)/n
            return m
def from_rotation_matrix(rot, nonorthogonal=True):
    """Convert input 3x3 rotation matrix to unit quaternion

    By default, if scipy.linalg is available, this function uses
    Bar-Itzhack's algorithm to allow for non-orthogonal matrices.
    [J. Guidance, Vol. 23, No. 6, p. 1085 <http://dx.doi.org/10.2514/2.4654>]
    This will almost certainly be quite a bit slower than simpler versions,
    though it will be more robust to numerical errors in the rotation matrix.
    Also note that Bar-Itzhack uses some pretty weird conventions.  The last
    component of the quaternion appears to represent the scalar, and the
    quaternion itself is conjugated relative to the convention used
    throughout this module.

    If scipy.linalg is not available or if the optional
    `nonorthogonal` parameter is set to `False`, this function falls
    back to the possibly faster, but less robust, algorithm of Markley
    [J. Guidance, Vol. 31, No. 2, p. 440
    <http://dx.doi.org/10.2514/1.31730>].

    Parameters
    ----------
    rot: (...Nx3x3) float array
        Each 3x3 matrix represents a rotation by multiplying (from the left)
        a column vector to produce a rotated column vector.  Note that this
        input may actually have ndims>3; it is just assumed that the last
        two dimensions have size 3, representing the matrix.
    nonorthogonal: bool, optional
        If scipy.linalg is available, use the more robust algorithm of
        Bar-Itzhack.  Default value is True.

    Returns
    -------
    q: array of quaternions
        Unit quaternions resulting in rotations corresponding to input
        rotations.  Output shape is rot.shape[:-2].

    Raises
    ------
    LinAlgError
        If any of the eigenvalue solutions does not converge

    """
    try:
        from scipy import linalg
    except ImportError:
        linalg = False

    rot = np.array(rot, copy=False)
    shape = rot.shape[:-2]

    if linalg and nonorthogonal:
        from operator import mul
        from functools import reduce

        # Build Bar-Itzhack's symmetric 4x4 matrix K3; the eigenvector
        # belonging to its largest eigenvalue encodes the optimal quaternion
        # (in Bar-Itzhack's scalar-last, conjugated convention).
        K3 = np.empty(shape+(4, 4))
        K3[..., 0, 0] = (rot[..., 0, 0] - rot[..., 1, 1] - rot[..., 2, 2])/3.0
        K3[..., 0, 1] = (rot[..., 1, 0] + rot[..., 0, 1])/3.0
        K3[..., 0, 2] = (rot[..., 2, 0] + rot[..., 0, 2])/3.0
        K3[..., 0, 3] = (rot[..., 1, 2] - rot[..., 2, 1])/3.0
        K3[..., 1, 0] = K3[..., 0, 1]
        K3[..., 1, 1] = (rot[..., 1, 1] - rot[..., 0, 0] - rot[..., 2, 2])/3.0
        K3[..., 1, 2] = (rot[..., 2, 1] + rot[..., 1, 2])/3.0
        K3[..., 1, 3] = (rot[..., 2, 0] - rot[..., 0, 2])/3.0
        K3[..., 2, 0] = K3[..., 0, 2]
        K3[..., 2, 1] = K3[..., 1, 2]
        K3[..., 2, 2] = (rot[..., 2, 2] - rot[..., 0, 0] - rot[..., 1, 1])/3.0
        K3[..., 2, 3] = (rot[..., 0, 1] - rot[..., 1, 0])/3.0
        K3[..., 3, 0] = K3[..., 0, 3]
        K3[..., 3, 1] = K3[..., 1, 3]
        K3[..., 3, 2] = K3[..., 2, 3]
        K3[..., 3, 3] = (rot[..., 0, 0] + rot[..., 1, 1] + rot[..., 2, 2])/3.0

        if not shape:
            # Single matrix: solve for the top eigenvector only.
            # NOTE(review): `eigvals=` is deprecated in recent scipy in favor
            # of `subset_by_index=` -- left as-is for older-scipy compatibility.
            q = zero.copy()
            eigvals, eigvecs = linalg.eigh(K3.T, eigvals=(3, 3))
            q.components[0] = eigvecs[-1]
            q.components[1:] = -eigvecs[:-1].flatten()
            return q
        else:
            # FIX: `np.float` (alias removed in NumPy 1.24) replaced by the
            # equivalent builtin `float`.
            q = np.empty(shape+(4,), dtype=float)
            for flat_index in range(reduce(mul, shape)):
                multi_index = np.unravel_index(flat_index, shape)
                eigvals, eigvecs = linalg.eigh(K3[multi_index], eigvals=(3, 3))
                q[multi_index+(0,)] = eigvecs[-1]
                q[multi_index+(slice(1,None),)] = -eigvecs[:-1].flatten()
            return as_quat_array(q)

    else:  # No scipy.linalg or not `nonorthogonal`
        # Markley's method: pick, per matrix, the numerically best of the
        # four equivalent component formulas (largest diagonal candidate).
        diagonals = np.empty(shape+(4,))
        diagonals[..., 0] = rot[..., 0, 0]
        diagonals[..., 1] = rot[..., 1, 1]
        diagonals[..., 2] = rot[..., 2, 2]
        diagonals[..., 3] = rot[..., 0, 0] + rot[..., 1, 1] + rot[..., 2, 2]

        indices = np.argmax(diagonals, axis=-1)

        q = diagonals  # reuse storage space

        indices_i = (indices == 0)
        if np.any(indices_i):
            if indices_i.shape == ():
                indices_i = Ellipsis
            rot_i = rot[indices_i, :, :]
            q[indices_i, 0] = rot_i[..., 2, 1] - rot_i[..., 1, 2]
            q[indices_i, 1] = 1 + rot_i[..., 0, 0] - rot_i[..., 1, 1] - rot_i[..., 2, 2]
            q[indices_i, 2] = rot_i[..., 0, 1] + rot_i[..., 1, 0]
            q[indices_i, 3] = rot_i[..., 0, 2] + rot_i[..., 2, 0]

        indices_i = (indices == 1)
        if np.any(indices_i):
            if indices_i.shape == ():
                indices_i = Ellipsis
            rot_i = rot[indices_i, :, :]
            q[indices_i, 0] = rot_i[..., 0, 2] - rot_i[..., 2, 0]
            q[indices_i, 1] = rot_i[..., 1, 0] + rot_i[..., 0, 1]
            q[indices_i, 2] = 1 - rot_i[..., 0, 0] + rot_i[..., 1, 1] - rot_i[..., 2, 2]
            q[indices_i, 3] = rot_i[..., 1, 2] + rot_i[..., 2, 1]

        indices_i = (indices == 2)
        if np.any(indices_i):
            if indices_i.shape == ():
                indices_i = Ellipsis
            rot_i = rot[indices_i, :, :]
            q[indices_i, 0] = rot_i[..., 1, 0] - rot_i[..., 0, 1]
            q[indices_i, 1] = rot_i[..., 2, 0] + rot_i[..., 0, 2]
            q[indices_i, 2] = rot_i[..., 2, 1] + rot_i[..., 1, 2]
            q[indices_i, 3] = 1 - rot_i[..., 0, 0] - rot_i[..., 1, 1] + rot_i[..., 2, 2]

        indices_i = (indices == 3)
        if np.any(indices_i):
            if indices_i.shape == ():
                indices_i = Ellipsis
            rot_i = rot[indices_i, :, :]
            q[indices_i, 0] = 1 + rot_i[..., 0, 0] + rot_i[..., 1, 1] + rot_i[..., 2, 2]
            q[indices_i, 1] = rot_i[..., 2, 1] - rot_i[..., 1, 2]
            q[indices_i, 2] = rot_i[..., 0, 2] - rot_i[..., 2, 0]
            q[indices_i, 3] = rot_i[..., 1, 0] - rot_i[..., 0, 1]

        q /= np.linalg.norm(q, axis=-1)[..., np.newaxis]

        return as_quat_array(q)
def as_rotation_vector(q):
    """Convert input quaternion to the axis-angle representation

    Note that if any of the input quaternions has norm zero, no error is
    raised, but NaNs will appear in the output.

    Parameters
    ----------
    q: quaternion or array of quaternions
        The quaternion(s) need not be normalized, but must all be nonzero

    Returns
    -------
    rot: float array
        Output shape is q.shape+(3,).  Each vector represents the axis of
        the rotation, with norm proportional to the angle of the rotation
        in radians.

    """
    # The log of a unit quaternion is (0, axis*angle/2); doubling it and
    # discarding the scalar slot yields the rotation vector.
    log_of_unit = np.log(np.normalized(q))
    return as_float_array(2*log_of_unit)[..., 1:]
def from_rotation_vector(rot):
    """Convert input 3-vector in axis-angle representation to unit quaternion

    Parameters
    ----------
    rot: (Nx3) float array
        Each vector represents the axis of the rotation, with norm
        proportional to the angle of the rotation in radians.

    Returns
    -------
    q: array of quaternions
        Unit quaternions resulting in rotations corresponding to input
        rotations.  Output shape is rot.shape[:-1].

    """
    vectors = np.array(rot, copy=False)
    # Pack each half-vector into the vector part of a pure quaternion...
    components = np.zeros(vectors.shape[:-1] + (4,))
    components[..., 1:] = vectors / 2
    # ...and exponentiate to obtain the corresponding unit rotor.
    return np.exp(as_quat_array(components))
def as_euler_angles(q):
    """Open Pandora's Box

    If somebody is trying to make you use Euler angles, tell them no, and
    walk away, and go and tell your mum.

    You don't want to use Euler angles.  They are awful.  Stay away.  It's
    one thing to convert from Euler angles to quaternions; at least you're
    moving in the right direction.  But to go the other way?!  It's just not
    right.

    Assumes the Euler angles correspond to the quaternion R via

        R = exp(alpha*z/2) * exp(beta*y/2) * exp(gamma*z/2)

    The angles are naturally in radians.

    NOTE: Before opening an issue reporting something "wrong" with this
    function, be sure to read all of the following page, *especially* the
    very last section about opening issues or pull requests.
    <https://github.com/moble/quaternion/wiki/Euler-angles-are-horrible>

    Parameters
    ----------
    q: quaternion or array of quaternions
        The quaternion(s) need not be normalized, but must all be nonzero

    Returns
    -------
    alpha_beta_gamma: float array
        Output shape is q.shape+(3,).  These represent the angles (alpha,
        beta, gamma) in radians, where the normalized input quaternion
        represents `exp(alpha*z/2) * exp(beta*y/2) * exp(gamma*z/2)`.

    Raises
    ------
    AllHell
        ...if you try to actually use Euler angles, when you could have
        been using quaternions like a sensible person.

    """
    # FIX: `np.float` (alias removed in NumPy 1.24) replaced by the
    # equivalent builtin `float`.
    alpha_beta_gamma = np.empty(q.shape + (3,), dtype=float)
    n = np.norm(q)
    q = as_float_array(q)
    # Components (w, x, y, z) are q[..., 0..3]; `n` normalizes the arccos
    # argument so unnormalized input is handled.
    alpha_beta_gamma[..., 0] = np.arctan2(q[..., 3], q[..., 0]) + np.arctan2(-q[..., 1], q[..., 2])
    alpha_beta_gamma[..., 1] = 2*np.arccos(np.sqrt((q[..., 0]**2 + q[..., 3]**2)/n))
    alpha_beta_gamma[..., 2] = np.arctan2(q[..., 3], q[..., 0]) - np.arctan2(-q[..., 1], q[..., 2])
    return alpha_beta_gamma
def as_spherical_coords(q):
    """Return the spherical coordinates corresponding to this quaternion

    Obviously, spherical coordinates do not contain as much information as
    a quaternion, so this function does lose some information.  However,
    the returned spherical coordinates will represent the point(s) on the
    sphere to which the input quaternion(s) rotate the z axis.

    Parameters
    ----------
    q: quaternion or array of quaternions
        The quaternion(s) need not be normalized, but must be nonzero

    Returns
    -------
    vartheta_varphi: float array
        Output shape is q.shape+(2,).  These represent the angles
        (vartheta, varphi) in radians, where the normalized input
        quaternion represents `exp(varphi*z/2) * exp(vartheta*y/2)`, up to
        an arbitrary inital rotation about `z`.

    """
    # The Euler angles are (alpha, beta, gamma); taking the first two in
    # reverse order gives (beta, alpha) == (vartheta, varphi).
    euler = as_euler_angles(q)
    return euler[..., 1::-1]
def from_spherical_coords(theta_phi, phi=None):
    """Return the quaternion corresponding to these spherical coordinates

    Assumes the spherical coordinates correspond to the quaternion R via

        R = exp(phi*z/2) * exp(theta*y/2)

    The angles naturally must be in radians for this to make any sense.

    Note that this quaternion rotates `z` onto the point with the given
    spherical coordinates, but also rotates `x` and `y` onto the usual
    basis vectors (theta and phi, respectively) at that point.

    Parameters
    ----------
    theta_phi: float or array of floats
        Either an array whose last dimension has size 2, holding the
        (theta, phi) values in radians for each point, or just the theta
        values in radians, in which case the next argument must also be
        given.
    phi: None, float, or array of floats
        If this array is given, it must be able to broadcast against the
        first argument.

    Returns
    -------
    R: quaternion array
        If the second argument is not given to this function, the shape
        will be the same as the input shape except for the last dimension,
        which will be removed.  If the second argument is given, this
        output array will have the shape resulting from broadcasting the
        two input arrays against each other.

    """
    # Normalize the two calling conventions into separate theta/phi arrays.
    if phi is None:
        pairs = np.asarray(theta_phi, dtype=np.double)
        theta = pairs[..., 0]
        phi = pairs[..., 1]
    else:
        theta = np.asarray(theta_phi, dtype=np.double)
        phi = np.asarray(phi, dtype=np.double)

    # Half-angle trigonometry for the two elementary rotors.
    cos_ht, sin_ht = np.cos(theta/2), np.sin(theta/2)
    cos_hp, sin_hp = np.cos(phi/2), np.sin(phi/2)

    # Components of exp(phi*z/2) * exp(theta*y/2), in (w, x, y, z) order.
    R = np.empty(np.broadcast(theta, phi).shape + (4,), dtype=np.double)
    R[..., 0] = cos_hp*cos_ht
    R[..., 1] = -sin_hp*sin_ht
    R[..., 2] = cos_hp*sin_ht
    R[..., 3] = sin_hp*cos_ht
    return as_quat_array(R)
def rotate_vectors(R, v, axis=-1):
    """Rotate vectors by given quaternions

    For simplicity, this function simply converts the input
    quaternion(s) to a matrix, and rotates the input vector(s) by the
    usual matrix multiplication.  However, it should be noted that if
    each input quaternion is only used to rotate a single vector, it
    is more efficient (in terms of operation counts) to use the
    formula

      v' = v + 2 * r x (s * v + r x v) / m

    where x represents the cross product, s and r are the scalar and
    vector parts of the quaternion, respectively, and m is the sum of
    the squares of the components of the quaternion.  If you are
    looping over a very large number of quaternions, and just rotating
    a single vector each time, you might want to implement that
    alternative algorithm using numba (or something that doesn't use
    python).

    Parameters
    ==========
    R: quaternion array
        Quaternions by which to rotate the input vectors
    v: float array
        Three-vectors to be rotated.
    axis: int
        Axis of the `v` array to use as the vector dimension.  This
        axis of `v` must have length 3.

    Returns
    =======
    vprime: float array
        The rotated vectors.  This array has shape R.shape+v.shape.

    """
    R = np.asarray(R, dtype=np.quaternion)
    v = np.asarray(v, dtype=float)
    if v.ndim < 1 or 3 not in v.shape:
        raise ValueError("Input `v` does not have at least one dimension of length 3")
    if v.shape[axis] != 3:
        raise ValueError("Input `v` axis {0} has length {1}, not 3.".format(axis, v.shape[axis]))
    m = as_rotation_matrix(R)
    # Label every axis of `m` and `v` with a distinct integer for the
    # list-of-axes form of einsum: `m` gets axes [0 .. m.ndim-1], and `v`
    # gets the next v.ndim integers.
    m_axes = list(range(m.ndim))
    v_axes = list(range(m.ndim, m.ndim+v.ndim))
    # Output axes: all of m's batch axes, then v's axes, except that v's
    # vector axis is relabeled to m's row axis (m_axes[-2]), so the result
    # carries the rotated components in that slot.
    mv_axes = list(v_axes)
    mv_axes[axis] = m_axes[-2]
    mv_axes = m_axes[:-2] + mv_axes
    # Relabel v's vector axis to m's column axis (m_axes[-1]); since that
    # label appears in both operands but not the output, einsum sums over
    # it -- the matrix-vector product.
    v_axes[axis] = m_axes[-1]
    return np.einsum(m, m_axes, v, v_axes, mv_axes)
def isclose(a, b, rtol=4*np.finfo(float).eps, atol=0.0, equal_nan=False):
    """
    Returns a boolean array where two arrays are element-wise equal within a
    tolerance.

    This function is essentially a copy of the `numpy.isclose` function,
    with different default tolerances and one minor changes necessary to
    deal correctly with quaternions.

    The tolerance values are positive, typically very small numbers.  The
    relative difference (`rtol` * abs(`b`)) and the absolute difference
    `atol` are added together to compare against the absolute difference
    between `a` and `b`.

    Parameters
    ----------
    a, b : array_like
        Input arrays to compare.
    rtol : float
        The relative tolerance parameter (see Notes).
    atol : float
        The absolute tolerance parameter (see Notes).
    equal_nan : bool
        Whether to compare NaN's as equal.  If True, NaN's in `a` will be
        considered equal to NaN's in `b` in the output array.

    Returns
    -------
    y : array_like
        Returns a boolean array of where `a` and `b` are equal within the
        given tolerance.  If both `a` and `b` are scalars, returns a single
        boolean value.

    See Also
    --------
    allclose

    Notes
    -----
    For finite values, isclose uses the following equation to test whether
    two floating point values are equivalent.

        absolute(`a` - `b`) <= (`atol` + `rtol` * absolute(`b`))

    The above equation is not symmetric in `a` and `b`, so that
    `isclose(a, b)` might be different from `isclose(b, a)` in
    some rare cases.

    Examples
    --------
    >>> quaternion.isclose([1e10*quaternion.x, 1e-7*quaternion.y], [1.00001e10*quaternion.x, 1e-8*quaternion.y],
    ...     rtol=1.e-5, atol=1.e-8)
    array([True, False])
    >>> quaternion.isclose([1e10*quaternion.x, 1e-8*quaternion.y], [1.00001e10*quaternion.x, 1e-9*quaternion.y],
    ...     rtol=1.e-5, atol=1.e-8)
    array([True, True])
    >>> quaternion.isclose([1e10*quaternion.x, 1e-8*quaternion.y], [1.0001e10*quaternion.x, 1e-9*quaternion.y],
    ...     rtol=1.e-5, atol=1.e-8)
    array([False, True])
    >>> quaternion.isclose([quaternion.x, np.nan*quaternion.y], [quaternion.x, np.nan*quaternion.y])
    array([True, False])
    >>> quaternion.isclose([quaternion.x, np.nan*quaternion.y], [quaternion.x, np.nan*quaternion.y], equal_nan=True)
    array([True, True])
    """
    # Closure over `a` and `b` so the result collapses back to a plain bool
    # when both original inputs were scalars.
    def within_tol(x, y, atol, rtol):
        with np.errstate(invalid='ignore'):
            result = np.less_equal(abs(x-y), atol + rtol * abs(y))
        if np.isscalar(a) and np.isscalar(b):
            result = bool(result)
        return result

    x = np.array(a, copy=False, subok=True, ndmin=1)
    y = np.array(b, copy=False, subok=True, ndmin=1)

    # Make sure y is an inexact type to avoid bad behavior on abs(MIN_INT).
    # This will cause casting of x later. Also, make sure to allow subclasses
    # (e.g., for numpy.ma).
    # The TypeError fallback is the quaternion-specific change relative to
    # numpy's isclose: result_type cannot combine the quaternion dtype with
    # a float, so the dtype is fixed explicitly.
    try:
        dt = np.result_type(y, 1.)
    except TypeError:
        dt = np.dtype(np.quaternion)
    y = np.array(y, dtype=dt, copy=False, subok=True)

    xfin = np.isfinite(x)
    yfin = np.isfinite(y)
    if np.all(xfin) and np.all(yfin):
        # Fast path: everything finite, no masking needed.
        return within_tol(x, y, atol, rtol)
    else:
        finite = xfin & yfin
        cond = np.zeros_like(finite, subok=True)
        # Because we're using boolean indexing, x & y must be the same shape.
        # Ideally, we'd just do x, y = broadcast_arrays(x, y). It's in
        # lib.stride_tricks, though, so we can't import it here.
        x = x * np.ones_like(cond)
        y = y * np.ones_like(cond)
        # Avoid subtraction with infinite/nan values...
        cond[finite] = within_tol(x[finite], y[finite], atol, rtol)
        # Check for equality of infinite values...
        cond[~finite] = (x[~finite] == y[~finite])
        if equal_nan:
            # Make NaN == NaN
            both_nan = np.isnan(x) & np.isnan(y)
            cond[both_nan] = both_nan[both_nan]
        if np.isscalar(a) and np.isscalar(b):
            return bool(cond)
        else:
            return cond
def allclose(a, b, rtol=4*np.finfo(float).eps, atol=0.0, equal_nan=False, verbose=False):
    """
    Returns True if two arrays are element-wise equal within a tolerance.

    This function is essentially a wrapper for the `quaternion.isclose`
    function, but returns a single boolean value of True if all elements
    of the output from `quaternion.isclose` are True, and False otherwise.

    Note that this function has stricter tolerances than the
    `numpy.allclose` function, as well as the additional `verbose` option.

    Parameters
    ----------
    a, b : array_like
        Input arrays to compare.
    rtol : float
        The relative tolerance parameter (see Notes).
    atol : float
        The absolute tolerance parameter (see Notes).
    equal_nan : bool
        Whether to compare NaN's as equal.  If True, NaN's in `a` will be
        considered equal to NaN's in `b` in the output array.
    verbose : bool
        If True and the return value is False, print every pair of
        elements that failed the tolerance test, with their indices.

    Returns
    -------
    allclose : bool
        Returns True if the two arrays are equal within the given
        tolerance; False otherwise.

    See Also
    --------
    isclose, numpy.all, numpy.any, numpy.allclose

    Notes
    -----
    If the following equation is element-wise True, then allclose returns
    True.

        absolute(`a` - `b`) <= (`atol` + `rtol` * absolute(`b`))

    The above equation is not symmetric in `a` and `b`, so that
    `allclose(a, b)` might be different from `allclose(b, a)` in
    some rare cases.

    """
    close = isclose(a, b, rtol=rtol, atol=atol, equal_nan=equal_nan)
    result = np.all(close)
    if verbose and not result:
        # Report each failing element alongside its multi-index.
        print('Non-close values:')
        for i in np.argwhere(close == False):
            i = tuple(i)
            print('\n x[{0}]={1}\n y[{0}]={2}'.format(i, a[i], b[i]))
    return result
|
moble/quaternion
|
__init__.py
|
from_spherical_coords
|
python
|
def from_spherical_coords(theta_phi, phi=None):
# Figure out the input angles from either type of input
if phi is None:
theta_phi = np.asarray(theta_phi, dtype=np.double)
theta = theta_phi[..., 0]
phi = theta_phi[..., 1]
else:
theta = np.asarray(theta_phi, dtype=np.double)
phi = np.asarray(phi, dtype=np.double)
# Set up the output array
R = np.empty(np.broadcast(theta, phi).shape + (4,), dtype=np.double)
# Compute the actual values of the quaternion components
R[..., 0] = np.cos(phi/2)*np.cos(theta/2) # scalar quaternion components
R[..., 1] = -np.sin(phi/2)*np.sin(theta/2) # x quaternion components
R[..., 2] = np.cos(phi/2)*np.sin(theta/2) # y quaternion components
R[..., 3] = np.sin(phi/2)*np.cos(theta/2) # z quaternion components
return as_quat_array(R)
|
Return the quaternion corresponding to these spherical coordinates
Assumes the spherical coordinates correspond to the quaternion R via
R = exp(phi*z/2) * exp(theta*y/2)
The angles naturally must be in radians for this to make any sense.
Note that this quaternion rotates `z` onto the point with the given
spherical coordinates, but also rotates `x` and `y` onto the usual basis
vectors (theta and phi, respectively) at that point.
Parameters
----------
theta_phi: float or array of floats
This argument may either contain an array with last dimension of
size 2, where those two elements describe the (theta, phi) values in
radians for each point; or it may contain just the theta values in
radians, in which case the next argument must also be given.
phi: None, float, or array of floats
If this array is given, it must be able to broadcast against the
first argument.
Returns
-------
R: quaternion array
If the second argument is not given to this function, the shape
will be the same as the input shape except for the last dimension,
which will be removed. If the second argument is given, this
output array will have the shape resulting from broadcasting the
two input arrays against each other.
|
train
|
https://github.com/moble/quaternion/blob/7a323e81b391d6892e2874073e495e0beb057e85/__init__.py#L507-L559
|
[
"def as_quat_array(a):\n \"\"\"View a float array as an array of quaternions\n\n The input array must have a final dimension whose size is\n divisible by four (or better yet *is* 4), because successive\n indices in that last dimension will be considered successive\n components of the output quaternion.\n\n This function is usually fast (of order 1 microsecond) because no\n data is copied; the returned quantity is just a \"view\" of the\n original. However, if the input array is not C-contiguous\n (basically, as you increment the index into the last dimension of\n the array, you just move to the neighboring float in memory), the\n data will need to be copied which may be quite slow. Therefore,\n you should try to ensure that the input array is in that order.\n Slices and transpositions will frequently break that rule.\n\n We will not convert back from a two-spinor array because there is\n no unique convention for them, so I don't want to mess with that.\n Also, we want to discourage users from the slow, memory-copying\n process of swapping columns required for useful definitions of\n the two-spinors.\n\n \"\"\"\n a = np.asarray(a, dtype=np.double)\n\n # fast path\n if a.shape == (4,):\n return quaternion(a[0], a[1], a[2], a[3])\n\n # view only works if the last axis is C-contiguous\n if not a.flags['C_CONTIGUOUS'] or a.strides[-1] != a.itemsize:\n a = a.copy(order='C')\n try:\n av = a.view(np.quaternion)\n except ValueError as e:\n message = (str(e) + '\\n '\n + 'Failed to view input data as a series of quaternions. '\n + 'Please ensure that the last dimension has size divisible by 4.\\n '\n + 'Input data has shape {0} and dtype {1}.'.format(a.shape, a.dtype))\n raise ValueError(message)\n\n # special case: don't create an axis for a single quaternion, to\n # match the output of `as_float_array`\n if av.shape[-1] == 1:\n av = av.reshape(a.shape[:-1])\n\n return av\n"
] |
# Copyright (c) 2017, Michael Boyle
# See LICENSE file for details: <https://github.com/moble/quaternion/blob/master/LICENSE>
from __future__ import division, print_function, absolute_import
import numpy as np
from .numpy_quaternion import (quaternion, _eps,
slerp_evaluate, squad_evaluate,
# slerp_vectorized, squad_vectorized,
# slerp, squad,
)
from .quaternion_time_series import slerp, squad, integrate_angular_velocity, minimal_rotation
from .calculus import derivative, definite_integral, indefinite_integral
from .means import mean_rotor_in_chordal_metric, optimal_alignment_in_chordal_metric
from ._version import __version__
__doc_title__ = "Quaternion dtype for NumPy"
__doc__ = "Adds a quaternion dtype to NumPy."
__all__ = ['quaternion',
'as_quat_array', 'as_spinor_array',
'as_float_array', 'from_float_array',
'as_rotation_matrix', 'from_rotation_matrix',
'as_rotation_vector', 'from_rotation_vector',
'as_euler_angles', 'from_euler_angles',
'as_spherical_coords', 'from_spherical_coords',
'rotate_vectors', 'allclose',
'rotor_intrinsic_distance', 'rotor_chordal_distance',
'rotation_intrinsic_distance', 'rotation_chordal_distance',
'slerp_evaluate', 'squad_evaluate',
'zero', 'one', 'x', 'y', 'z', 'integrate_angular_velocity',
'squad', 'slerp', 'derivative', 'definite_integral', 'indefinite_integral']
if 'quaternion' in np.__dict__:
raise RuntimeError('The NumPy package already has a quaternion type')
np.quaternion = quaternion
np.typeDict['quaternion'] = np.dtype(quaternion)
zero = np.quaternion(0, 0, 0, 0)
one = np.quaternion(1, 0, 0, 0)
x = np.quaternion(0, 1, 0, 0)
y = np.quaternion(0, 0, 1, 0)
z = np.quaternion(0, 0, 0, 1)
rotor_intrinsic_distance = np.rotor_intrinsic_distance
rotor_chordal_distance = np.rotor_chordal_distance
rotation_intrinsic_distance = np.rotation_intrinsic_distance
rotation_chordal_distance = np.rotation_chordal_distance
def as_float_array(a):
    """Reinterpret a quaternion array as an array of floats

    No data is copied: the result is a view of the original whose trailing
    dimension (of size 4) holds the four quaternion components, so this is
    fast (of order 1 microsecond).  The output therefore has one more
    dimension than the input, but is otherwise the same shape.

    """
    quat_array = np.asarray(a, dtype=np.quaternion)
    return quat_array.view((np.double, 4))
def as_quat_array(a):
    """View a float array as an array of quaternions

    The input array must have a final dimension whose size is divisible by
    four (or better yet *is* 4), because successive indices in that last
    dimension will be considered successive components of the output
    quaternion.

    Usually no data is copied, so this is fast (of order 1 microsecond);
    the returned quantity is just a "view" of the input.  However, if the
    input is not C-contiguous along its last axis (basically, as you
    increment the index into the last dimension, you just move to the
    neighboring float in memory), a possibly slow copy is required first.
    Try to pass data already in that order; slices and transpositions will
    frequently break that rule.

    We will not convert back from a two-spinor array because there is no
    unique convention for them, so I don't want to mess with that.  Also,
    we want to discourage users from the slow, memory-copying process of
    swapping columns required for useful definitions of the two-spinors.

    """
    arr = np.asarray(a, dtype=np.double)

    # Fast path: a single quaternion needs no view machinery at all.
    if arr.shape == (4,):
        return quaternion(arr[0], arr[1], arr[2], arr[3])

    # Reinterpreting the buffer only works when the last axis is C-contiguous.
    if arr.strides[-1] != arr.itemsize or not arr.flags['C_CONTIGUOUS']:
        arr = arr.copy(order='C')
    try:
        quat_view = arr.view(np.quaternion)
    except ValueError as e:
        message = (str(e) + '\n '
                   + 'Failed to view input data as a series of quaternions. '
                   + 'Please ensure that the last dimension has size divisible by 4.\n '
                   + 'Input data has shape {0} and dtype {1}.'.format(arr.shape, arr.dtype))
        raise ValueError(message)

    # A lone quaternion should not keep a trailing length-1 axis, to match
    # the output of `as_float_array`.
    if quat_view.shape[-1] == 1:
        quat_view = quat_view.reshape(arr.shape[:-1])

    return quat_view
def from_float_array(a):
    """Convert a float array to a quaternion array; alias for `as_quat_array`."""
    return as_quat_array(a)
def as_spinor_array(a):
    """View a quaternion array as spinors in two-complex representation

    This function is relatively slow and scales poorly, because memory
    copying is apparently involved -- I think it's due to the
    "advanced indexing" required to swap the columns.

    """
    a = np.atleast_1d(a)
    assert a.dtype == np.dtype(np.quaternion)
    # I'm not sure why it has to be so complicated, but all of these steps
    # appear to be necessary in this case.
    # FIX: `np.float`/`np.complex` (aliases of the builtins, removed in
    # NumPy 1.24) replaced by the equivalent `np.double`/`np.cdouble`.
    return a.view(np.double).reshape(a.shape + (4,))[..., [0, 3, 2, 1]].ravel().view(np.cdouble).reshape(a.shape + (2,))
def as_rotation_matrix(q):
    """Convert input quaternion to 3x3 rotation matrix

    Parameters
    ----------
    q: quaternion or array of quaternions
        The quaternion(s) need not be normalized, but must all be nonzero

    Returns
    -------
    rot: float array
        Output shape is q.shape+(3,3).  This matrix should multiply (from
        the left) a column vector to produce the rotated column vector.

    Raises
    ------
    ZeroDivisionError
        If any of the input quaternions have norm 0.0.

    """
    if q.shape == () and not isinstance(q, np.ndarray):  # This is just a single quaternion
        n = q.norm()
        if n == 0.0:
            raise ZeroDivisionError("Input to `as_rotation_matrix({0})` has zero norm".format(q))
        elif abs(n-1.0) < _eps:  # Input q is basically normalized
            # Standard quaternion-to-matrix formula; no division needed.
            return np.array([
                [1 - 2*(q.y**2 + q.z**2), 2*(q.x*q.y - q.z*q.w), 2*(q.x*q.z + q.y*q.w)],
                [2*(q.x*q.y + q.z*q.w), 1 - 2*(q.x**2 + q.z**2), 2*(q.y*q.z - q.x*q.w)],
                [2*(q.x*q.z - q.y*q.w), 2*(q.y*q.z + q.x*q.w), 1 - 2*(q.x**2 + q.y**2)]
            ])
        else:  # Input q is not normalized
            # Same formula, dividing each quadratic term by the norm `n`.
            return np.array([
                [1 - 2*(q.y**2 + q.z**2)/n, 2*(q.x*q.y - q.z*q.w)/n, 2*(q.x*q.z + q.y*q.w)/n],
                [2*(q.x*q.y + q.z*q.w)/n, 1 - 2*(q.x**2 + q.z**2)/n, 2*(q.y*q.z - q.x*q.w)/n],
                [2*(q.x*q.z - q.y*q.w)/n, 2*(q.y*q.z + q.x*q.w)/n, 1 - 2*(q.x**2 + q.y**2)/n]
            ])
    else:  # This is an array of quaternions
        n = np.norm(q)
        if np.any(n == 0.0):
            raise ZeroDivisionError("Array input to `as_rotation_matrix` has at least one element with zero norm")
        else:  # Assume input q is not normalized
            # Vectorized version of the norm-dividing formula above; the
            # float view puts components (w, x, y, z) at q[..., 0..3].
            m = np.empty(q.shape + (3, 3))
            q = as_float_array(q)
            m[..., 0, 0] = 1.0 - 2*(q[..., 2]**2 + q[..., 3]**2)/n
            m[..., 0, 1] = 2*(q[..., 1]*q[..., 2] - q[..., 3]*q[..., 0])/n
            m[..., 0, 2] = 2*(q[..., 1]*q[..., 3] + q[..., 2]*q[..., 0])/n
            m[..., 1, 0] = 2*(q[..., 1]*q[..., 2] + q[..., 3]*q[..., 0])/n
            m[..., 1, 1] = 1.0 - 2*(q[..., 1]**2 + q[..., 3]**2)/n
            m[..., 1, 2] = 2*(q[..., 2]*q[..., 3] - q[..., 1]*q[..., 0])/n
            m[..., 2, 0] = 2*(q[..., 1]*q[..., 3] - q[..., 2]*q[..., 0])/n
            m[..., 2, 1] = 2*(q[..., 2]*q[..., 3] + q[..., 1]*q[..., 0])/n
            m[..., 2, 2] = 1.0 - 2*(q[..., 1]**2 + q[..., 2]**2)/n
            return m
def from_rotation_matrix(rot, nonorthogonal=True):
    """Convert input 3x3 rotation matrix to unit quaternion

    By default, if scipy.linalg is available, this function uses
    Bar-Itzhack's algorithm to allow for non-orthogonal matrices.
    [J. Guidance, Vol. 23, No. 6, p. 1085 <http://dx.doi.org/10.2514/2.4654>]
    This will almost certainly be quite a bit slower than simpler versions,
    though it will be more robust to numerical errors in the rotation matrix.
    Also note that Bar-Itzhack uses some pretty weird conventions.  The last
    component of the quaternion appears to represent the scalar, and the
    quaternion itself is conjugated relative to the convention used
    throughout this module.

    If scipy.linalg is not available or if the optional
    `nonorthogonal` parameter is set to `False`, this function falls
    back to the possibly faster, but less robust, algorithm of Markley
    [J. Guidance, Vol. 31, No. 2, p. 440
    <http://dx.doi.org/10.2514/1.31730>].

    Parameters
    ----------
    rot: (...Nx3x3) float array
        Each 3x3 matrix represents a rotation by multiplying (from the left)
        a column vector to produce a rotated column vector.  Note that this
        input may actually have ndims>3; it is just assumed that the last
        two dimensions have size 3, representing the matrix.
    nonorthogonal: bool, optional
        If scipy.linalg is available, use the more robust algorithm of
        Bar-Itzhack.  Default value is True.

    Returns
    -------
    q: array of quaternions
        Unit quaternions resulting in rotations corresponding to input
        rotations.  Output shape is rot.shape[:-2].

    Raises
    ------
    LinAlgError
        If any of the eigenvalue solutions does not converge

    """
    try:
        from scipy import linalg
    except ImportError:
        linalg = False

    rot = np.array(rot, copy=False)
    shape = rot.shape[:-2]

    if linalg and nonorthogonal:
        from operator import mul
        from functools import reduce

        # Build Bar-Itzhack's symmetric 4x4 matrix K3; the eigenvector
        # belonging to its largest eigenvalue encodes the optimal quaternion
        # (in Bar-Itzhack's scalar-last, conjugated convention).
        K3 = np.empty(shape+(4, 4))
        K3[..., 0, 0] = (rot[..., 0, 0] - rot[..., 1, 1] - rot[..., 2, 2])/3.0
        K3[..., 0, 1] = (rot[..., 1, 0] + rot[..., 0, 1])/3.0
        K3[..., 0, 2] = (rot[..., 2, 0] + rot[..., 0, 2])/3.0
        K3[..., 0, 3] = (rot[..., 1, 2] - rot[..., 2, 1])/3.0
        K3[..., 1, 0] = K3[..., 0, 1]
        K3[..., 1, 1] = (rot[..., 1, 1] - rot[..., 0, 0] - rot[..., 2, 2])/3.0
        K3[..., 1, 2] = (rot[..., 2, 1] + rot[..., 1, 2])/3.0
        K3[..., 1, 3] = (rot[..., 2, 0] - rot[..., 0, 2])/3.0
        K3[..., 2, 0] = K3[..., 0, 2]
        K3[..., 2, 1] = K3[..., 1, 2]
        K3[..., 2, 2] = (rot[..., 2, 2] - rot[..., 0, 0] - rot[..., 1, 1])/3.0
        K3[..., 2, 3] = (rot[..., 0, 1] - rot[..., 1, 0])/3.0
        K3[..., 3, 0] = K3[..., 0, 3]
        K3[..., 3, 1] = K3[..., 1, 3]
        K3[..., 3, 2] = K3[..., 2, 3]
        K3[..., 3, 3] = (rot[..., 0, 0] + rot[..., 1, 1] + rot[..., 2, 2])/3.0

        if not shape:
            # Single matrix: solve for the top eigenvector only.
            # NOTE(review): `eigvals=` is deprecated in recent scipy in favor
            # of `subset_by_index=` -- left as-is for older-scipy compatibility.
            q = zero.copy()
            eigvals, eigvecs = linalg.eigh(K3.T, eigvals=(3, 3))
            q.components[0] = eigvecs[-1]
            q.components[1:] = -eigvecs[:-1].flatten()
            return q
        else:
            # FIX: `np.float` (alias removed in NumPy 1.24) replaced by the
            # equivalent builtin `float`.
            q = np.empty(shape+(4,), dtype=float)
            for flat_index in range(reduce(mul, shape)):
                multi_index = np.unravel_index(flat_index, shape)
                eigvals, eigvecs = linalg.eigh(K3[multi_index], eigvals=(3, 3))
                q[multi_index+(0,)] = eigvecs[-1]
                q[multi_index+(slice(1,None),)] = -eigvecs[:-1].flatten()
            return as_quat_array(q)

    else:  # No scipy.linalg or not `nonorthogonal`
        # Markley's method: pick, per matrix, the numerically best of the
        # four equivalent component formulas (largest diagonal candidate).
        diagonals = np.empty(shape+(4,))
        diagonals[..., 0] = rot[..., 0, 0]
        diagonals[..., 1] = rot[..., 1, 1]
        diagonals[..., 2] = rot[..., 2, 2]
        diagonals[..., 3] = rot[..., 0, 0] + rot[..., 1, 1] + rot[..., 2, 2]

        indices = np.argmax(diagonals, axis=-1)

        q = diagonals  # reuse storage space

        indices_i = (indices == 0)
        if np.any(indices_i):
            if indices_i.shape == ():
                indices_i = Ellipsis
            rot_i = rot[indices_i, :, :]
            q[indices_i, 0] = rot_i[..., 2, 1] - rot_i[..., 1, 2]
            q[indices_i, 1] = 1 + rot_i[..., 0, 0] - rot_i[..., 1, 1] - rot_i[..., 2, 2]
            q[indices_i, 2] = rot_i[..., 0, 1] + rot_i[..., 1, 0]
            q[indices_i, 3] = rot_i[..., 0, 2] + rot_i[..., 2, 0]

        indices_i = (indices == 1)
        if np.any(indices_i):
            if indices_i.shape == ():
                indices_i = Ellipsis
            rot_i = rot[indices_i, :, :]
            q[indices_i, 0] = rot_i[..., 0, 2] - rot_i[..., 2, 0]
            q[indices_i, 1] = rot_i[..., 1, 0] + rot_i[..., 0, 1]
            q[indices_i, 2] = 1 - rot_i[..., 0, 0] + rot_i[..., 1, 1] - rot_i[..., 2, 2]
            q[indices_i, 3] = rot_i[..., 1, 2] + rot_i[..., 2, 1]

        indices_i = (indices == 2)
        if np.any(indices_i):
            if indices_i.shape == ():
                indices_i = Ellipsis
            rot_i = rot[indices_i, :, :]
            q[indices_i, 0] = rot_i[..., 1, 0] - rot_i[..., 0, 1]
            q[indices_i, 1] = rot_i[..., 2, 0] + rot_i[..., 0, 2]
            q[indices_i, 2] = rot_i[..., 2, 1] + rot_i[..., 1, 2]
            q[indices_i, 3] = 1 - rot_i[..., 0, 0] - rot_i[..., 1, 1] + rot_i[..., 2, 2]

        indices_i = (indices == 3)
        if np.any(indices_i):
            if indices_i.shape == ():
                indices_i = Ellipsis
            rot_i = rot[indices_i, :, :]
            q[indices_i, 0] = 1 + rot_i[..., 0, 0] + rot_i[..., 1, 1] + rot_i[..., 2, 2]
            q[indices_i, 1] = rot_i[..., 2, 1] - rot_i[..., 1, 2]
            q[indices_i, 2] = rot_i[..., 0, 2] - rot_i[..., 2, 0]
            q[indices_i, 3] = rot_i[..., 1, 0] - rot_i[..., 0, 1]

        q /= np.linalg.norm(q, axis=-1)[..., np.newaxis]

        return as_quat_array(q)
def as_rotation_vector(q):
    """Convert input quaternion to the axis-angle representation

    Note that if any of the input quaternions has norm zero, no error is
    raised, but NaNs will appear in the output.

    Parameters
    ----------
    q: quaternion or array of quaternions
        The quaternion(s) need not be normalized, but must all be nonzero

    Returns
    -------
    rot: float array
        Output shape is q.shape+(3,).  Each vector represents the axis of
        the rotation, with norm proportional to the angle of the rotation
        in radians.

    """
    # Twice the log of the normalized quaternion is a pure-imaginary
    # quaternion whose vector part is the rotation vector; drop the
    # scalar component of the float view.
    unit = np.normalized(q)
    log_components = as_float_array(2 * np.log(unit))
    return log_components[..., 1:]
def from_rotation_vector(rot):
    """Convert input 3-vector in axis-angle representation to unit quaternion

    Parameters
    ----------
    rot: (Nx3) float array
        Each vector represents the axis of the rotation, with norm
        proportional to the angle of the rotation in radians.

    Returns
    -------
    q: array of quaternions
        Unit quaternions resulting in rotations corresponding to input
        rotations.  Output shape is rot.shape[:-1].

    """
    rot = np.array(rot, copy=False)
    # Half the rotation vector becomes the vector part of a pure-imaginary
    # quaternion; exponentiating that quaternion yields the unit rotor.
    components = np.zeros(rot.shape[:-1] + (4,))
    components[..., 1:] = rot[...] / 2
    return np.exp(as_quat_array(components))
def as_euler_angles(q):
    """Open Pandora's Box

    If somebody is trying to make you use Euler angles, tell them no, and
    walk away, and go and tell your mum.

    You don't want to use Euler angles.  They are awful.  Stay away.  It's
    one thing to convert from Euler angles to quaternions; at least you're
    moving in the right direction.  But to go the other way?!  It's just not
    right.

    Assumes the Euler angles correspond to the quaternion R via

        R = exp(alpha*z/2) * exp(beta*y/2) * exp(gamma*z/2)

    The angles are naturally in radians.

    NOTE: Before opening an issue reporting something "wrong" with this
    function, be sure to read all of the following page, *especially* the
    very last section about opening issues or pull requests.
    <https://github.com/moble/quaternion/wiki/Euler-angles-are-horrible>

    Parameters
    ----------
    q: quaternion or array of quaternions
        The quaternion(s) need not be normalized, but must all be nonzero

    Returns
    -------
    alpha_beta_gamma: float array
        Output shape is q.shape+(3,).  These represent the angles (alpha,
        beta, gamma) in radians, where the normalized input quaternion
        represents `exp(alpha*z/2) * exp(beta*y/2) * exp(gamma*z/2)`.

    Raises
    ------
    AllHell
        ...if you try to actually use Euler angles, when you could have
        been using quaternions like a sensible person.

    """
    # `np.float` was a deprecated alias of the builtin `float` and has been
    # removed from NumPy (>=1.24); the builtin is the equivalent spelling.
    alpha_beta_gamma = np.empty(q.shape + (3,), dtype=float)
    n = np.norm(q)  # norm as defined by the quaternion extension
    q = as_float_array(q)
    alpha_beta_gamma[..., 0] = np.arctan2(q[..., 3], q[..., 0]) + np.arctan2(-q[..., 1], q[..., 2])
    alpha_beta_gamma[..., 1] = 2*np.arccos(np.sqrt((q[..., 0]**2 + q[..., 3]**2)/n))
    alpha_beta_gamma[..., 2] = np.arctan2(q[..., 3], q[..., 0]) - np.arctan2(-q[..., 1], q[..., 2])
    return alpha_beta_gamma
def from_euler_angles(alpha_beta_gamma, beta=None, gamma=None):
    """Improve your life drastically

    Assumes the Euler angles correspond to the quaternion R via

        R = exp(alpha*z/2) * exp(beta*y/2) * exp(gamma*z/2)

    The angles naturally must be in radians for this to make any sense.

    NOTE: Before opening an issue reporting something "wrong" with this
    function, be sure to read all of the following page, *especially* the
    very last section about opening issues or pull requests.
    <https://github.com/moble/quaternion/wiki/Euler-angles-are-horrible>

    Parameters
    ----------
    alpha_beta_gamma: float or array of floats
        This argument may either contain an array with last dimension of
        size 3, where those three elements describe the (alpha, beta, gamma)
        radian values for each rotation; or it may contain just the alpha
        values, in which case the next two arguments must also be given.
    beta: None, float, or array of floats
        If this array is given, it must be able to broadcast against the
        first and third arguments.
    gamma: None, float, or array of floats
        If this array is given, it must be able to broadcast against the
        first and second arguments.

    Returns
    -------
    R: quaternion array
        The shape of this array will be the same as the input, except that
        the last dimension will be removed.

    """
    # Pull the three angles out of whichever calling convention was used.
    if gamma is None:
        angles = np.asarray(alpha_beta_gamma, dtype=np.double)
        alpha, beta, gamma = angles[..., 0], angles[..., 1], angles[..., 2]
    else:
        alpha = np.asarray(alpha_beta_gamma, dtype=np.double)
        beta = np.asarray(beta, dtype=np.double)
        gamma = np.asarray(gamma, dtype=np.double)
    # Half-angle terms shared by every component below
    cos_hb = np.cos(beta/2)
    sin_hb = np.sin(beta/2)
    half_sum = (alpha + gamma)/2
    half_diff = (alpha - gamma)/2
    # Assemble the four quaternion components over the broadcast shape
    components = np.empty(np.broadcast(alpha, beta, gamma).shape + (4,), dtype=np.double)
    components[..., 0] = cos_hb * np.cos(half_sum)    # scalar part
    components[..., 1] = -sin_hb * np.sin(half_diff)  # x part
    components[..., 2] = sin_hb * np.cos(half_diff)   # y part
    components[..., 3] = cos_hb * np.sin(half_sum)    # z part
    return as_quat_array(components)
def as_spherical_coords(q):
    """Return the spherical coordinates corresponding to this quaternion

    Obviously, spherical coordinates do not contain as much information as a
    quaternion, so this function does lose some information.  However, the
    returned spherical coordinates will represent the point(s) on the sphere
    to which the input quaternion(s) rotate the z axis.

    Parameters
    ----------
    q: quaternion or array of quaternions
        The quaternion(s) need not be normalized, but must be nonzero

    Returns
    -------
    vartheta_varphi: float array
        Output shape is q.shape+(2,).  These represent the angles (vartheta,
        varphi) in radians, where the normalized input quaternion represents
        `exp(varphi*z/2) * exp(vartheta*y/2)`, up to an arbitrary inital
        rotation about `z`.

    """
    # (vartheta, varphi) is just (beta, alpha) from the Euler angles,
    # i.e. the first two angles in reversed order; gamma is discarded.
    angles = as_euler_angles(q)
    return angles[..., 1::-1]
def rotate_vectors(R, v, axis=-1):
    """Rotate vectors by given quaternions

    For simplicity, this function simply converts the input
    quaternion(s) to a matrix, and rotates the input vector(s) by the
    usual matrix multiplication.  However, it should be noted that if
    each input quaternion is only used to rotate a single vector, it
    is more efficient (in terms of operation counts) to use the
    formula

      v' = v + 2 * r x (s * v + r x v) / m

    where x represents the cross product, s and r are the scalar and
    vector parts of the quaternion, respectively, and m is the sum of
    the squares of the components of the quaternion.  If you are
    looping over a very large number of quaternions, and just rotating
    a single vector each time, you might want to implement that
    alternative algorithm using numba (or something that doesn't use
    python).

    Parameters
    ==========
    R: quaternion array
        Quaternions by which to rotate the input vectors
    v: float array
        Three-vectors to be rotated.
    axis: int
        Axis of the `v` array to use as the vector dimension.  This
        axis of `v` must have length 3.

    Returns
    =======
    vprime: float array
        The rotated vectors.  This array has shape R.shape+v.shape.

    """
    R = np.asarray(R, dtype=np.quaternion)
    v = np.asarray(v, dtype=float)
    if v.ndim < 1 or 3 not in v.shape:
        raise ValueError("Input `v` does not have at least one dimension of length 3")
    if v.shape[axis] != 3:
        raise ValueError("Input `v` axis {0} has length {1}, not 3.".format(axis, v.shape[axis]))
    matrices = as_rotation_matrix(R)
    # Build einsum subscript lists: the matrices' row index replaces the
    # vector axis of `v` in the output, and the matrices' column index
    # contracts against that axis of `v`.
    mat_subs = list(range(matrices.ndim))
    vec_subs = list(range(matrices.ndim, matrices.ndim + v.ndim))
    out_subs = list(vec_subs)
    out_subs[axis] = mat_subs[-2]
    out_subs = mat_subs[:-2] + out_subs
    vec_subs[axis] = mat_subs[-1]
    return np.einsum(matrices, mat_subs, v, vec_subs, out_subs)
def isclose(a, b, rtol=4*np.finfo(float).eps, atol=0.0, equal_nan=False):
    """
    Returns a boolean array where two arrays are element-wise equal within a
    tolerance.

    This function is essentially a copy of the `numpy.isclose` function,
    with different default tolerances and one minor changes necessary to
    deal correctly with quaternions.

    The tolerance values are positive, typically very small numbers.  The
    relative difference (`rtol` * abs(`b`)) and the absolute difference
    `atol` are added together to compare against the absolute difference
    between `a` and `b`.

    Parameters
    ----------
    a, b : array_like
        Input arrays to compare.
    rtol : float
        The relative tolerance parameter (see Notes).
    atol : float
        The absolute tolerance parameter (see Notes).
    equal_nan : bool
        Whether to compare NaN's as equal.  If True, NaN's in `a` will be
        considered equal to NaN's in `b` in the output array.

    Returns
    -------
    y : array_like
        Returns a boolean array of where `a` and `b` are equal within the
        given tolerance.  If both `a` and `b` are scalars, returns a single
        boolean value.

    See Also
    --------
    allclose

    Notes
    -----
    For finite values, isclose uses the following equation to test whether
    two floating point values are equivalent:

     absolute(`a` - `b`) <= (`atol` + `rtol` * absolute(`b`))

    The above equation is not symmetric in `a` and `b`, so that
    `isclose(a, b)` might be different from `isclose(b, a)` in
    some rare cases.

    Examples
    --------
    >>> quaternion.isclose([1e10*quaternion.x, 1e-7*quaternion.y], [1.00001e10*quaternion.x, 1e-8*quaternion.y],
    ... rtol=1.e-5, atol=1.e-8)
    array([True, False])
    >>> quaternion.isclose([1e10*quaternion.x, 1e-8*quaternion.y], [1.00001e10*quaternion.x, 1e-9*quaternion.y],
    ... rtol=1.e-5, atol=1.e-8)
    array([True, True])
    >>> quaternion.isclose([1e10*quaternion.x, 1e-8*quaternion.y], [1.0001e10*quaternion.x, 1e-9*quaternion.y],
    ... rtol=1.e-5, atol=1.e-8)
    array([False, True])
    >>> quaternion.isclose([quaternion.x, np.nan*quaternion.y], [quaternion.x, np.nan*quaternion.y])
    array([True, False])
    >>> quaternion.isclose([quaternion.x, np.nan*quaternion.y], [quaternion.x, np.nan*quaternion.y], equal_nan=True)
    array([True, True])

    """
    # Core finite-value comparison; NaN warnings from the subtraction are
    # deliberately suppressed.  Collapses to a plain bool when both original
    # inputs were scalars.
    def within_tol(x, y, atol, rtol):
        with np.errstate(invalid='ignore'):
            result = np.less_equal(abs(x-y), atol + rtol * abs(y))
        if np.isscalar(a) and np.isscalar(b):
            result = bool(result)
        return result
    x = np.array(a, copy=False, subok=True, ndmin=1)
    y = np.array(b, copy=False, subok=True, ndmin=1)
    # Make sure y is an inexact type to avoid bad behavior on abs(MIN_INT).
    # This will cause casting of x later. Also, make sure to allow subclasses
    # (e.g., for numpy.ma).
    try:
        dt = np.result_type(y, 1.)
    except TypeError:
        # Quaternion arrays don't participate in NumPy's type promotion;
        # fall back to the quaternion dtype explicitly.
        dt = np.dtype(np.quaternion)
    y = np.array(y, dtype=dt, copy=False, subok=True)
    xfin = np.isfinite(x)
    yfin = np.isfinite(y)
    if np.all(xfin) and np.all(yfin):
        # Fast path: everything finite, compare directly.
        return within_tol(x, y, atol, rtol)
    else:
        finite = xfin & yfin
        cond = np.zeros_like(finite, subok=True)
        # Because we're using boolean indexing, x & y must be the same shape.
        # Ideally, we'd just do x, y = broadcast_arrays(x, y). It's in
        # lib.stride_tricks, though, so we can't import it here.
        x = x * np.ones_like(cond)
        y = y * np.ones_like(cond)
        # Avoid subtraction with infinite/nan values...
        cond[finite] = within_tol(x[finite], y[finite], atol, rtol)
        # Check for equality of infinite values...
        cond[~finite] = (x[~finite] == y[~finite])
        if equal_nan:
            # Make NaN == NaN
            both_nan = np.isnan(x) & np.isnan(y)
            cond[both_nan] = both_nan[both_nan]
        # Preserve scalar-in/scalar-out behavior.
        if np.isscalar(a) and np.isscalar(b):
            return bool(cond)
        else:
            return cond
def allclose(a, b, rtol=4*np.finfo(float).eps, atol=0.0, equal_nan=False, verbose=False):
    """
    Returns True if two arrays are element-wise equal within a tolerance.

    This function is essentially a wrapper for the `quaternion.isclose`
    function, but returns a single boolean value of True if all elements
    of the output from `quaternion.isclose` are True, and False otherwise.

    Note that this function has stricter tolerances than the
    `numpy.allclose` function, as well as the additional `verbose` option.

    Parameters
    ----------
    a, b : array_like
        Input arrays to compare.
    rtol : float
        The relative tolerance parameter (see Notes).
    atol : float
        The absolute tolerance parameter (see Notes).
    equal_nan : bool
        Whether to compare NaN's as equal.  If True, NaN's in `a` will be
        considered equal to NaN's in `b` in the output array.
    verbose : bool
        If True and the return value is False, the non-close pairs of
        values are printed before returning.

    Returns
    -------
    allclose : bool
        Returns True if the two arrays are equal within the given
        tolerance; False otherwise.

    See Also
    --------
    isclose, numpy.all, numpy.any, numpy.allclose

    Notes
    -----
    If the following equation is element-wise True, then allclose returns
    True.

     absolute(`a` - `b`) <= (`atol` + `rtol` * absolute(`b`))

    The above equation is not symmetric in `a` and `b`, so that
    `allclose(a, b)` might be different from `allclose(b, a)` in
    some rare cases.

    """
    close = isclose(a, b, rtol=rtol, atol=atol, equal_nan=equal_nan)
    result = np.all(close)
    if verbose and not result:
        print('Non-close values:')
        # `~close` selects the failing elements; comparing `== False`
        # is equivalent for boolean arrays but unidiomatic (flake8 E712).
        for i in np.argwhere(~close):
            i = tuple(i)
            print('\n x[{0}]={1}\n y[{0}]={2}'.format(i, a[i], b[i]))
    return result
|
moble/quaternion
|
__init__.py
|
rotate_vectors
|
python
|
def rotate_vectors(R, v, axis=-1):
R = np.asarray(R, dtype=np.quaternion)
v = np.asarray(v, dtype=float)
if v.ndim < 1 or 3 not in v.shape:
raise ValueError("Input `v` does not have at least one dimension of length 3")
if v.shape[axis] != 3:
raise ValueError("Input `v` axis {0} has length {1}, not 3.".format(axis, v.shape[axis]))
m = as_rotation_matrix(R)
m_axes = list(range(m.ndim))
v_axes = list(range(m.ndim, m.ndim+v.ndim))
mv_axes = list(v_axes)
mv_axes[axis] = m_axes[-2]
mv_axes = m_axes[:-2] + mv_axes
v_axes[axis] = m_axes[-1]
return np.einsum(m, m_axes, v, v_axes, mv_axes)
|
Rotate vectors by given quaternions
For simplicity, this function simply converts the input
quaternion(s) to a matrix, and rotates the input vector(s) by the
usual matrix multiplication. However, it should be noted that if
each input quaternion is only used to rotate a single vector, it
is more efficient (in terms of operation counts) to use the
formula
v' = v + 2 * r x (s * v + r x v) / m
where x represents the cross product, s and r are the scalar and
vector parts of the quaternion, respectively, and m is the sum of
the squares of the components of the quaternion. If you are
looping over a very large number of quaternions, and just rotating
a single vector each time, you might want to implement that
alternative algorithm using numba (or something that doesn't use
python).
Parameters
==========
R: quaternion array
Quaternions by which to rotate the input vectors
v: float array
Three-vectors to be rotated.
axis: int
Axis of the `v` array to use as the vector dimension. This
axis of `v` must have length 3.
Returns
=======
vprime: float array
The rotated vectors. This array has shape R.shape+v.shape.
|
train
|
https://github.com/moble/quaternion/blob/7a323e81b391d6892e2874073e495e0beb057e85/__init__.py#L562-L612
|
[
"def as_rotation_matrix(q):\n \"\"\"Convert input quaternion to 3x3 rotation matrix\n\n Parameters\n ----------\n q: quaternion or array of quaternions\n The quaternion(s) need not be normalized, but must all be nonzero\n\n Returns\n -------\n rot: float array\n Output shape is q.shape+(3,3). This matrix should multiply (from\n the left) a column vector to produce the rotated column vector.\n\n Raises\n ------\n ZeroDivisionError\n If any of the input quaternions have norm 0.0.\n\n \"\"\"\n if q.shape == () and not isinstance(q, np.ndarray): # This is just a single quaternion\n n = q.norm()\n if n == 0.0:\n raise ZeroDivisionError(\"Input to `as_rotation_matrix({0})` has zero norm\".format(q))\n elif abs(n-1.0) < _eps: # Input q is basically normalized\n return np.array([\n [1 - 2*(q.y**2 + q.z**2), 2*(q.x*q.y - q.z*q.w), 2*(q.x*q.z + q.y*q.w)],\n [2*(q.x*q.y + q.z*q.w), 1 - 2*(q.x**2 + q.z**2), 2*(q.y*q.z - q.x*q.w)],\n [2*(q.x*q.z - q.y*q.w), 2*(q.y*q.z + q.x*q.w), 1 - 2*(q.x**2 + q.y**2)]\n ])\n else: # Input q is not normalized\n return np.array([\n [1 - 2*(q.y**2 + q.z**2)/n, 2*(q.x*q.y - q.z*q.w)/n, 2*(q.x*q.z + q.y*q.w)/n],\n [2*(q.x*q.y + q.z*q.w)/n, 1 - 2*(q.x**2 + q.z**2)/n, 2*(q.y*q.z - q.x*q.w)/n],\n [2*(q.x*q.z - q.y*q.w)/n, 2*(q.y*q.z + q.x*q.w)/n, 1 - 2*(q.x**2 + q.y**2)/n]\n ])\n else: # This is an array of quaternions\n n = np.norm(q)\n if np.any(n == 0.0):\n raise ZeroDivisionError(\"Array input to `as_rotation_matrix` has at least one element with zero norm\")\n else: # Assume input q is not normalized\n m = np.empty(q.shape + (3, 3))\n q = as_float_array(q)\n m[..., 0, 0] = 1.0 - 2*(q[..., 2]**2 + q[..., 3]**2)/n\n m[..., 0, 1] = 2*(q[..., 1]*q[..., 2] - q[..., 3]*q[..., 0])/n\n m[..., 0, 2] = 2*(q[..., 1]*q[..., 3] + q[..., 2]*q[..., 0])/n\n m[..., 1, 0] = 2*(q[..., 1]*q[..., 2] + q[..., 3]*q[..., 0])/n\n m[..., 1, 1] = 1.0 - 2*(q[..., 1]**2 + q[..., 3]**2)/n\n m[..., 1, 2] = 2*(q[..., 2]*q[..., 3] - q[..., 1]*q[..., 0])/n\n m[..., 2, 0] = 
2*(q[..., 1]*q[..., 3] - q[..., 2]*q[..., 0])/n\n m[..., 2, 1] = 2*(q[..., 2]*q[..., 3] + q[..., 1]*q[..., 0])/n\n m[..., 2, 2] = 1.0 - 2*(q[..., 1]**2 + q[..., 2]**2)/n\n return m\n"
] |
# Copyright (c) 2017, Michael Boyle
# See LICENSE file for details: <https://github.com/moble/quaternion/blob/master/LICENSE>
from __future__ import division, print_function, absolute_import
import numpy as np
from .numpy_quaternion import (quaternion, _eps,
slerp_evaluate, squad_evaluate,
# slerp_vectorized, squad_vectorized,
# slerp, squad,
)
from .quaternion_time_series import slerp, squad, integrate_angular_velocity, minimal_rotation
from .calculus import derivative, definite_integral, indefinite_integral
from .means import mean_rotor_in_chordal_metric, optimal_alignment_in_chordal_metric
from ._version import __version__
__doc_title__ = "Quaternion dtype for NumPy"
__doc__ = "Adds a quaternion dtype to NumPy."
__all__ = ['quaternion',
'as_quat_array', 'as_spinor_array',
'as_float_array', 'from_float_array',
'as_rotation_matrix', 'from_rotation_matrix',
'as_rotation_vector', 'from_rotation_vector',
'as_euler_angles', 'from_euler_angles',
'as_spherical_coords', 'from_spherical_coords',
'rotate_vectors', 'allclose',
'rotor_intrinsic_distance', 'rotor_chordal_distance',
'rotation_intrinsic_distance', 'rotation_chordal_distance',
'slerp_evaluate', 'squad_evaluate',
'zero', 'one', 'x', 'y', 'z', 'integrate_angular_velocity',
'squad', 'slerp', 'derivative', 'definite_integral', 'indefinite_integral']
# Guard against double registration: the compiled extension can only add the
# quaternion dtype to NumPy once per process.
if 'quaternion' in np.__dict__:
    raise RuntimeError('The NumPy package already has a quaternion type')
# Expose the quaternion scalar type on the numpy module and register its
# dtype under the name 'quaternion'.
np.quaternion = quaternion
# NOTE(review): `np.typeDict` was deprecated and has been removed in recent
# NumPy releases (`np.sctypeDict` is the surviving spelling) -- confirm the
# supported NumPy version range.
np.typeDict['quaternion'] = np.dtype(quaternion)
# Handy singleton constants: zero, the identity rotor, and the three
# basis quaternions.
zero = np.quaternion(0, 0, 0, 0)
one = np.quaternion(1, 0, 0, 0)
x = np.quaternion(0, 1, 0, 0)
y = np.quaternion(0, 0, 1, 0)
z = np.quaternion(0, 0, 0, 1)
# Re-export the distance functions found on the numpy module; presumably
# installed there by the compiled extension imported above -- verify.
rotor_intrinsic_distance = np.rotor_intrinsic_distance
rotor_chordal_distance = np.rotor_chordal_distance
rotation_intrinsic_distance = np.rotation_intrinsic_distance
rotation_chordal_distance = np.rotation_chordal_distance
def as_float_array(a):
    """View the quaternion array as an array of floats

    This function is fast (of order 1 microsecond) because no data is
    copied; the returned quantity is just a "view" of the original.

    The output view has one more dimension (of size 4) than the input
    array, but is otherwise the same shape.

    """
    quat_array = np.asarray(a, dtype=np.quaternion)
    # Reinterpret each quaternion's storage as four contiguous doubles.
    return quat_array.view((np.double, 4))
def as_quat_array(a):
    """View a float array as an array of quaternions

    The input array must have a final dimension whose size is
    divisible by four (or better yet *is* 4), because successive
    indices in that last dimension will be considered successive
    components of the output quaternion.

    This function is usually fast (of order 1 microsecond) because no
    data is copied; the returned quantity is just a "view" of the
    original.  However, if the input array is not C-contiguous
    (basically, as you increment the index into the last dimension of
    the array, you just move to the neighboring float in memory), the
    data will need to be copied which may be quite slow.  Therefore,
    you should try to ensure that the input array is in that order.
    Slices and transpositions will frequently break that rule.

    We will not convert back from a two-spinor array because there is
    no unique convention for them, so I don't want to mess with that.
    Also, we want to discourage users from the slow, memory-copying
    process of swapping columns required for useful definitions of
    the two-spinors.

    """
    a = np.asarray(a, dtype=np.double)
    # fast path: a single flat 4-vector becomes a single quaternion scalar
    if a.shape == (4,):
        return quaternion(a[0], a[1], a[2], a[3])
    # view only works if the last axis is C-contiguous; copy if necessary
    if not a.flags['C_CONTIGUOUS'] or a.strides[-1] != a.itemsize:
        a = a.copy(order='C')
    try:
        av = a.view(np.quaternion)
    except ValueError as e:
        message = (str(e) + '\n '
                   + 'Failed to view input data as a series of quaternions. '
                   + 'Please ensure that the last dimension has size divisible by 4.\n '
                   + 'Input data has shape {0} and dtype {1}.'.format(a.shape, a.dtype))
        raise ValueError(message)
    # special case: don't create an axis for a single quaternion, to
    # match the output of `as_float_array`
    if av.shape[-1] == 1:
        av = av.reshape(a.shape[:-1])
    return av
def from_float_array(a):
    """Construct a quaternion array from an array of floats

    Thin alias for `as_quat_array`; see that function for the details
    and caveats of the conversion.
    """
    return as_quat_array(a)
def as_spinor_array(a):
    """View a quaternion array as spinors in two-complex representation

    This function is relatively slow and scales poorly, because memory
    copying is apparently involved -- I think it's due to the
    "advanced indexing" required to swap the columns.

    """
    a = np.atleast_1d(a)
    assert a.dtype == np.dtype(np.quaternion)
    # `np.float` and `np.complex` were deprecated aliases of the builtins
    # and have been removed from NumPy (>=1.24); the builtins give the same
    # float64/complex128 views.
    # I'm not sure why it has to be so complicated, but all of these steps
    # appear to be necessary in this case.
    return a.view(float).reshape(a.shape + (4,))[..., [0, 3, 2, 1]].ravel().view(complex).reshape(a.shape + (2,))
def as_rotation_matrix(q):
    """Convert input quaternion to 3x3 rotation matrix

    Parameters
    ----------
    q: quaternion or array of quaternions
        The quaternion(s) need not be normalized, but must all be nonzero

    Returns
    -------
    rot: float array
        Output shape is q.shape+(3,3).  This matrix should multiply (from
        the left) a column vector to produce the rotated column vector.

    Raises
    ------
    ZeroDivisionError
        If any of the input quaternions have norm 0.0.

    """
    if q.shape == () and not isinstance(q, np.ndarray):  # This is just a single quaternion
        n = q.norm()
        if n == 0.0:
            raise ZeroDivisionError("Input to `as_rotation_matrix({0})` has zero norm".format(q))
        elif abs(n-1.0) < _eps:  # Input q is basically normalized
            # Standard unit-quaternion -> rotation-matrix formula
            return np.array([
                [1 - 2*(q.y**2 + q.z**2), 2*(q.x*q.y - q.z*q.w), 2*(q.x*q.z + q.y*q.w)],
                [2*(q.x*q.y + q.z*q.w), 1 - 2*(q.x**2 + q.z**2), 2*(q.y*q.z - q.x*q.w)],
                [2*(q.x*q.z - q.y*q.w), 2*(q.y*q.z + q.x*q.w), 1 - 2*(q.x**2 + q.y**2)]
            ])
        else:  # Input q is not normalized
            # Same formula with each quadratic term divided by the value
            # returned by q.norm().  NOTE(review): the division pattern
            # implies `norm` is the squared magnitude here -- confirm
            # against the compiled extension.
            return np.array([
                [1 - 2*(q.y**2 + q.z**2)/n, 2*(q.x*q.y - q.z*q.w)/n, 2*(q.x*q.z + q.y*q.w)/n],
                [2*(q.x*q.y + q.z*q.w)/n, 1 - 2*(q.x**2 + q.z**2)/n, 2*(q.y*q.z - q.x*q.w)/n],
                [2*(q.x*q.z - q.y*q.w)/n, 2*(q.y*q.z + q.x*q.w)/n, 1 - 2*(q.x**2 + q.y**2)/n]
            ])
    else:  # This is an array of quaternions
        n = np.norm(q)
        if np.any(n == 0.0):
            raise ZeroDivisionError("Array input to `as_rotation_matrix` has at least one element with zero norm")
        else:  # Assume input q is not normalized
            # Vectorized version of the scaled formula above, filled
            # component-by-component from the float view of `q`.
            m = np.empty(q.shape + (3, 3))
            q = as_float_array(q)
            m[..., 0, 0] = 1.0 - 2*(q[..., 2]**2 + q[..., 3]**2)/n
            m[..., 0, 1] = 2*(q[..., 1]*q[..., 2] - q[..., 3]*q[..., 0])/n
            m[..., 0, 2] = 2*(q[..., 1]*q[..., 3] + q[..., 2]*q[..., 0])/n
            m[..., 1, 0] = 2*(q[..., 1]*q[..., 2] + q[..., 3]*q[..., 0])/n
            m[..., 1, 1] = 1.0 - 2*(q[..., 1]**2 + q[..., 3]**2)/n
            m[..., 1, 2] = 2*(q[..., 2]*q[..., 3] - q[..., 1]*q[..., 0])/n
            m[..., 2, 0] = 2*(q[..., 1]*q[..., 3] - q[..., 2]*q[..., 0])/n
            m[..., 2, 1] = 2*(q[..., 2]*q[..., 3] + q[..., 1]*q[..., 0])/n
            m[..., 2, 2] = 1.0 - 2*(q[..., 1]**2 + q[..., 2]**2)/n
            return m
def from_rotation_matrix(rot, nonorthogonal=True):
    """Convert input 3x3 rotation matrix to unit quaternion

    By default, if scipy.linalg is available, this function uses
    Bar-Itzhack's algorithm to allow for non-orthogonal matrices.
    [J. Guidance, Vol. 23, No. 6, p. 1085 <http://dx.doi.org/10.2514/2.4654>]
    This will almost certainly be quite a bit slower than simpler versions,
    though it will be more robust to numerical errors in the rotation matrix.
    Also note that Bar-Itzhack uses some pretty weird conventions.  The last
    component of the quaternion appears to represent the scalar, and the
    quaternion itself is conjugated relative to the convention used
    throughout this module.

    If scipy.linalg is not available or if the optional
    `nonorthogonal` parameter is set to `False`, this function falls
    back to the possibly faster, but less robust, algorithm of Markley
    [J. Guidance, Vol. 31, No. 2, p. 440
    <http://dx.doi.org/10.2514/1.31730>].

    Parameters
    ----------
    rot: (...Nx3x3) float array
        Each 3x3 matrix represents a rotation by multiplying (from the left)
        a column vector to produce a rotated column vector.  Note that this
        input may actually have ndims>3; it is just assumed that the last
        two dimensions have size 3, representing the matrix.
    nonorthogonal: bool, optional
        If scipy.linalg is available, use the more robust algorithm of
        Bar-Itzhack.  Default value is True.

    Returns
    -------
    q: array of quaternions
        Unit quaternions resulting in rotations corresponding to input
        rotations.  Output shape is rot.shape[:-2].

    Raises
    ------
    LinAlgError
        If any of the eigenvalue solutions does not converge

    """
    try:
        from scipy import linalg
    except ImportError:
        linalg = False

    # `np.array(rot, copy=False)` raises under NumPy >= 2.0 whenever a copy
    # is actually required; `np.asarray` keeps the old copy-if-needed
    # semantics and is otherwise identical.
    rot = np.asarray(rot)
    shape = rot.shape[:-2]

    if linalg and nonorthogonal:
        from operator import mul
        from functools import reduce

        # Build Bar-Itzhack's symmetric 4x4 matrix K3 for each input matrix;
        # its dominant eigenvector encodes the optimal quaternion.
        K3 = np.empty(shape+(4, 4))
        K3[..., 0, 0] = (rot[..., 0, 0] - rot[..., 1, 1] - rot[..., 2, 2])/3.0
        K3[..., 0, 1] = (rot[..., 1, 0] + rot[..., 0, 1])/3.0
        K3[..., 0, 2] = (rot[..., 2, 0] + rot[..., 0, 2])/3.0
        K3[..., 0, 3] = (rot[..., 1, 2] - rot[..., 2, 1])/3.0
        K3[..., 1, 0] = K3[..., 0, 1]
        K3[..., 1, 1] = (rot[..., 1, 1] - rot[..., 0, 0] - rot[..., 2, 2])/3.0
        K3[..., 1, 2] = (rot[..., 2, 1] + rot[..., 1, 2])/3.0
        K3[..., 1, 3] = (rot[..., 2, 0] - rot[..., 0, 2])/3.0
        K3[..., 2, 0] = K3[..., 0, 2]
        K3[..., 2, 1] = K3[..., 1, 2]
        K3[..., 2, 2] = (rot[..., 2, 2] - rot[..., 0, 0] - rot[..., 1, 1])/3.0
        K3[..., 2, 3] = (rot[..., 0, 1] - rot[..., 1, 0])/3.0
        K3[..., 3, 0] = K3[..., 0, 3]
        K3[..., 3, 1] = K3[..., 1, 3]
        K3[..., 3, 2] = K3[..., 2, 3]
        K3[..., 3, 3] = (rot[..., 0, 0] + rot[..., 1, 1] + rot[..., 2, 2])/3.0

        if not shape:
            # Single matrix: extract only the largest eigenvalue's vector.
            # NOTE(review): scipy deprecated `eigh`'s `eigvals` keyword in
            # favor of `subset_by_index`; confirm the supported scipy range.
            q = zero.copy()
            eigvals, eigvecs = linalg.eigh(K3.T, eigvals=(3, 3))
            q.components[0] = eigvecs[-1]
            q.components[1:] = -eigvecs[:-1].flatten()
            return q
        else:
            # `np.float` was removed from NumPy (>=1.24); the builtin
            # `float` is the equivalent (float64) spelling.
            q = np.empty(shape+(4,), dtype=float)
            for flat_index in range(reduce(mul, shape)):
                multi_index = np.unravel_index(flat_index, shape)
                eigvals, eigvecs = linalg.eigh(K3[multi_index], eigvals=(3, 3))
                q[multi_index+(0,)] = eigvecs[-1]
                q[multi_index+(slice(1,None),)] = -eigvecs[:-1].flatten()
            return as_quat_array(q)

    else:  # No scipy.linalg or not `nonorthogonal`
        # Markley's method: pick, per element, the numerically largest of
        # the four candidate denominators (the three diagonal entries and
        # the trace) to avoid cancellation.
        diagonals = np.empty(shape+(4,))
        diagonals[..., 0] = rot[..., 0, 0]
        diagonals[..., 1] = rot[..., 1, 1]
        diagonals[..., 2] = rot[..., 2, 2]
        diagonals[..., 3] = rot[..., 0, 0] + rot[..., 1, 1] + rot[..., 2, 2]

        indices = np.argmax(diagonals, axis=-1)

        q = diagonals  # reuse storage space

        indices_i = (indices == 0)
        if np.any(indices_i):
            if indices_i.shape == ():
                indices_i = Ellipsis
            rot_i = rot[indices_i, :, :]
            q[indices_i, 0] = rot_i[..., 2, 1] - rot_i[..., 1, 2]
            q[indices_i, 1] = 1 + rot_i[..., 0, 0] - rot_i[..., 1, 1] - rot_i[..., 2, 2]
            q[indices_i, 2] = rot_i[..., 0, 1] + rot_i[..., 1, 0]
            q[indices_i, 3] = rot_i[..., 0, 2] + rot_i[..., 2, 0]

        indices_i = (indices == 1)
        if np.any(indices_i):
            if indices_i.shape == ():
                indices_i = Ellipsis
            rot_i = rot[indices_i, :, :]
            q[indices_i, 0] = rot_i[..., 0, 2] - rot_i[..., 2, 0]
            q[indices_i, 1] = rot_i[..., 1, 0] + rot_i[..., 0, 1]
            q[indices_i, 2] = 1 - rot_i[..., 0, 0] + rot_i[..., 1, 1] - rot_i[..., 2, 2]
            q[indices_i, 3] = rot_i[..., 1, 2] + rot_i[..., 2, 1]

        indices_i = (indices == 2)
        if np.any(indices_i):
            if indices_i.shape == ():
                indices_i = Ellipsis
            rot_i = rot[indices_i, :, :]
            q[indices_i, 0] = rot_i[..., 1, 0] - rot_i[..., 0, 1]
            q[indices_i, 1] = rot_i[..., 2, 0] + rot_i[..., 0, 2]
            q[indices_i, 2] = rot_i[..., 2, 1] + rot_i[..., 1, 2]
            q[indices_i, 3] = 1 - rot_i[..., 0, 0] - rot_i[..., 1, 1] + rot_i[..., 2, 2]

        indices_i = (indices == 3)
        if np.any(indices_i):
            if indices_i.shape == ():
                indices_i = Ellipsis
            rot_i = rot[indices_i, :, :]
            q[indices_i, 0] = 1 + rot_i[..., 0, 0] + rot_i[..., 1, 1] + rot_i[..., 2, 2]
            q[indices_i, 1] = rot_i[..., 2, 1] - rot_i[..., 1, 2]
            q[indices_i, 2] = rot_i[..., 0, 2] - rot_i[..., 2, 0]
            q[indices_i, 3] = rot_i[..., 1, 0] - rot_i[..., 0, 1]

        # Normalize each candidate to a unit quaternion before viewing.
        q /= np.linalg.norm(q, axis=-1)[..., np.newaxis]

        return as_quat_array(q)
def as_rotation_vector(q):
    """Convert input quaternion to the axis-angle representation

    Note that if any of the input quaternions has norm zero, no error is
    raised, but NaNs will appear in the output.

    Parameters
    ----------
    q: quaternion or array of quaternions
        The quaternion(s) need not be normalized, but must all be nonzero

    Returns
    -------
    rot: float array
        Output shape is q.shape+(3,).  Each vector represents the axis of
        the rotation, with norm proportional to the angle of the rotation in
        radians.

    """
    # 2*log of the normalized quaternion is pure-imaginary; its vector part
    # (components 1:) is the rotation vector.  NOTE(review): `np.normalized`
    # is presumably installed on numpy by the compiled extension -- verify.
    return as_float_array(2*np.log(np.normalized(q)))[..., 1:]
def from_rotation_vector(rot):
    """Convert input 3-vector in axis-angle representation to unit quaternion

    Parameters
    ----------
    rot: (Nx3) float array
        Each vector represents the axis of the rotation, with norm
        proportional to the angle of the rotation in radians.

    Returns
    -------
    q: array of quaternions
        Unit quaternions resulting in rotations corresponding to input
        rotations.  Output shape is rot.shape[:-1].

    """
    rot = np.array(rot, copy=False)
    quats = np.zeros(rot.shape[:-1]+(4,))
    # Half the rotation vector forms the vector part of a pure-imaginary
    # quaternion; exponentiating it yields the corresponding unit rotor.
    quats[..., 1:] = rot[...]/2
    quats = as_quat_array(quats)
    return np.exp(quats)
def as_euler_angles(q):
    """Open Pandora's Box

    If somebody is trying to make you use Euler angles, tell them no, and
    walk away, and go and tell your mum.

    You don't want to use Euler angles.  They are awful.  Stay away.  It's
    one thing to convert from Euler angles to quaternions; at least you're
    moving in the right direction.  But to go the other way?!  It's just not
    right.

    Assumes the Euler angles correspond to the quaternion R via

        R = exp(alpha*z/2) * exp(beta*y/2) * exp(gamma*z/2)

    The angles are naturally in radians.

    NOTE: Before opening an issue reporting something "wrong" with this
    function, be sure to read all of the following page, *especially* the
    very last section about opening issues or pull requests.
    <https://github.com/moble/quaternion/wiki/Euler-angles-are-horrible>

    Parameters
    ----------
    q: quaternion or array of quaternions
        The quaternion(s) need not be normalized, but must all be nonzero

    Returns
    -------
    alpha_beta_gamma: float array
        Output shape is q.shape+(3,).  These represent the angles (alpha,
        beta, gamma) in radians, where the normalized input quaternion
        represents `exp(alpha*z/2) * exp(beta*y/2) * exp(gamma*z/2)`.

    Raises
    ------
    AllHell
        ...if you try to actually use Euler angles, when you could have
        been using quaternions like a sensible person.

    """
    # `np.float` was a deprecated alias of the builtin `float` and has been
    # removed from NumPy (>=1.24); the builtin is the equivalent spelling.
    alpha_beta_gamma = np.empty(q.shape + (3,), dtype=float)
    n = np.norm(q)  # norm as defined by the quaternion extension
    q = as_float_array(q)
    alpha_beta_gamma[..., 0] = np.arctan2(q[..., 3], q[..., 0]) + np.arctan2(-q[..., 1], q[..., 2])
    alpha_beta_gamma[..., 1] = 2*np.arccos(np.sqrt((q[..., 0]**2 + q[..., 3]**2)/n))
    alpha_beta_gamma[..., 2] = np.arctan2(q[..., 3], q[..., 0]) - np.arctan2(-q[..., 1], q[..., 2])
    return alpha_beta_gamma
def from_euler_angles(alpha_beta_gamma, beta=None, gamma=None):
    """Improve your life drastically

    Assumes the Euler angles correspond to the quaternion R via

        R = exp(alpha*z/2) * exp(beta*y/2) * exp(gamma*z/2)

    The angles naturally must be in radians for this to make any sense.

    NOTE: Before opening an issue reporting something "wrong" with this
    function, be sure to read all of the following page, *especially* the
    very last section about opening issues or pull requests.
    <https://github.com/moble/quaternion/wiki/Euler-angles-are-horrible>

    Parameters
    ----------
    alpha_beta_gamma: float or array of floats
        Either an array whose last dimension has size 3, holding the
        (alpha, beta, gamma) triples, or just the alpha values -- in which
        case `beta` and `gamma` must also be supplied.
    beta: None, float, or array of floats
        When given, must broadcast against the other two arguments.
    gamma: None, float, or array of floats
        When given, must broadcast against the other two arguments.

    Returns
    -------
    R: quaternion array
        Same shape as the input, except that the last dimension is removed
        when the packed (alpha, beta, gamma) form is used.
    """
    # Accept either the packed triple or three separate angle arrays
    if gamma is None:
        packed = np.asarray(alpha_beta_gamma, dtype=np.double)
        alpha = packed[..., 0]
        beta = packed[..., 1]
        gamma = packed[..., 2]
    else:
        alpha = np.asarray(alpha_beta_gamma, dtype=np.double)
        beta = np.asarray(beta, dtype=np.double)
        gamma = np.asarray(gamma, dtype=np.double)
    # Half-angle factors shared by several components
    cos_hb = np.cos(beta/2)
    sin_hb = np.sin(beta/2)
    half_sum = (alpha+gamma)/2
    half_diff = (alpha-gamma)/2
    components = np.empty(np.broadcast(alpha, beta, gamma).shape + (4,), dtype=np.double)
    components[..., 0] = cos_hb*np.cos(half_sum)    # scalar (w)
    components[..., 1] = -sin_hb*np.sin(half_diff)  # x
    components[..., 2] = sin_hb*np.cos(half_diff)   # y
    components[..., 3] = cos_hb*np.sin(half_sum)    # z
    return as_quat_array(components)
def as_spherical_coords(q):
    """Return the spherical coordinates corresponding to this quaternion

    Spherical coordinates carry less information than a quaternion, so
    this conversion is lossy: the result describes only the point(s) on
    the sphere to which the input quaternion(s) rotate the z axis.

    Parameters
    ----------
    q: quaternion or array of quaternions
        The quaternion(s) need not be normalized, but must be nonzero

    Returns
    -------
    vartheta_varphi: float array
        Output shape is q.shape+(2,).  These represent the angles
        (vartheta, varphi) in radians, where the normalized input
        quaternion represents `exp(varphi*z/2) * exp(vartheta*y/2)`, up
        to an arbitrary initial rotation about `z`.
    """
    # Euler angles come back as (alpha, beta, gamma); the spherical pair is
    # (beta, alpha) == (vartheta, varphi), hence the reversed slice.
    angles = as_euler_angles(q)
    return angles[..., 1::-1]
def from_spherical_coords(theta_phi, phi=None):
    """Return the quaternion corresponding to these spherical coordinates

    Assumes the spherical coordinates correspond to the quaternion R via

        R = exp(phi*z/2) * exp(theta*y/2)

    The angles naturally must be in radians for this to make any sense.

    Note that this quaternion rotates `z` onto the point with the given
    spherical coordinates, but also rotates `x` and `y` onto the usual
    basis vectors (theta and phi, respectively) at that point.

    Parameters
    ----------
    theta_phi: float or array of floats
        Either an array whose last dimension has size 2, holding the
        (theta, phi) pairs in radians, or just the theta values -- in
        which case `phi` must also be supplied.
    phi: None, float, or array of floats
        When given, must broadcast against the first argument.

    Returns
    -------
    R: quaternion array
        Same shape as the input (minus the final dimension when the
        packed form is used); with two arguments the shape is the result
        of broadcasting them against each other.
    """
    # Accept either the packed pair or two separate angle arrays
    if phi is None:
        packed = np.asarray(theta_phi, dtype=np.double)
        theta = packed[..., 0]
        phi = packed[..., 1]
    else:
        theta = np.asarray(theta_phi, dtype=np.double)
        phi = np.asarray(phi, dtype=np.double)
    # Half-angle factors shared by the four components
    cos_ht, sin_ht = np.cos(theta/2), np.sin(theta/2)
    cos_hp, sin_hp = np.cos(phi/2), np.sin(phi/2)
    components = np.empty(np.broadcast(theta, phi).shape + (4,), dtype=np.double)
    components[..., 0] = cos_hp*cos_ht   # scalar (w)
    components[..., 1] = -sin_hp*sin_ht  # x
    components[..., 2] = cos_hp*sin_ht   # y
    components[..., 3] = sin_hp*cos_ht   # z
    return as_quat_array(components)
def isclose(a, b, rtol=4*np.finfo(float).eps, atol=0.0, equal_nan=False):
    """
    Returns a boolean array where two arrays are element-wise equal within a
    tolerance.

    This function is essentially a copy of the `numpy.isclose` function,
    with different default tolerances and one minor change necessary to
    deal correctly with quaternions.

    The tolerance values are positive, typically very small numbers.  The
    relative difference (`rtol` * abs(`b`)) and the absolute difference
    `atol` are added together to compare against the absolute difference
    between `a` and `b`.

    Parameters
    ----------
    a, b : array_like
        Input arrays to compare.
    rtol : float
        The relative tolerance parameter (see Notes).
    atol : float
        The absolute tolerance parameter (see Notes).
    equal_nan : bool
        Whether to compare NaN's as equal.  If True, NaN's in `a` will be
        considered equal to NaN's in `b` in the output array.

    Returns
    -------
    y : array_like
        Returns a boolean array of where `a` and `b` are equal within the
        given tolerance.  If both `a` and `b` are scalars, returns a single
        boolean value.

    See Also
    --------
    allclose

    Notes
    -----
    For finite values, isclose uses the following equation to test whether
    two floating point values are equivalent:

        absolute(`a` - `b`) <= (`atol` + `rtol` * absolute(`b`))

    The above equation is not symmetric in `a` and `b`, so that
    `isclose(a, b)` might be different from `isclose(b, a)` in
    some rare cases.

    Examples
    --------
    >>> quaternion.isclose([1e10*quaternion.x, 1e-7*quaternion.y], [1.00001e10*quaternion.x, 1e-8*quaternion.y],
    ...     rtol=1.e-5, atol=1.e-8)
    array([True, False])
    >>> quaternion.isclose([1e10*quaternion.x, 1e-8*quaternion.y], [1.00001e10*quaternion.x, 1e-9*quaternion.y],
    ...     rtol=1.e-5, atol=1.e-8)
    array([True, True])
    >>> quaternion.isclose([quaternion.x, np.nan*quaternion.y], [quaternion.x, np.nan*quaternion.y], equal_nan=True)
    array([True, True])
    """
    def within_tol(x, y, atol, rtol):
        # invalid='ignore' suppresses warnings from NaN comparisons; those
        # comparisons correctly evaluate to False here.
        with np.errstate(invalid='ignore'):
            result = np.less_equal(abs(x-y), atol + rtol * abs(y))
        if np.isscalar(a) and np.isscalar(b):
            result = bool(result)
        return result
    # NOTE: `copy=False` was dropped from these np.array calls.  Under
    # NumPy 2.0, copy=False means "never copy" and raises when a copy is
    # unavoidable (e.g. for scalar inputs promoted by ndmin=1).  These
    # arrays are never mutated in place, so an extra copy cannot change
    # the result.
    x = np.array(a, subok=True, ndmin=1)
    y = np.array(b, subok=True, ndmin=1)
    # Make sure y is an inexact type to avoid bad behavior on abs(MIN_INT).
    # This will cause casting of x later. Also, make sure to allow subclasses
    # (e.g., for numpy.ma).
    try:
        dt = np.result_type(y, 1.)
    except TypeError:
        # Non-numeric input; assume it is this package's quaternion dtype
        dt = np.dtype(np.quaternion)
    y = np.array(y, dtype=dt, subok=True)
    xfin = np.isfinite(x)
    yfin = np.isfinite(y)
    if np.all(xfin) and np.all(yfin):
        return within_tol(x, y, atol, rtol)
    else:
        finite = xfin & yfin
        cond = np.zeros_like(finite, subok=True)
        # Because we're using boolean indexing, x & y must be the same shape.
        # Ideally, we'd just do x, y = broadcast_arrays(x, y). It's in
        # lib.stride_tricks, though, so we can't import it here.
        x = x * np.ones_like(cond)
        y = y * np.ones_like(cond)
        # Avoid subtraction with infinite/nan values...
        cond[finite] = within_tol(x[finite], y[finite], atol, rtol)
        # Check for equality of infinite values...
        cond[~finite] = (x[~finite] == y[~finite])
        if equal_nan:
            # Make NaN == NaN
            both_nan = np.isnan(x) & np.isnan(y)
            cond[both_nan] = both_nan[both_nan]
        if np.isscalar(a) and np.isscalar(b):
            return bool(cond)
        else:
            return cond
def allclose(a, b, rtol=4*np.finfo(float).eps, atol=0.0, equal_nan=False, verbose=False):
    """
    Returns True if two arrays are element-wise equal within a tolerance.

    This function is essentially a wrapper for the `quaternion.isclose`
    function: it returns a single boolean that is True only when every
    element of the `quaternion.isclose` output is True.  Note that the
    default tolerances here are stricter than those of `numpy.allclose`,
    and an additional `verbose` option is accepted.

    Parameters
    ----------
    a, b : array_like
        Input arrays to compare.
    rtol : float
        The relative tolerance parameter (see Notes).
    atol : float
        The absolute tolerance parameter (see Notes).
    equal_nan : bool
        Whether to compare NaN's as equal.  If True, NaN's in `a` will be
        considered equal to NaN's in `b`.
    verbose : bool
        If True and the result is False, print each pair of elements that
        failed the tolerance test.

    Returns
    -------
    allclose : bool
        Returns True if the two arrays are equal within the given
        tolerance; False otherwise.

    See Also
    --------
    isclose, numpy.all, numpy.any, numpy.allclose

    Notes
    -----
    If the following equation is element-wise True, then allclose returns
    True.

        absolute(`a` - `b`) <= (`atol` + `rtol` * absolute(`b`))

    The above equation is not symmetric in `a` and `b`, so that
    `allclose(a, b)` might be different from `allclose(b, a)` in
    some rare cases.
    """
    close = isclose(a, b, rtol=rtol, atol=atol, equal_nan=equal_nan)
    result = np.all(close)
    if verbose and not result:
        print('Non-close values:')
        # ~close marks the failing elements (idiomatic form of `close == False`)
        for i in np.argwhere(~close):
            i = tuple(i)
            print('\n x[{0}]={1}\n y[{0}]={2}'.format(i, a[i], b[i]))
    return result
|
moble/quaternion
|
__init__.py
|
isclose
|
python
|
def isclose(a, b, rtol=4*np.finfo(float).eps, atol=0.0, equal_nan=False):
def within_tol(x, y, atol, rtol):
with np.errstate(invalid='ignore'):
result = np.less_equal(abs(x-y), atol + rtol * abs(y))
if np.isscalar(a) and np.isscalar(b):
result = bool(result)
return result
x = np.array(a, copy=False, subok=True, ndmin=1)
y = np.array(b, copy=False, subok=True, ndmin=1)
# Make sure y is an inexact type to avoid bad behavior on abs(MIN_INT).
# This will cause casting of x later. Also, make sure to allow subclasses
# (e.g., for numpy.ma).
try:
dt = np.result_type(y, 1.)
except TypeError:
dt = np.dtype(np.quaternion)
y = np.array(y, dtype=dt, copy=False, subok=True)
xfin = np.isfinite(x)
yfin = np.isfinite(y)
if np.all(xfin) and np.all(yfin):
return within_tol(x, y, atol, rtol)
else:
finite = xfin & yfin
cond = np.zeros_like(finite, subok=True)
# Because we're using boolean indexing, x & y must be the same shape.
# Ideally, we'd just do x, y = broadcast_arrays(x, y). It's in
# lib.stride_tricks, though, so we can't import it here.
x = x * np.ones_like(cond)
y = y * np.ones_like(cond)
# Avoid subtraction with infinite/nan values...
cond[finite] = within_tol(x[finite], y[finite], atol, rtol)
# Check for equality of infinite values...
cond[~finite] = (x[~finite] == y[~finite])
if equal_nan:
# Make NaN == NaN
both_nan = np.isnan(x) & np.isnan(y)
cond[both_nan] = both_nan[both_nan]
if np.isscalar(a) and np.isscalar(b):
return bool(cond)
else:
return cond
|
Returns a boolean array where two arrays are element-wise equal within a
tolerance.
This function is essentially a copy of the `numpy.isclose` function,
with different default tolerances and one minor changes necessary to
deal correctly with quaternions.
The tolerance values are positive, typically very small numbers. The
relative difference (`rtol` * abs(`b`)) and the absolute difference
`atol` are added together to compare against the absolute difference
between `a` and `b`.
Parameters
----------
a, b : array_like
Input arrays to compare.
rtol : float
The relative tolerance parameter (see Notes).
atol : float
The absolute tolerance parameter (see Notes).
equal_nan : bool
Whether to compare NaN's as equal. If True, NaN's in `a` will be
considered equal to NaN's in `b` in the output array.
Returns
-------
y : array_like
Returns a boolean array of where `a` and `b` are equal within the
given tolerance. If both `a` and `b` are scalars, returns a single
boolean value.
See Also
--------
allclose
Notes
-----
For finite values, isclose uses the following equation to test whether
two floating point values are equivalent:
absolute(`a` - `b`) <= (`atol` + `rtol` * absolute(`b`))
The above equation is not symmetric in `a` and `b`, so that
`isclose(a, b)` might be different from `isclose(b, a)` in
some rare cases.
Examples
--------
>>> quaternion.isclose([1e10*quaternion.x, 1e-7*quaternion.y], [1.00001e10*quaternion.x, 1e-8*quaternion.y],
... rtol=1.e-5, atol=1.e-8)
array([True, False])
>>> quaternion.isclose([1e10*quaternion.x, 1e-8*quaternion.y], [1.00001e10*quaternion.x, 1e-9*quaternion.y],
... rtol=1.e-5, atol=1.e-8)
array([True, True])
>>> quaternion.isclose([1e10*quaternion.x, 1e-8*quaternion.y], [1.0001e10*quaternion.x, 1e-9*quaternion.y],
... rtol=1.e-5, atol=1.e-8)
array([False, True])
>>> quaternion.isclose([quaternion.x, np.nan*quaternion.y], [quaternion.x, np.nan*quaternion.y])
array([True, False])
>>> quaternion.isclose([quaternion.x, np.nan*quaternion.y], [quaternion.x, np.nan*quaternion.y], equal_nan=True)
array([True, True])
|
train
|
https://github.com/moble/quaternion/blob/7a323e81b391d6892e2874073e495e0beb057e85/__init__.py#L615-L722
|
[
"def within_tol(x, y, atol, rtol):\n with np.errstate(invalid='ignore'):\n result = np.less_equal(abs(x-y), atol + rtol * abs(y))\n if np.isscalar(a) and np.isscalar(b):\n result = bool(result)\n return result\n"
] |
# Copyright (c) 2017, Michael Boyle
# See LICENSE file for details: <https://github.com/moble/quaternion/blob/master/LICENSE>
from __future__ import division, print_function, absolute_import
import numpy as np
from .numpy_quaternion import (quaternion, _eps,
slerp_evaluate, squad_evaluate,
# slerp_vectorized, squad_vectorized,
# slerp, squad,
)
from .quaternion_time_series import slerp, squad, integrate_angular_velocity, minimal_rotation
from .calculus import derivative, definite_integral, indefinite_integral
from .means import mean_rotor_in_chordal_metric, optimal_alignment_in_chordal_metric
from ._version import __version__
__doc_title__ = "Quaternion dtype for NumPy"
__doc__ = "Adds a quaternion dtype to NumPy."
__all__ = ['quaternion',
'as_quat_array', 'as_spinor_array',
'as_float_array', 'from_float_array',
'as_rotation_matrix', 'from_rotation_matrix',
'as_rotation_vector', 'from_rotation_vector',
'as_euler_angles', 'from_euler_angles',
'as_spherical_coords', 'from_spherical_coords',
'rotate_vectors', 'allclose',
'rotor_intrinsic_distance', 'rotor_chordal_distance',
'rotation_intrinsic_distance', 'rotation_chordal_distance',
'slerp_evaluate', 'squad_evaluate',
'zero', 'one', 'x', 'y', 'z', 'integrate_angular_velocity',
'squad', 'slerp', 'derivative', 'definite_integral', 'indefinite_integral']
# Register the quaternion scalar type with NumPy, refusing to clobber any
# existing registration of the same name.
if 'quaternion' in np.__dict__:
    raise RuntimeError('The NumPy package already has a quaternion type')
np.quaternion = quaternion
# NOTE(review): `np.typeDict` is a deprecated alias (removed in NumPy 1.24);
# newer NumPy spells this `np.sctypeDict` -- confirm before upgrading NumPy.
np.typeDict['quaternion'] = np.dtype(quaternion)
# Handy singletons: zero, the identity rotor, and the three imaginary units.
zero = np.quaternion(0, 0, 0, 0)
one = np.quaternion(1, 0, 0, 0)
x = np.quaternion(0, 1, 0, 0)
y = np.quaternion(0, 0, 1, 0)
z = np.quaternion(0, 0, 0, 1)
# Distance ufuncs re-exported at module level; presumably attached to the
# `np` namespace by the compiled extension at import time -- TODO confirm.
rotor_intrinsic_distance = np.rotor_intrinsic_distance
rotor_chordal_distance = np.rotor_chordal_distance
rotation_intrinsic_distance = np.rotation_intrinsic_distance
rotation_chordal_distance = np.rotation_chordal_distance
def as_float_array(a):
    """View the quaternion array as an array of floats

    Because this is merely a reinterpretation of the underlying memory, it
    is fast (of order 1 microsecond) and copies no data.  The output has
    one extra trailing dimension of size 4 holding the four components of
    each quaternion; the shape is otherwise unchanged.
    """
    quat = np.asarray(a, dtype=np.quaternion)
    return quat.view((np.double, 4))
def as_quat_array(a):
    """View a float array as an array of quaternions

    Successive groups of four values along the last axis become the
    components of the output quaternions, so that axis must have a size
    divisible by four (or better yet *be* 4, in which case the axis
    disappears from the result).

    This is usually fast (of order 1 microsecond) because no data is
    copied; the result is just a "view" of the original.  However, if the
    input's last axis is not C-contiguous, the data must first be copied
    into contiguous order, which may be quite slow -- slices and
    transpositions frequently cause this, so try to keep the input in
    C order.

    We will not convert back from a two-spinor array because there is
    no unique convention for them, so I don't want to mess with that.
    Also, we want to discourage users from the slow, memory-copying
    process of swapping columns required for useful definitions of
    the two-spinors.
    """
    a = np.asarray(a, dtype=np.double)
    # Single quaternion: construct it directly instead of taking a view
    if a.shape == (4,):
        return quaternion(a[0], a[1], a[2], a[3])
    # Reinterpreting memory as quaternions needs a C-contiguous last axis;
    # copy into that layout only when necessary
    contiguous = a.flags['C_CONTIGUOUS'] and a.strides[-1] == a.itemsize
    if not contiguous:
        a = a.copy(order='C')
    try:
        q_view = a.view(np.quaternion)
    except ValueError as e:
        message = (str(e) + '\n '
                   + 'Failed to view input data as a series of quaternions. '
                   + 'Please ensure that the last dimension has size divisible by 4.\n '
                   + 'Input data has shape {0} and dtype {1}.'.format(a.shape, a.dtype))
        raise ValueError(message)
    # Match the output of `as_float_array`: a single quaternion per row comes
    # back without an extra trailing axis of length one
    if q_view.shape[-1] == 1:
        q_view = q_view.reshape(a.shape[:-1])
    return q_view
def from_float_array(a):
    """Interpret the final axis of a float array as quaternion components

    Convenience alias for `as_quat_array`; see that function for the
    shape and contiguity requirements.
    """
    return as_quat_array(a)
def as_spinor_array(a):
    """View a quaternion array as spinors in two-complex representation

    The (w, x, y, z) components are reordered to (w, z, y, x), so each
    quaternion maps to the complex pair (w + z*1j, y + x*1j).

    This function is relatively slow and scales poorly, because memory
    copying is apparently involved -- I think it's due to the
    "advanced indexing" required to swap the columns.
    """
    a = np.atleast_1d(a)
    assert a.dtype == np.dtype(np.quaternion)
    # `np.float`/`np.complex` were deprecated aliases of the builtins
    # (removed in NumPy 1.24); the concrete dtypes below are equivalent.
    # I'm not sure why it has to be so complicated, but all of these steps
    # appear to be necessary in this case.
    return a.view(np.float64).reshape(a.shape + (4,))[..., [0, 3, 2, 1]].ravel().view(np.complex128).reshape(a.shape + (2,))
def as_rotation_matrix(q):
    """Convert input quaternion to 3x3 rotation matrix

    Parameters
    ----------
    q: quaternion or array of quaternions
        The quaternion(s) need not be normalized, but must all be nonzero

    Returns
    -------
    rot: float array
        Output shape is q.shape+(3,3). This matrix should multiply (from
        the left) a column vector to produce the rotated column vector.

    Raises
    ------
    ZeroDivisionError
        If any of the input quaternions have norm 0.0.
    """
    if q.shape == () and not isinstance(q, np.ndarray):  # This is just a single quaternion
        # NOTE(review): `q.norm()` is divided into squared components below
        # without a sqrt, so it is presumably the *squared* norm -- confirm
        # against the compiled extension.
        n = q.norm()
        if n == 0.0:
            raise ZeroDivisionError("Input to `as_rotation_matrix({0})` has zero norm".format(q))
        elif abs(n-1.0) < _eps:  # Input q is basically normalized
            # Standard quaternion-to-matrix formula; the division by n is
            # skipped entirely when the norm is within machine eps of 1.
            return np.array([
                [1 - 2*(q.y**2 + q.z**2),   2*(q.x*q.y - q.z*q.w),      2*(q.x*q.z + q.y*q.w)],
                [2*(q.x*q.y + q.z*q.w),     1 - 2*(q.x**2 + q.z**2),    2*(q.y*q.z - q.x*q.w)],
                [2*(q.x*q.z - q.y*q.w),     2*(q.y*q.z + q.x*q.w),      1 - 2*(q.x**2 + q.y**2)]
            ])
        else:  # Input q is not normalized
            # Same formula, with every quadratic term divided by the norm
            return np.array([
                [1 - 2*(q.y**2 + q.z**2)/n, 2*(q.x*q.y - q.z*q.w)/n,    2*(q.x*q.z + q.y*q.w)/n],
                [2*(q.x*q.y + q.z*q.w)/n,   1 - 2*(q.x**2 + q.z**2)/n,  2*(q.y*q.z - q.x*q.w)/n],
                [2*(q.x*q.z - q.y*q.w)/n,   2*(q.y*q.z + q.x*q.w)/n,    1 - 2*(q.x**2 + q.y**2)/n]
            ])
    else:  # This is an array of quaternions
        # np.norm is the quaternion ufunc added by this package's extension
        n = np.norm(q)
        if np.any(n == 0.0):
            raise ZeroDivisionError("Array input to `as_rotation_matrix` has at least one element with zero norm")
        else:  # Assume input q is not normalized
            # Vectorized version of the formula above, on the raw float view
            # where components are (w, x, y, z) = q[..., 0..3]
            m = np.empty(q.shape + (3, 3))
            q = as_float_array(q)
            m[..., 0, 0] = 1.0 - 2*(q[..., 2]**2 + q[..., 3]**2)/n
            m[..., 0, 1] = 2*(q[..., 1]*q[..., 2] - q[..., 3]*q[..., 0])/n
            m[..., 0, 2] = 2*(q[..., 1]*q[..., 3] + q[..., 2]*q[..., 0])/n
            m[..., 1, 0] = 2*(q[..., 1]*q[..., 2] + q[..., 3]*q[..., 0])/n
            m[..., 1, 1] = 1.0 - 2*(q[..., 1]**2 + q[..., 3]**2)/n
            m[..., 1, 2] = 2*(q[..., 2]*q[..., 3] - q[..., 1]*q[..., 0])/n
            m[..., 2, 0] = 2*(q[..., 1]*q[..., 3] - q[..., 2]*q[..., 0])/n
            m[..., 2, 1] = 2*(q[..., 2]*q[..., 3] + q[..., 1]*q[..., 0])/n
            m[..., 2, 2] = 1.0 - 2*(q[..., 1]**2 + q[..., 2]**2)/n
            return m
def from_rotation_matrix(rot, nonorthogonal=True):
    """Convert input 3x3 rotation matrix to unit quaternion

    By default, if scipy.linalg is available, this function uses
    Bar-Itzhack's algorithm to allow for non-orthogonal matrices.
    [J. Guidance, Vol. 23, No. 6, p. 1085 <http://dx.doi.org/10.2514/2.4654>]
    This will almost certainly be quite a bit slower than simpler versions,
    though it will be more robust to numerical errors in the rotation matrix.
    Also note that Bar-Itzhack uses some pretty weird conventions.  The last
    component of the quaternion appears to represent the scalar, and the
    quaternion itself is conjugated relative to the convention used
    throughout this module.

    If scipy.linalg is not available or if the optional
    `nonorthogonal` parameter is set to `False`, this function falls
    back to the possibly faster, but less robust, algorithm of Markley
    [J. Guidance, Vol. 31, No. 2, p. 440
    <http://dx.doi.org/10.2514/1.31730>].

    Parameters
    ----------
    rot: (...Nx3x3) float array
        Each 3x3 matrix represents a rotation by multiplying (from the left)
        a column vector to produce a rotated column vector.  Note that this
        input may actually have ndims>3; it is just assumed that the last
        two dimensions have size 3, representing the matrix.
    nonorthogonal: bool, optional
        If scipy.linalg is available, use the more robust algorithm of
        Bar-Itzhack.  Default value is True.

    Returns
    -------
    q: array of quaternions
        Unit quaternions resulting in rotations corresponding to input
        rotations.  Output shape is rot.shape[:-2].

    Raises
    ------
    LinAlgError
        If any of the eigenvalue solutions does not converge
    """
    try:
        from scipy import linalg
    except ImportError:
        linalg = False
    rot = np.array(rot, copy=False)
    shape = rot.shape[:-2]
    if linalg and nonorthogonal:
        from operator import mul
        from functools import reduce
        # Bar-Itzhack's symmetric K3 matrix; its dominant eigenvector is the
        # (scalar-last, conjugated) quaternion closest to the input matrix.
        K3 = np.empty(shape+(4, 4))
        K3[..., 0, 0] = (rot[..., 0, 0] - rot[..., 1, 1] - rot[..., 2, 2])/3.0
        K3[..., 0, 1] = (rot[..., 1, 0] + rot[..., 0, 1])/3.0
        K3[..., 0, 2] = (rot[..., 2, 0] + rot[..., 0, 2])/3.0
        K3[..., 0, 3] = (rot[..., 1, 2] - rot[..., 2, 1])/3.0
        K3[..., 1, 0] = K3[..., 0, 1]
        K3[..., 1, 1] = (rot[..., 1, 1] - rot[..., 0, 0] - rot[..., 2, 2])/3.0
        K3[..., 1, 2] = (rot[..., 2, 1] + rot[..., 1, 2])/3.0
        K3[..., 1, 3] = (rot[..., 2, 0] - rot[..., 0, 2])/3.0
        K3[..., 2, 0] = K3[..., 0, 2]
        K3[..., 2, 1] = K3[..., 1, 2]
        K3[..., 2, 2] = (rot[..., 2, 2] - rot[..., 0, 0] - rot[..., 1, 1])/3.0
        K3[..., 2, 3] = (rot[..., 0, 1] - rot[..., 1, 0])/3.0
        K3[..., 3, 0] = K3[..., 0, 3]
        K3[..., 3, 1] = K3[..., 1, 3]
        K3[..., 3, 2] = K3[..., 2, 3]
        K3[..., 3, 3] = (rot[..., 0, 0] + rot[..., 1, 1] + rot[..., 2, 2])/3.0
        if not shape:
            # Single matrix: solve one symmetric eigenproblem for the
            # largest eigenvalue/vector pair.
            # NOTE(review): the `eigvals` keyword is deprecated in newer
            # SciPy in favor of `subset_by_index` -- confirm before upgrading.
            q = zero.copy()
            eigvals, eigvecs = linalg.eigh(K3.T, eigvals=(3, 3))
            # Convert from Bar-Itzhack's scalar-last, conjugated convention
            q.components[0] = eigvecs[-1]
            q.components[1:] = -eigvecs[:-1].flatten()
            return q
        else:
            # `np.float` was a deprecated alias of the builtin float (removed
            # in NumPy 1.24); np.float64 is the equivalent concrete dtype.
            q = np.empty(shape+(4,), dtype=np.float64)
            for flat_index in range(reduce(mul, shape)):
                multi_index = np.unravel_index(flat_index, shape)
                eigvals, eigvecs = linalg.eigh(K3[multi_index], eigvals=(3, 3))
                q[multi_index+(0,)] = eigvecs[-1]
                q[multi_index+(slice(1,None),)] = -eigvecs[:-1].flatten()
            return as_quat_array(q)
    else:  # No scipy.linalg or not `nonorthogonal`
        # Markley's method: choose, per matrix, the numerically largest of the
        # four candidate component expressions to avoid cancellation, then
        # normalize at the end.
        diagonals = np.empty(shape+(4,))
        diagonals[..., 0] = rot[..., 0, 0]
        diagonals[..., 1] = rot[..., 1, 1]
        diagonals[..., 2] = rot[..., 2, 2]
        diagonals[..., 3] = rot[..., 0, 0] + rot[..., 1, 1] + rot[..., 2, 2]
        indices = np.argmax(diagonals, axis=-1)
        q = diagonals  # reuse storage space
        indices_i = (indices == 0)
        if np.any(indices_i):
            if indices_i.shape == ():
                indices_i = Ellipsis
            rot_i = rot[indices_i, :, :]
            q[indices_i, 0] = rot_i[..., 2, 1] - rot_i[..., 1, 2]
            q[indices_i, 1] = 1 + rot_i[..., 0, 0] - rot_i[..., 1, 1] - rot_i[..., 2, 2]
            q[indices_i, 2] = rot_i[..., 0, 1] + rot_i[..., 1, 0]
            q[indices_i, 3] = rot_i[..., 0, 2] + rot_i[..., 2, 0]
        indices_i = (indices == 1)
        if np.any(indices_i):
            if indices_i.shape == ():
                indices_i = Ellipsis
            rot_i = rot[indices_i, :, :]
            q[indices_i, 0] = rot_i[..., 0, 2] - rot_i[..., 2, 0]
            q[indices_i, 1] = rot_i[..., 1, 0] + rot_i[..., 0, 1]
            q[indices_i, 2] = 1 - rot_i[..., 0, 0] + rot_i[..., 1, 1] - rot_i[..., 2, 2]
            q[indices_i, 3] = rot_i[..., 1, 2] + rot_i[..., 2, 1]
        indices_i = (indices == 2)
        if np.any(indices_i):
            if indices_i.shape == ():
                indices_i = Ellipsis
            rot_i = rot[indices_i, :, :]
            q[indices_i, 0] = rot_i[..., 1, 0] - rot_i[..., 0, 1]
            q[indices_i, 1] = rot_i[..., 2, 0] + rot_i[..., 0, 2]
            q[indices_i, 2] = rot_i[..., 2, 1] + rot_i[..., 1, 2]
            q[indices_i, 3] = 1 - rot_i[..., 0, 0] - rot_i[..., 1, 1] + rot_i[..., 2, 2]
        indices_i = (indices == 3)
        if np.any(indices_i):
            if indices_i.shape == ():
                indices_i = Ellipsis
            rot_i = rot[indices_i, :, :]
            q[indices_i, 0] = 1 + rot_i[..., 0, 0] + rot_i[..., 1, 1] + rot_i[..., 2, 2]
            q[indices_i, 1] = rot_i[..., 2, 1] - rot_i[..., 1, 2]
            q[indices_i, 2] = rot_i[..., 0, 2] - rot_i[..., 2, 0]
            q[indices_i, 3] = rot_i[..., 1, 0] - rot_i[..., 0, 1]
        q /= np.linalg.norm(q, axis=-1)[..., np.newaxis]
        return as_quat_array(q)
def as_rotation_vector(q):
    """Convert input quaternion to the axis-angle representation

    Quaternions of zero norm do not raise an error here; they simply
    produce NaNs in the output.

    Parameters
    ----------
    q: quaternion or array of quaternions
        The quaternion(s) need not be normalized, but must all be nonzero

    Returns
    -------
    rot: float array
        Output shape is q.shape+(3,).  Each vector points along the axis
        of the rotation, with length proportional to the rotation angle
        in radians.
    """
    # The log of a normalized quaternion has vanishing scalar part, so only
    # the vector components [1:] of twice the log are returned.
    log_q = np.log(np.normalized(q))
    return as_float_array(2*log_q)[..., 1:]
def from_rotation_vector(rot):
    """Convert input 3-vector in axis-angle representation to unit quaternion

    Parameters
    ----------
    rot: (Nx3) float array
        Each vector points along the axis of the rotation, with norm
        proportional to the rotation angle in radians.

    Returns
    -------
    q: array of quaternions
        Unit quaternions resulting in rotations corresponding to input
        rotations.  Output shape is rot.shape[:-1].
    """
    rot = np.asarray(rot)
    # Build pure-vector quaternions holding half the rotation vectors, then
    # exponentiate to obtain the corresponding unit rotors.
    half_vectors = np.zeros(rot.shape[:-1] + (4,))
    half_vectors[..., 1:] = rot/2
    return np.exp(as_quat_array(half_vectors))
def as_euler_angles(q):
    """Open Pandora's Box

    If somebody is trying to make you use Euler angles, tell them no, and
    walk away, and go and tell your mum.

    You don't want to use Euler angles.  They are awful.  Stay away.  It's
    one thing to convert from Euler angles to quaternions; at least you're
    moving in the right direction.  But to go the other way?!  It's just not
    right.

    Assumes the Euler angles correspond to the quaternion R via

        R = exp(alpha*z/2) * exp(beta*y/2) * exp(gamma*z/2)

    The angles are naturally in radians.

    NOTE: Before opening an issue reporting something "wrong" with this
    function, be sure to read all of the following page, *especially* the
    very last section about opening issues or pull requests.
    <https://github.com/moble/quaternion/wiki/Euler-angles-are-horrible>

    Parameters
    ----------
    q: quaternion or array of quaternions
        The quaternion(s) need not be normalized, but must all be nonzero

    Returns
    -------
    alpha_beta_gamma: float array
        Output shape is q.shape+(3,).  These represent the angles (alpha,
        beta, gamma) in radians, where the normalized input quaternion
        represents `exp(alpha*z/2) * exp(beta*y/2) * exp(gamma*z/2)`.

    Raises
    ------
    AllHell
        ...if you try to actually use Euler angles, when you could have
        been using quaternions like a sensible person.
    """
    # `np.float` was a deprecated alias of the builtin float (removed in
    # NumPy 1.24); np.float64 is the equivalent concrete dtype.
    alpha_beta_gamma = np.empty(q.shape + (3,), dtype=np.float64)
    # np.norm is the quaternion ufunc added by this package's extension;
    # it is used below without a sqrt, so presumably the squared norm --
    # TODO(review) confirm against the extension module.
    n = np.norm(q)
    q = as_float_array(q)
    # Components are (w, x, y, z) = (q[...,0], q[...,1], q[...,2], q[...,3]).
    # alpha and gamma are the sum/difference of the two arctan terms.
    alpha_beta_gamma[..., 0] = np.arctan2(q[..., 3], q[..., 0]) + np.arctan2(-q[..., 1], q[..., 2])
    alpha_beta_gamma[..., 1] = 2*np.arccos(np.sqrt((q[..., 0]**2 + q[..., 3]**2)/n))
    alpha_beta_gamma[..., 2] = np.arctan2(q[..., 3], q[..., 0]) - np.arctan2(-q[..., 1], q[..., 2])
    return alpha_beta_gamma
def from_euler_angles(alpha_beta_gamma, beta=None, gamma=None):
    """Improve your life drastically

    Assumes the Euler angles correspond to the quaternion R via

        R = exp(alpha*z/2) * exp(beta*y/2) * exp(gamma*z/2)

    The angles naturally must be in radians for this to make any sense.

    NOTE: Before opening an issue reporting something "wrong" with this
    function, be sure to read all of the following page, *especially* the
    very last section about opening issues or pull requests.
    <https://github.com/moble/quaternion/wiki/Euler-angles-are-horrible>

    Parameters
    ----------
    alpha_beta_gamma: float or array of floats
        Either an array whose last dimension has size 3, holding the
        (alpha, beta, gamma) triples, or just the alpha values -- in which
        case `beta` and `gamma` must also be supplied.
    beta: None, float, or array of floats
        When given, must broadcast against the other two arguments.
    gamma: None, float, or array of floats
        When given, must broadcast against the other two arguments.

    Returns
    -------
    R: quaternion array
        Same shape as the input, except that the last dimension is removed
        when the packed (alpha, beta, gamma) form is used.
    """
    # Accept either the packed triple or three separate angle arrays
    if gamma is None:
        packed = np.asarray(alpha_beta_gamma, dtype=np.double)
        alpha = packed[..., 0]
        beta = packed[..., 1]
        gamma = packed[..., 2]
    else:
        alpha = np.asarray(alpha_beta_gamma, dtype=np.double)
        beta = np.asarray(beta, dtype=np.double)
        gamma = np.asarray(gamma, dtype=np.double)
    # Half-angle factors shared by several components
    cos_hb = np.cos(beta/2)
    sin_hb = np.sin(beta/2)
    half_sum = (alpha+gamma)/2
    half_diff = (alpha-gamma)/2
    components = np.empty(np.broadcast(alpha, beta, gamma).shape + (4,), dtype=np.double)
    components[..., 0] = cos_hb*np.cos(half_sum)    # scalar (w)
    components[..., 1] = -sin_hb*np.sin(half_diff)  # x
    components[..., 2] = sin_hb*np.cos(half_diff)   # y
    components[..., 3] = cos_hb*np.sin(half_sum)    # z
    return as_quat_array(components)
def as_spherical_coords(q):
    """Return the spherical coordinates corresponding to this quaternion

    Spherical coordinates carry less information than a quaternion, so
    this conversion is lossy: the result describes only the point(s) on
    the sphere to which the input quaternion(s) rotate the z axis.

    Parameters
    ----------
    q: quaternion or array of quaternions
        The quaternion(s) need not be normalized, but must be nonzero

    Returns
    -------
    vartheta_varphi: float array
        Output shape is q.shape+(2,).  These represent the angles
        (vartheta, varphi) in radians, where the normalized input
        quaternion represents `exp(varphi*z/2) * exp(vartheta*y/2)`, up
        to an arbitrary initial rotation about `z`.
    """
    # Euler angles come back as (alpha, beta, gamma); the spherical pair is
    # (beta, alpha) == (vartheta, varphi), hence the reversed slice.
    angles = as_euler_angles(q)
    return angles[..., 1::-1]
def from_spherical_coords(theta_phi, phi=None):
"""Return the quaternion corresponding to these spherical coordinates
Assumes the spherical coordinates correspond to the quaternion R via
R = exp(phi*z/2) * exp(theta*y/2)
The angles naturally must be in radians for this to make any sense.
Note that this quaternion rotates `z` onto the point with the given
spherical coordinates, but also rotates `x` and `y` onto the usual basis
vectors (theta and phi, respectively) at that point.
Parameters
----------
theta_phi: float or array of floats
This argument may either contain an array with last dimension of
size 2, where those two elements describe the (theta, phi) values in
radians for each point; or it may contain just the theta values in
radians, in which case the next argument must also be given.
phi: None, float, or array of floats
If this array is given, it must be able to broadcast against the
first argument.
Returns
-------
R: quaternion array
If the second argument is not given to this function, the shape
will be the same as the input shape except for the last dimension,
which will be removed. If the second argument is given, this
output array will have the shape resulting from broadcasting the
two input arrays against each other.
"""
# Figure out the input angles from either type of input
if phi is None:
theta_phi = np.asarray(theta_phi, dtype=np.double)
theta = theta_phi[..., 0]
phi = theta_phi[..., 1]
else:
theta = np.asarray(theta_phi, dtype=np.double)
phi = np.asarray(phi, dtype=np.double)
# Set up the output array
R = np.empty(np.broadcast(theta, phi).shape + (4,), dtype=np.double)
# Compute the actual values of the quaternion components
R[..., 0] = np.cos(phi/2)*np.cos(theta/2) # scalar quaternion components
R[..., 1] = -np.sin(phi/2)*np.sin(theta/2) # x quaternion components
R[..., 2] = np.cos(phi/2)*np.sin(theta/2) # y quaternion components
R[..., 3] = np.sin(phi/2)*np.cos(theta/2) # z quaternion components
return as_quat_array(R)
def rotate_vectors(R, v, axis=-1):
"""Rotate vectors by given quaternions
For simplicity, this function simply converts the input
quaternion(s) to a matrix, and rotates the input vector(s) by the
usual matrix multiplication. However, it should be noted that if
each input quaternion is only used to rotate a single vector, it
is more efficient (in terms of operation counts) to use the
formula
v' = v + 2 * r x (s * v + r x v) / m
where x represents the cross product, s and r are the scalar and
vector parts of the quaternion, respectively, and m is the sum of
the squares of the components of the quaternion. If you are
looping over a very large number of quaternions, and just rotating
a single vector each time, you might want to implement that
alternative algorithm using numba (or something that doesn't use
python).
Parameters
==========
R: quaternion array
Quaternions by which to rotate the input vectors
v: float array
Three-vectors to be rotated.
axis: int
Axis of the `v` array to use as the vector dimension. This
axis of `v` must have length 3.
Returns
=======
vprime: float array
The rotated vectors. This array has shape R.shape+v.shape.
"""
R = np.asarray(R, dtype=np.quaternion)
v = np.asarray(v, dtype=float)
if v.ndim < 1 or 3 not in v.shape:
raise ValueError("Input `v` does not have at least one dimension of length 3")
if v.shape[axis] != 3:
raise ValueError("Input `v` axis {0} has length {1}, not 3.".format(axis, v.shape[axis]))
m = as_rotation_matrix(R)
m_axes = list(range(m.ndim))
v_axes = list(range(m.ndim, m.ndim+v.ndim))
mv_axes = list(v_axes)
mv_axes[axis] = m_axes[-2]
mv_axes = m_axes[:-2] + mv_axes
v_axes[axis] = m_axes[-1]
return np.einsum(m, m_axes, v, v_axes, mv_axes)
def allclose(a, b, rtol=4*np.finfo(float).eps, atol=0.0, equal_nan=False, verbose=False):
"""
Returns True if two arrays are element-wise equal within a tolerance.
This function is essentially a wrapper for the `quaternion.isclose`
function, but returns a single boolean value of True if all elements
of the output from `quaternion.isclose` are True, and False otherwise.
This function also adds the option.
Note that this function has stricter tolerances than the
`numpy.allclose` function, as well as the additional `verbose` option.
Parameters
----------
a, b : array_like
Input arrays to compare.
rtol : float
The relative tolerance parameter (see Notes).
atol : float
The absolute tolerance parameter (see Notes).
equal_nan : bool
Whether to compare NaN's as equal. If True, NaN's in `a` will be
considered equal to NaN's in `b` in the output array.
verbose : bool
If the return value is False,
Returns
-------
allclose : bool
Returns True if the two arrays are equal within the given
tolerance; False otherwise.
See Also
--------
isclose, numpy.all, numpy.any, numpy.allclose
Returns
-------
allclose : bool
Returns True if the two arrays are equal within the given
tolerance; False otherwise.
Notes
-----
If the following equation is element-wise True, then allclose returns
True.
absolute(`a` - `b`) <= (`atol` + `rtol` * absolute(`b`))
The above equation is not symmetric in `a` and `b`, so that
`allclose(a, b)` might be different from `allclose(b, a)` in
some rare cases.
"""
close = isclose(a, b, rtol=rtol, atol=atol, equal_nan=equal_nan)
result = np.all(close)
if verbose and not result:
print('Non-close values:')
for i in np.argwhere(close == False):
i = tuple(i)
print('\n x[{0}]={1}\n y[{0}]={2}'.format(i, a[i], b[i]))
return result
|
moble/quaternion
|
__init__.py
|
allclose
|
python
|
def allclose(a, b, rtol=4*np.finfo(float).eps, atol=0.0, equal_nan=False, verbose=False):
close = isclose(a, b, rtol=rtol, atol=atol, equal_nan=equal_nan)
result = np.all(close)
if verbose and not result:
print('Non-close values:')
for i in np.argwhere(close == False):
i = tuple(i)
print('\n x[{0}]={1}\n y[{0}]={2}'.format(i, a[i], b[i]))
return result
|
Returns True if two arrays are element-wise equal within a tolerance.
This function is essentially a wrapper for the `quaternion.isclose`
function, but returns a single boolean value of True if all elements
of the output from `quaternion.isclose` are True, and False otherwise.
This function also adds the option.
Note that this function has stricter tolerances than the
`numpy.allclose` function, as well as the additional `verbose` option.
Parameters
----------
a, b : array_like
Input arrays to compare.
rtol : float
The relative tolerance parameter (see Notes).
atol : float
The absolute tolerance parameter (see Notes).
equal_nan : bool
Whether to compare NaN's as equal. If True, NaN's in `a` will be
considered equal to NaN's in `b` in the output array.
verbose : bool
If the return value is False,
Returns
-------
allclose : bool
Returns True if the two arrays are equal within the given
tolerance; False otherwise.
See Also
--------
isclose, numpy.all, numpy.any, numpy.allclose
Returns
-------
allclose : bool
Returns True if the two arrays are equal within the given
tolerance; False otherwise.
Notes
-----
If the following equation is element-wise True, then allclose returns
True.
absolute(`a` - `b`) <= (`atol` + `rtol` * absolute(`b`))
The above equation is not symmetric in `a` and `b`, so that
`allclose(a, b)` might be different from `allclose(b, a)` in
some rare cases.
|
train
|
https://github.com/moble/quaternion/blob/7a323e81b391d6892e2874073e495e0beb057e85/__init__.py#L725-L786
|
[
"def isclose(a, b, rtol=4*np.finfo(float).eps, atol=0.0, equal_nan=False):\n \"\"\"\n Returns a boolean array where two arrays are element-wise equal within a\n tolerance.\n\n This function is essentially a copy of the `numpy.isclose` function,\n with different default tolerances and one minor changes necessary to\n deal correctly with quaternions.\n\n The tolerance values are positive, typically very small numbers. The\n relative difference (`rtol` * abs(`b`)) and the absolute difference\n `atol` are added together to compare against the absolute difference\n between `a` and `b`.\n\n Parameters\n ----------\n a, b : array_like\n Input arrays to compare.\n rtol : float\n The relative tolerance parameter (see Notes).\n atol : float\n The absolute tolerance parameter (see Notes).\n equal_nan : bool\n Whether to compare NaN's as equal. If True, NaN's in `a` will be\n considered equal to NaN's in `b` in the output array.\n\n Returns\n -------\n y : array_like\n Returns a boolean array of where `a` and `b` are equal within the\n given tolerance. If both `a` and `b` are scalars, returns a single\n boolean value.\n\n See Also\n --------\n allclose\n\n Notes\n -----\n For finite values, isclose uses the following equation to test whether\n two floating point values are equivalent:\n\n absolute(`a` - `b`) <= (`atol` + `rtol` * absolute(`b`))\n\n The above equation is not symmetric in `a` and `b`, so that\n `isclose(a, b)` might be different from `isclose(b, a)` in\n some rare cases.\n\n Examples\n --------\n >>> quaternion.isclose([1e10*quaternion.x, 1e-7*quaternion.y], [1.00001e10*quaternion.x, 1e-8*quaternion.y],\n ... rtol=1.e-5, atol=1.e-8)\n array([True, False])\n >>> quaternion.isclose([1e10*quaternion.x, 1e-8*quaternion.y], [1.00001e10*quaternion.x, 1e-9*quaternion.y],\n ... rtol=1.e-5, atol=1.e-8)\n array([True, True])\n >>> quaternion.isclose([1e10*quaternion.x, 1e-8*quaternion.y], [1.0001e10*quaternion.x, 1e-9*quaternion.y],\n ... 
rtol=1.e-5, atol=1.e-8)\n array([False, True])\n >>> quaternion.isclose([quaternion.x, np.nan*quaternion.y], [quaternion.x, np.nan*quaternion.y])\n array([True, False])\n >>> quaternion.isclose([quaternion.x, np.nan*quaternion.y], [quaternion.x, np.nan*quaternion.y], equal_nan=True)\n array([True, True])\n \"\"\"\n def within_tol(x, y, atol, rtol):\n with np.errstate(invalid='ignore'):\n result = np.less_equal(abs(x-y), atol + rtol * abs(y))\n if np.isscalar(a) and np.isscalar(b):\n result = bool(result)\n return result\n\n x = np.array(a, copy=False, subok=True, ndmin=1)\n y = np.array(b, copy=False, subok=True, ndmin=1)\n\n # Make sure y is an inexact type to avoid bad behavior on abs(MIN_INT).\n # This will cause casting of x later. Also, make sure to allow subclasses\n # (e.g., for numpy.ma).\n try:\n dt = np.result_type(y, 1.)\n except TypeError:\n dt = np.dtype(np.quaternion)\n y = np.array(y, dtype=dt, copy=False, subok=True)\n\n xfin = np.isfinite(x)\n yfin = np.isfinite(y)\n if np.all(xfin) and np.all(yfin):\n return within_tol(x, y, atol, rtol)\n else:\n finite = xfin & yfin\n cond = np.zeros_like(finite, subok=True)\n # Because we're using boolean indexing, x & y must be the same shape.\n # Ideally, we'd just do x, y = broadcast_arrays(x, y). It's in\n # lib.stride_tricks, though, so we can't import it here.\n x = x * np.ones_like(cond)\n y = y * np.ones_like(cond)\n # Avoid subtraction with infinite/nan values...\n cond[finite] = within_tol(x[finite], y[finite], atol, rtol)\n # Check for equality of infinite values...\n cond[~finite] = (x[~finite] == y[~finite])\n if equal_nan:\n # Make NaN == NaN\n both_nan = np.isnan(x) & np.isnan(y)\n cond[both_nan] = both_nan[both_nan]\n\n if np.isscalar(a) and np.isscalar(b):\n return bool(cond)\n else:\n return cond\n"
] |
# Copyright (c) 2017, Michael Boyle
# See LICENSE file for details: <https://github.com/moble/quaternion/blob/master/LICENSE>
from __future__ import division, print_function, absolute_import
import numpy as np
from .numpy_quaternion import (quaternion, _eps,
slerp_evaluate, squad_evaluate,
# slerp_vectorized, squad_vectorized,
# slerp, squad,
)
from .quaternion_time_series import slerp, squad, integrate_angular_velocity, minimal_rotation
from .calculus import derivative, definite_integral, indefinite_integral
from .means import mean_rotor_in_chordal_metric, optimal_alignment_in_chordal_metric
from ._version import __version__
__doc_title__ = "Quaternion dtype for NumPy"
__doc__ = "Adds a quaternion dtype to NumPy."
__all__ = ['quaternion',
'as_quat_array', 'as_spinor_array',
'as_float_array', 'from_float_array',
'as_rotation_matrix', 'from_rotation_matrix',
'as_rotation_vector', 'from_rotation_vector',
'as_euler_angles', 'from_euler_angles',
'as_spherical_coords', 'from_spherical_coords',
'rotate_vectors', 'allclose',
'rotor_intrinsic_distance', 'rotor_chordal_distance',
'rotation_intrinsic_distance', 'rotation_chordal_distance',
'slerp_evaluate', 'squad_evaluate',
'zero', 'one', 'x', 'y', 'z', 'integrate_angular_velocity',
'squad', 'slerp', 'derivative', 'definite_integral', 'indefinite_integral']
if 'quaternion' in np.__dict__:
raise RuntimeError('The NumPy package already has a quaternion type')
np.quaternion = quaternion
np.typeDict['quaternion'] = np.dtype(quaternion)
zero = np.quaternion(0, 0, 0, 0)
one = np.quaternion(1, 0, 0, 0)
x = np.quaternion(0, 1, 0, 0)
y = np.quaternion(0, 0, 1, 0)
z = np.quaternion(0, 0, 0, 1)
rotor_intrinsic_distance = np.rotor_intrinsic_distance
rotor_chordal_distance = np.rotor_chordal_distance
rotation_intrinsic_distance = np.rotation_intrinsic_distance
rotation_chordal_distance = np.rotation_chordal_distance
def as_float_array(a):
"""View the quaternion array as an array of floats
This function is fast (of order 1 microsecond) because no data is
copied; the returned quantity is just a "view" of the original.
The output view has one more dimension (of size 4) than the input
array, but is otherwise the same shape.
"""
return np.asarray(a, dtype=np.quaternion).view((np.double, 4))
def as_quat_array(a):
"""View a float array as an array of quaternions
The input array must have a final dimension whose size is
divisible by four (or better yet *is* 4), because successive
indices in that last dimension will be considered successive
components of the output quaternion.
This function is usually fast (of order 1 microsecond) because no
data is copied; the returned quantity is just a "view" of the
original. However, if the input array is not C-contiguous
(basically, as you increment the index into the last dimension of
the array, you just move to the neighboring float in memory), the
data will need to be copied which may be quite slow. Therefore,
you should try to ensure that the input array is in that order.
Slices and transpositions will frequently break that rule.
We will not convert back from a two-spinor array because there is
no unique convention for them, so I don't want to mess with that.
Also, we want to discourage users from the slow, memory-copying
process of swapping columns required for useful definitions of
the two-spinors.
"""
a = np.asarray(a, dtype=np.double)
# fast path
if a.shape == (4,):
return quaternion(a[0], a[1], a[2], a[3])
# view only works if the last axis is C-contiguous
if not a.flags['C_CONTIGUOUS'] or a.strides[-1] != a.itemsize:
a = a.copy(order='C')
try:
av = a.view(np.quaternion)
except ValueError as e:
message = (str(e) + '\n '
+ 'Failed to view input data as a series of quaternions. '
+ 'Please ensure that the last dimension has size divisible by 4.\n '
+ 'Input data has shape {0} and dtype {1}.'.format(a.shape, a.dtype))
raise ValueError(message)
# special case: don't create an axis for a single quaternion, to
# match the output of `as_float_array`
if av.shape[-1] == 1:
av = av.reshape(a.shape[:-1])
return av
def from_float_array(a):
return as_quat_array(a)
def as_spinor_array(a):
"""View a quaternion array as spinors in two-complex representation
This function is relatively slow and scales poorly, because memory
copying is apparently involved -- I think it's due to the
"advanced indexing" required to swap the columns.
"""
a = np.atleast_1d(a)
assert a.dtype == np.dtype(np.quaternion)
# I'm not sure why it has to be so complicated, but all of these steps
# appear to be necessary in this case.
return a.view(np.float).reshape(a.shape + (4,))[..., [0, 3, 2, 1]].ravel().view(np.complex).reshape(a.shape + (2,))
def as_rotation_matrix(q):
"""Convert input quaternion to 3x3 rotation matrix
Parameters
----------
q: quaternion or array of quaternions
The quaternion(s) need not be normalized, but must all be nonzero
Returns
-------
rot: float array
Output shape is q.shape+(3,3). This matrix should multiply (from
the left) a column vector to produce the rotated column vector.
Raises
------
ZeroDivisionError
If any of the input quaternions have norm 0.0.
"""
if q.shape == () and not isinstance(q, np.ndarray): # This is just a single quaternion
n = q.norm()
if n == 0.0:
raise ZeroDivisionError("Input to `as_rotation_matrix({0})` has zero norm".format(q))
elif abs(n-1.0) < _eps: # Input q is basically normalized
return np.array([
[1 - 2*(q.y**2 + q.z**2), 2*(q.x*q.y - q.z*q.w), 2*(q.x*q.z + q.y*q.w)],
[2*(q.x*q.y + q.z*q.w), 1 - 2*(q.x**2 + q.z**2), 2*(q.y*q.z - q.x*q.w)],
[2*(q.x*q.z - q.y*q.w), 2*(q.y*q.z + q.x*q.w), 1 - 2*(q.x**2 + q.y**2)]
])
else: # Input q is not normalized
return np.array([
[1 - 2*(q.y**2 + q.z**2)/n, 2*(q.x*q.y - q.z*q.w)/n, 2*(q.x*q.z + q.y*q.w)/n],
[2*(q.x*q.y + q.z*q.w)/n, 1 - 2*(q.x**2 + q.z**2)/n, 2*(q.y*q.z - q.x*q.w)/n],
[2*(q.x*q.z - q.y*q.w)/n, 2*(q.y*q.z + q.x*q.w)/n, 1 - 2*(q.x**2 + q.y**2)/n]
])
else: # This is an array of quaternions
n = np.norm(q)
if np.any(n == 0.0):
raise ZeroDivisionError("Array input to `as_rotation_matrix` has at least one element with zero norm")
else: # Assume input q is not normalized
m = np.empty(q.shape + (3, 3))
q = as_float_array(q)
m[..., 0, 0] = 1.0 - 2*(q[..., 2]**2 + q[..., 3]**2)/n
m[..., 0, 1] = 2*(q[..., 1]*q[..., 2] - q[..., 3]*q[..., 0])/n
m[..., 0, 2] = 2*(q[..., 1]*q[..., 3] + q[..., 2]*q[..., 0])/n
m[..., 1, 0] = 2*(q[..., 1]*q[..., 2] + q[..., 3]*q[..., 0])/n
m[..., 1, 1] = 1.0 - 2*(q[..., 1]**2 + q[..., 3]**2)/n
m[..., 1, 2] = 2*(q[..., 2]*q[..., 3] - q[..., 1]*q[..., 0])/n
m[..., 2, 0] = 2*(q[..., 1]*q[..., 3] - q[..., 2]*q[..., 0])/n
m[..., 2, 1] = 2*(q[..., 2]*q[..., 3] + q[..., 1]*q[..., 0])/n
m[..., 2, 2] = 1.0 - 2*(q[..., 1]**2 + q[..., 2]**2)/n
return m
def from_rotation_matrix(rot, nonorthogonal=True):
"""Convert input 3x3 rotation matrix to unit quaternion
By default, if scipy.linalg is available, this function uses
Bar-Itzhack's algorithm to allow for non-orthogonal matrices.
[J. Guidance, Vol. 23, No. 6, p. 1085 <http://dx.doi.org/10.2514/2.4654>]
This will almost certainly be quite a bit slower than simpler versions,
though it will be more robust to numerical errors in the rotation matrix.
Also note that Bar-Itzhack uses some pretty weird conventions. The last
component of the quaternion appears to represent the scalar, and the
quaternion itself is conjugated relative to the convention used
throughout this module.
If scipy.linalg is not available or if the optional
`nonorthogonal` parameter is set to `False`, this function falls
back to the possibly faster, but less robust, algorithm of Markley
[J. Guidance, Vol. 31, No. 2, p. 440
<http://dx.doi.org/10.2514/1.31730>].
Parameters
----------
rot: (...Nx3x3) float array
Each 3x3 matrix represents a rotation by multiplying (from the left)
a column vector to produce a rotated column vector. Note that this
input may actually have ndims>3; it is just assumed that the last
two dimensions have size 3, representing the matrix.
nonorthogonal: bool, optional
If scipy.linalg is available, use the more robust algorithm of
Bar-Itzhack. Default value is True.
Returns
-------
q: array of quaternions
Unit quaternions resulting in rotations corresponding to input
rotations. Output shape is rot.shape[:-2].
Raises
------
LinAlgError
If any of the eigenvalue solutions does not converge
"""
try:
from scipy import linalg
except ImportError:
linalg = False
rot = np.array(rot, copy=False)
shape = rot.shape[:-2]
if linalg and nonorthogonal:
from operator import mul
from functools import reduce
K3 = np.empty(shape+(4, 4))
K3[..., 0, 0] = (rot[..., 0, 0] - rot[..., 1, 1] - rot[..., 2, 2])/3.0
K3[..., 0, 1] = (rot[..., 1, 0] + rot[..., 0, 1])/3.0
K3[..., 0, 2] = (rot[..., 2, 0] + rot[..., 0, 2])/3.0
K3[..., 0, 3] = (rot[..., 1, 2] - rot[..., 2, 1])/3.0
K3[..., 1, 0] = K3[..., 0, 1]
K3[..., 1, 1] = (rot[..., 1, 1] - rot[..., 0, 0] - rot[..., 2, 2])/3.0
K3[..., 1, 2] = (rot[..., 2, 1] + rot[..., 1, 2])/3.0
K3[..., 1, 3] = (rot[..., 2, 0] - rot[..., 0, 2])/3.0
K3[..., 2, 0] = K3[..., 0, 2]
K3[..., 2, 1] = K3[..., 1, 2]
K3[..., 2, 2] = (rot[..., 2, 2] - rot[..., 0, 0] - rot[..., 1, 1])/3.0
K3[..., 2, 3] = (rot[..., 0, 1] - rot[..., 1, 0])/3.0
K3[..., 3, 0] = K3[..., 0, 3]
K3[..., 3, 1] = K3[..., 1, 3]
K3[..., 3, 2] = K3[..., 2, 3]
K3[..., 3, 3] = (rot[..., 0, 0] + rot[..., 1, 1] + rot[..., 2, 2])/3.0
if not shape:
q = zero.copy()
eigvals, eigvecs = linalg.eigh(K3.T, eigvals=(3, 3))
q.components[0] = eigvecs[-1]
q.components[1:] = -eigvecs[:-1].flatten()
return q
else:
q = np.empty(shape+(4,), dtype=np.float)
for flat_index in range(reduce(mul, shape)):
multi_index = np.unravel_index(flat_index, shape)
eigvals, eigvecs = linalg.eigh(K3[multi_index], eigvals=(3, 3))
q[multi_index+(0,)] = eigvecs[-1]
q[multi_index+(slice(1,None),)] = -eigvecs[:-1].flatten()
return as_quat_array(q)
else: # No scipy.linalg or not `nonorthogonal`
diagonals = np.empty(shape+(4,))
diagonals[..., 0] = rot[..., 0, 0]
diagonals[..., 1] = rot[..., 1, 1]
diagonals[..., 2] = rot[..., 2, 2]
diagonals[..., 3] = rot[..., 0, 0] + rot[..., 1, 1] + rot[..., 2, 2]
indices = np.argmax(diagonals, axis=-1)
q = diagonals # reuse storage space
indices_i = (indices == 0)
if np.any(indices_i):
if indices_i.shape == ():
indices_i = Ellipsis
rot_i = rot[indices_i, :, :]
q[indices_i, 0] = rot_i[..., 2, 1] - rot_i[..., 1, 2]
q[indices_i, 1] = 1 + rot_i[..., 0, 0] - rot_i[..., 1, 1] - rot_i[..., 2, 2]
q[indices_i, 2] = rot_i[..., 0, 1] + rot_i[..., 1, 0]
q[indices_i, 3] = rot_i[..., 0, 2] + rot_i[..., 2, 0]
indices_i = (indices == 1)
if np.any(indices_i):
if indices_i.shape == ():
indices_i = Ellipsis
rot_i = rot[indices_i, :, :]
q[indices_i, 0] = rot_i[..., 0, 2] - rot_i[..., 2, 0]
q[indices_i, 1] = rot_i[..., 1, 0] + rot_i[..., 0, 1]
q[indices_i, 2] = 1 - rot_i[..., 0, 0] + rot_i[..., 1, 1] - rot_i[..., 2, 2]
q[indices_i, 3] = rot_i[..., 1, 2] + rot_i[..., 2, 1]
indices_i = (indices == 2)
if np.any(indices_i):
if indices_i.shape == ():
indices_i = Ellipsis
rot_i = rot[indices_i, :, :]
q[indices_i, 0] = rot_i[..., 1, 0] - rot_i[..., 0, 1]
q[indices_i, 1] = rot_i[..., 2, 0] + rot_i[..., 0, 2]
q[indices_i, 2] = rot_i[..., 2, 1] + rot_i[..., 1, 2]
q[indices_i, 3] = 1 - rot_i[..., 0, 0] - rot_i[..., 1, 1] + rot_i[..., 2, 2]
indices_i = (indices == 3)
if np.any(indices_i):
if indices_i.shape == ():
indices_i = Ellipsis
rot_i = rot[indices_i, :, :]
q[indices_i, 0] = 1 + rot_i[..., 0, 0] + rot_i[..., 1, 1] + rot_i[..., 2, 2]
q[indices_i, 1] = rot_i[..., 2, 1] - rot_i[..., 1, 2]
q[indices_i, 2] = rot_i[..., 0, 2] - rot_i[..., 2, 0]
q[indices_i, 3] = rot_i[..., 1, 0] - rot_i[..., 0, 1]
q /= np.linalg.norm(q, axis=-1)[..., np.newaxis]
return as_quat_array(q)
def as_rotation_vector(q):
"""Convert input quaternion to the axis-angle representation
Note that if any of the input quaternions has norm zero, no error is
raised, but NaNs will appear in the output.
Parameters
----------
q: quaternion or array of quaternions
The quaternion(s) need not be normalized, but must all be nonzero
Returns
-------
rot: float array
Output shape is q.shape+(3,). Each vector represents the axis of
the rotation, with norm proportional to the angle of the rotation in
radians.
"""
return as_float_array(2*np.log(np.normalized(q)))[..., 1:]
def from_rotation_vector(rot):
"""Convert input 3-vector in axis-angle representation to unit quaternion
Parameters
----------
rot: (Nx3) float array
Each vector represents the axis of the rotation, with norm
proportional to the angle of the rotation in radians.
Returns
-------
q: array of quaternions
Unit quaternions resulting in rotations corresponding to input
rotations. Output shape is rot.shape[:-1].
"""
rot = np.array(rot, copy=False)
quats = np.zeros(rot.shape[:-1]+(4,))
quats[..., 1:] = rot[...]/2
quats = as_quat_array(quats)
return np.exp(quats)
def as_euler_angles(q):
"""Open Pandora's Box
If somebody is trying to make you use Euler angles, tell them no, and
walk away, and go and tell your mum.
You don't want to use Euler angles. They are awful. Stay away. It's
one thing to convert from Euler angles to quaternions; at least you're
moving in the right direction. But to go the other way?! It's just not
right.
Assumes the Euler angles correspond to the quaternion R via
R = exp(alpha*z/2) * exp(beta*y/2) * exp(gamma*z/2)
The angles are naturally in radians.
NOTE: Before opening an issue reporting something "wrong" with this
function, be sure to read all of the following page, *especially* the
very last section about opening issues or pull requests.
<https://github.com/moble/quaternion/wiki/Euler-angles-are-horrible>
Parameters
----------
q: quaternion or array of quaternions
The quaternion(s) need not be normalized, but must all be nonzero
Returns
-------
alpha_beta_gamma: float array
Output shape is q.shape+(3,). These represent the angles (alpha,
beta, gamma) in radians, where the normalized input quaternion
represents `exp(alpha*z/2) * exp(beta*y/2) * exp(gamma*z/2)`.
Raises
------
AllHell
...if you try to actually use Euler angles, when you could have
been using quaternions like a sensible person.
"""
alpha_beta_gamma = np.empty(q.shape + (3,), dtype=np.float)
n = np.norm(q)
q = as_float_array(q)
alpha_beta_gamma[..., 0] = np.arctan2(q[..., 3], q[..., 0]) + np.arctan2(-q[..., 1], q[..., 2])
alpha_beta_gamma[..., 1] = 2*np.arccos(np.sqrt((q[..., 0]**2 + q[..., 3]**2)/n))
alpha_beta_gamma[..., 2] = np.arctan2(q[..., 3], q[..., 0]) - np.arctan2(-q[..., 1], q[..., 2])
return alpha_beta_gamma
def from_euler_angles(alpha_beta_gamma, beta=None, gamma=None):
"""Improve your life drastically
Assumes the Euler angles correspond to the quaternion R via
R = exp(alpha*z/2) * exp(beta*y/2) * exp(gamma*z/2)
The angles naturally must be in radians for this to make any sense.
NOTE: Before opening an issue reporting something "wrong" with this
function, be sure to read all of the following page, *especially* the
very last section about opening issues or pull requests.
<https://github.com/moble/quaternion/wiki/Euler-angles-are-horrible>
Parameters
----------
alpha_beta_gamma: float or array of floats
This argument may either contain an array with last dimension of
size 3, where those three elements describe the (alpha, beta, gamma)
radian values for each rotation; or it may contain just the alpha
values, in which case the next two arguments must also be given.
beta: None, float, or array of floats
If this array is given, it must be able to broadcast against the
first and third arguments.
gamma: None, float, or array of floats
If this array is given, it must be able to broadcast against the
first and second arguments.
Returns
-------
R: quaternion array
The shape of this array will be the same as the input, except that
the last dimension will be removed.
"""
# Figure out the input angles from either type of input
if gamma is None:
alpha_beta_gamma = np.asarray(alpha_beta_gamma, dtype=np.double)
alpha = alpha_beta_gamma[..., 0]
beta = alpha_beta_gamma[..., 1]
gamma = alpha_beta_gamma[..., 2]
else:
alpha = np.asarray(alpha_beta_gamma, dtype=np.double)
beta = np.asarray(beta, dtype=np.double)
gamma = np.asarray(gamma, dtype=np.double)
# Set up the output array
R = np.empty(np.broadcast(alpha, beta, gamma).shape + (4,), dtype=np.double)
# Compute the actual values of the quaternion components
R[..., 0] = np.cos(beta/2)*np.cos((alpha+gamma)/2) # scalar quaternion components
R[..., 1] = -np.sin(beta/2)*np.sin((alpha-gamma)/2) # x quaternion components
R[..., 2] = np.sin(beta/2)*np.cos((alpha-gamma)/2) # y quaternion components
R[..., 3] = np.cos(beta/2)*np.sin((alpha+gamma)/2) # z quaternion components
return as_quat_array(R)
def as_spherical_coords(q):
"""Return the spherical coordinates corresponding to this quaternion
Obviously, spherical coordinates do not contain as much information as a
quaternion, so this function does lose some information. However, the
returned spherical coordinates will represent the point(s) on the sphere
to which the input quaternion(s) rotate the z axis.
Parameters
----------
q: quaternion or array of quaternions
The quaternion(s) need not be normalized, but must be nonzero
Returns
-------
vartheta_varphi: float array
Output shape is q.shape+(2,). These represent the angles (vartheta,
varphi) in radians, where the normalized input quaternion represents
`exp(varphi*z/2) * exp(vartheta*y/2)`, up to an arbitrary inital
rotation about `z`.
"""
return as_euler_angles(q)[..., 1::-1]
def from_spherical_coords(theta_phi, phi=None):
"""Return the quaternion corresponding to these spherical coordinates
Assumes the spherical coordinates correspond to the quaternion R via
R = exp(phi*z/2) * exp(theta*y/2)
The angles naturally must be in radians for this to make any sense.
Note that this quaternion rotates `z` onto the point with the given
spherical coordinates, but also rotates `x` and `y` onto the usual basis
vectors (theta and phi, respectively) at that point.
Parameters
----------
theta_phi: float or array of floats
This argument may either contain an array with last dimension of
size 2, where those two elements describe the (theta, phi) values in
radians for each point; or it may contain just the theta values in
radians, in which case the next argument must also be given.
phi: None, float, or array of floats
If this array is given, it must be able to broadcast against the
first argument.
Returns
-------
R: quaternion array
If the second argument is not given to this function, the shape
will be the same as the input shape except for the last dimension,
which will be removed. If the second argument is given, this
output array will have the shape resulting from broadcasting the
two input arrays against each other.
"""
# Figure out the input angles from either type of input
if phi is None:
theta_phi = np.asarray(theta_phi, dtype=np.double)
theta = theta_phi[..., 0]
phi = theta_phi[..., 1]
else:
theta = np.asarray(theta_phi, dtype=np.double)
phi = np.asarray(phi, dtype=np.double)
# Set up the output array
R = np.empty(np.broadcast(theta, phi).shape + (4,), dtype=np.double)
# Compute the actual values of the quaternion components
R[..., 0] = np.cos(phi/2)*np.cos(theta/2) # scalar quaternion components
R[..., 1] = -np.sin(phi/2)*np.sin(theta/2) # x quaternion components
R[..., 2] = np.cos(phi/2)*np.sin(theta/2) # y quaternion components
R[..., 3] = np.sin(phi/2)*np.cos(theta/2) # z quaternion components
return as_quat_array(R)
def rotate_vectors(R, v, axis=-1):
"""Rotate vectors by given quaternions
For simplicity, this function simply converts the input
quaternion(s) to a matrix, and rotates the input vector(s) by the
usual matrix multiplication. However, it should be noted that if
each input quaternion is only used to rotate a single vector, it
is more efficient (in terms of operation counts) to use the
formula
v' = v + 2 * r x (s * v + r x v) / m
where x represents the cross product, s and r are the scalar and
vector parts of the quaternion, respectively, and m is the sum of
the squares of the components of the quaternion. If you are
looping over a very large number of quaternions, and just rotating
a single vector each time, you might want to implement that
alternative algorithm using numba (or something that doesn't use
python).
Parameters
==========
R: quaternion array
Quaternions by which to rotate the input vectors
v: float array
Three-vectors to be rotated.
axis: int
Axis of the `v` array to use as the vector dimension. This
axis of `v` must have length 3.
Returns
=======
vprime: float array
The rotated vectors. This array has shape R.shape+v.shape.
"""
R = np.asarray(R, dtype=np.quaternion)
v = np.asarray(v, dtype=float)
if v.ndim < 1 or 3 not in v.shape:
raise ValueError("Input `v` does not have at least one dimension of length 3")
if v.shape[axis] != 3:
raise ValueError("Input `v` axis {0} has length {1}, not 3.".format(axis, v.shape[axis]))
m = as_rotation_matrix(R)
m_axes = list(range(m.ndim))
v_axes = list(range(m.ndim, m.ndim+v.ndim))
mv_axes = list(v_axes)
mv_axes[axis] = m_axes[-2]
mv_axes = m_axes[:-2] + mv_axes
v_axes[axis] = m_axes[-1]
return np.einsum(m, m_axes, v, v_axes, mv_axes)
def isclose(a, b, rtol=4*np.finfo(float).eps, atol=0.0, equal_nan=False):
"""
Returns a boolean array where two arrays are element-wise equal within a
tolerance.
This function is essentially a copy of the `numpy.isclose` function,
with different default tolerances and one minor changes necessary to
deal correctly with quaternions.
The tolerance values are positive, typically very small numbers. The
relative difference (`rtol` * abs(`b`)) and the absolute difference
`atol` are added together to compare against the absolute difference
between `a` and `b`.
Parameters
----------
a, b : array_like
Input arrays to compare.
rtol : float
The relative tolerance parameter (see Notes).
atol : float
The absolute tolerance parameter (see Notes).
equal_nan : bool
Whether to compare NaN's as equal. If True, NaN's in `a` will be
considered equal to NaN's in `b` in the output array.
Returns
-------
y : array_like
Returns a boolean array of where `a` and `b` are equal within the
given tolerance. If both `a` and `b` are scalars, returns a single
boolean value.
See Also
--------
allclose
Notes
-----
For finite values, isclose uses the following equation to test whether
two floating point values are equivalent:
absolute(`a` - `b`) <= (`atol` + `rtol` * absolute(`b`))
The above equation is not symmetric in `a` and `b`, so that
`isclose(a, b)` might be different from `isclose(b, a)` in
some rare cases.
Examples
--------
>>> quaternion.isclose([1e10*quaternion.x, 1e-7*quaternion.y], [1.00001e10*quaternion.x, 1e-8*quaternion.y],
... rtol=1.e-5, atol=1.e-8)
array([True, False])
>>> quaternion.isclose([1e10*quaternion.x, 1e-8*quaternion.y], [1.00001e10*quaternion.x, 1e-9*quaternion.y],
... rtol=1.e-5, atol=1.e-8)
array([True, True])
>>> quaternion.isclose([1e10*quaternion.x, 1e-8*quaternion.y], [1.0001e10*quaternion.x, 1e-9*quaternion.y],
... rtol=1.e-5, atol=1.e-8)
array([False, True])
>>> quaternion.isclose([quaternion.x, np.nan*quaternion.y], [quaternion.x, np.nan*quaternion.y])
array([True, False])
>>> quaternion.isclose([quaternion.x, np.nan*quaternion.y], [quaternion.x, np.nan*quaternion.y], equal_nan=True)
array([True, True])
"""
def within_tol(x, y, atol, rtol):
with np.errstate(invalid='ignore'):
result = np.less_equal(abs(x-y), atol + rtol * abs(y))
if np.isscalar(a) and np.isscalar(b):
result = bool(result)
return result
x = np.array(a, copy=False, subok=True, ndmin=1)
y = np.array(b, copy=False, subok=True, ndmin=1)
# Make sure y is an inexact type to avoid bad behavior on abs(MIN_INT).
# This will cause casting of x later. Also, make sure to allow subclasses
# (e.g., for numpy.ma).
try:
dt = np.result_type(y, 1.)
except TypeError:
dt = np.dtype(np.quaternion)
y = np.array(y, dtype=dt, copy=False, subok=True)
xfin = np.isfinite(x)
yfin = np.isfinite(y)
if np.all(xfin) and np.all(yfin):
return within_tol(x, y, atol, rtol)
else:
finite = xfin & yfin
cond = np.zeros_like(finite, subok=True)
# Because we're using boolean indexing, x & y must be the same shape.
# Ideally, we'd just do x, y = broadcast_arrays(x, y). It's in
# lib.stride_tricks, though, so we can't import it here.
x = x * np.ones_like(cond)
y = y * np.ones_like(cond)
# Avoid subtraction with infinite/nan values...
cond[finite] = within_tol(x[finite], y[finite], atol, rtol)
# Check for equality of infinite values...
cond[~finite] = (x[~finite] == y[~finite])
if equal_nan:
# Make NaN == NaN
both_nan = np.isnan(x) & np.isnan(y)
cond[both_nan] = both_nan[both_nan]
if np.isscalar(a) and np.isscalar(b):
return bool(cond)
else:
return cond
|
moble/quaternion
|
calculus.py
|
derivative
|
python
|
def derivative(f, t):
    """Differentiate the sampled function `f` with respect to the times `t`.

    Dispatches on the dimensionality of `f` (1-, 2-, or 3-d) to the matching
    fourth-order finite-difference worker, which handles non-uniform time
    steps.  The derivative is taken along the first axis of `f`.

    Raises
    ------
    NotImplementedError
        If `f` has more than three dimensions.
    """
    if f.ndim not in (1, 2, 3):
        raise NotImplementedError("Taking derivatives of {0}-dimensional arrays is not yet implemented".format(f.ndim))
    dfdt = np.empty_like(f)
    if f.ndim == 1:
        _derivative(f, t, dfdt)
    elif f.ndim == 2:
        _derivative_2d(f, t, dfdt)
    else:
        _derivative_3d(f, t, dfdt)
    return dfdt
|
Fourth-order finite-differencing with non-uniform time steps
The formula for this finite difference comes from Eq. (A 5b) of "Derivative formulas and errors for non-uniformly
spaced points" by M. K. Bowen and Ronald Smith. As explained in their Eqs. (B 9b) and (B 10b), this is a
fourth-order formula -- though that's a squishy concept with non-uniform time steps.
TODO: If there are fewer than five points, the function should revert to simpler (lower-order) formulas.
|
train
|
https://github.com/moble/quaternion/blob/7a323e81b391d6892e2874073e495e0beb057e85/calculus.py#L9-L28
|
[
"def _identity_decorator_inner(fn):\n return fn\n"
] |
# Copyright (c) 2017, Michael Boyle
# See LICENSE file for details: <https://github.com/moble/quaternion/blob/master/LICENSE>
from __future__ import division, print_function, absolute_import
import numpy as np
from quaternion.numba_wrapper import njit, jit, xrange
@njit
def _derivative(f, t, dfdt):
    # Fourth-order finite-difference derivative of the 1-d samples `f`
    # taken at the (possibly non-uniform) times `t`, written into the
    # pre-allocated output `dfdt`.  The five-point coefficients follow
    # Eq. (A 5b) of Bowen & Smith, "Derivative formulae and errors for
    # non-uniformly spaced points".  Requires len(t) >= 5.
    #
    # Left edge (i = 0, 1): one-sided stencil over the first five samples.
    for i in xrange(2):
        t_i = t[i]
        t1 = t[0]
        t2 = t[1]
        t3 = t[2]
        t4 = t[3]
        t5 = t[4]
        # Offsets of the nodes from the evaluation point...
        h1 = t1 - t_i
        h2 = t2 - t_i
        h3 = t3 - t_i
        h4 = t4 - t_i
        h5 = t5 - t_i
        # ...and pairwise node spacings.
        h12 = t1 - t2
        h13 = t1 - t3
        h14 = t1 - t4
        h15 = t1 - t5
        h23 = t2 - t3
        h24 = t2 - t4
        h25 = t2 - t5
        h34 = t3 - t4
        h35 = t3 - t5
        h45 = t4 - t5
        dfdt[i] = (-((h2 * h3 * h4 + h2 * h3 * h5 + h2 * h4 * h5 + h3 * h4 * h5) / (h12 * h13 * h14 * h15)) * f[0]
                   + ((h1 * h3 * h4 + h1 * h3 * h5 + h1 * h4 * h5 + h3 * h4 * h5) / (h12 * h23 * h24 * h25)) * f[1]
                   - ((h1 * h2 * h4 + h1 * h2 * h5 + h1 * h4 * h5 + h2 * h4 * h5) / (h13 * h23 * h34 * h35)) * f[2]
                   + ((h1 * h2 * h3 + h1 * h2 * h5 + h1 * h3 * h5 + h2 * h3 * h5) / (h14 * h24 * h34 * h45)) * f[3]
                   - ((h1 * h2 * h3 + h1 * h2 * h4 + h1 * h3 * h4 + h2 * h3 * h4) / (h15 * h25 * h35 * h45)) * f[4])
    # Interior points: centered five-point stencil.  The evaluation point
    # is t3 = t[i], so the h3 offset is zero and those terms drop out.
    for i in xrange(2, len(t) - 2):
        t1 = t[i - 2]
        t2 = t[i - 1]
        t3 = t[i]
        t4 = t[i + 1]
        t5 = t[i + 2]
        h1 = t1 - t3
        h2 = t2 - t3
        h4 = t4 - t3
        h5 = t5 - t3
        h12 = t1 - t2
        h13 = t1 - t3
        h14 = t1 - t4
        h15 = t1 - t5
        h23 = t2 - t3
        h24 = t2 - t4
        h25 = t2 - t5
        h34 = t3 - t4
        h35 = t3 - t5
        h45 = t4 - t5
        dfdt[i] = (-((h2 * h4 * h5) / (h12 * h13 * h14 * h15)) * f[i - 2]
                   + ((h1 * h4 * h5) / (h12 * h23 * h24 * h25)) * f[i - 1]
                   - ((h1 * h2 * h4 + h1 * h2 * h5 + h1 * h4 * h5 + h2 * h4 * h5) / (h13 * h23 * h34 * h35)) * f[i]
                   + ((h1 * h2 * h5) / (h14 * h24 * h34 * h45)) * f[i + 1]
                   - ((h1 * h2 * h4) / (h15 * h25 * h35 * h45)) * f[i + 2])
    # Right edge (last two points): one-sided stencil over the final five samples.
    for i in xrange(len(t) - 2, len(t)):
        t_i = t[i]
        t1 = t[-5]
        t2 = t[-4]
        t3 = t[-3]
        t4 = t[-2]
        t5 = t[-1]
        h1 = t1 - t_i
        h2 = t2 - t_i
        h3 = t3 - t_i
        h4 = t4 - t_i
        h5 = t5 - t_i
        h12 = t1 - t2
        h13 = t1 - t3
        h14 = t1 - t4
        h15 = t1 - t5
        h23 = t2 - t3
        h24 = t2 - t4
        h25 = t2 - t5
        h34 = t3 - t4
        h35 = t3 - t5
        h45 = t4 - t5
        dfdt[i] = (-((h2 * h3 * h4 + h2 * h3 * h5 + h2 * h4 * h5 + h3 * h4 * h5) / (h12 * h13 * h14 * h15)) * f[-5]
                   + ((h1 * h3 * h4 + h1 * h3 * h5 + h1 * h4 * h5 + h3 * h4 * h5) / (h12 * h23 * h24 * h25)) * f[-4]
                   - ((h1 * h2 * h4 + h1 * h2 * h5 + h1 * h4 * h5 + h2 * h4 * h5) / (h13 * h23 * h34 * h35)) * f[-3]
                   + ((h1 * h2 * h3 + h1 * h2 * h5 + h1 * h3 * h5 + h2 * h3 * h5) / (h14 * h24 * h34 * h45)) * f[-2]
                   - ((h1 * h2 * h3 + h1 * h2 * h4 + h1 * h3 * h4 + h2 * h3 * h4) / (h15 * h25 * h35 * h45)) * f[-1])
    return
@njit
def _derivative_2d(f, t, dfdt):
    # Same fourth-order non-uniform finite-difference stencil as
    # `_derivative`, applied independently to each column k of the 2-d
    # samples `f` (time along axis 0).  Requires len(t) >= 5.
    #
    # Left edge (i = 0, 1): one-sided stencil over the first five samples.
    for i in xrange(2):
        t_i = t[i]
        t1 = t[0]
        t2 = t[1]
        t3 = t[2]
        t4 = t[3]
        t5 = t[4]
        # Node offsets from the evaluation point and pairwise spacings.
        h1 = t1 - t_i
        h2 = t2 - t_i
        h3 = t3 - t_i
        h4 = t4 - t_i
        h5 = t5 - t_i
        h12 = t1 - t2
        h13 = t1 - t3
        h14 = t1 - t4
        h15 = t1 - t5
        h23 = t2 - t3
        h24 = t2 - t4
        h25 = t2 - t5
        h34 = t3 - t4
        h35 = t3 - t5
        h45 = t4 - t5
        for k in xrange(f.shape[1]):
            dfdt[i, k] = (
                -((h2 * h3 * h4 + h2 * h3 * h5 + h2 * h4 * h5 + h3 * h4 * h5) / (h12 * h13 * h14 * h15)) * f[0, k]
                + ((h1 * h3 * h4 + h1 * h3 * h5 + h1 * h4 * h5 + h3 * h4 * h5) / (h12 * h23 * h24 * h25)) * f[1, k]
                - ((h1 * h2 * h4 + h1 * h2 * h5 + h1 * h4 * h5 + h2 * h4 * h5) / (h13 * h23 * h34 * h35)) * f[2, k]
                + ((h1 * h2 * h3 + h1 * h2 * h5 + h1 * h3 * h5 + h2 * h3 * h5) / (h14 * h24 * h34 * h45)) * f[3, k]
                - ((h1 * h2 * h3 + h1 * h2 * h4 + h1 * h3 * h4 + h2 * h3 * h4) / (h15 * h25 * h35 * h45)) * f[4, k])
    # Interior points: centered stencil; t3 = t[i] so the h3 terms vanish.
    for i in xrange(2, len(t) - 2):
        t1 = t[i - 2]
        t2 = t[i - 1]
        t3 = t[i]
        t4 = t[i + 1]
        t5 = t[i + 2]
        h1 = t1 - t3
        h2 = t2 - t3
        h4 = t4 - t3
        h5 = t5 - t3
        h12 = t1 - t2
        h13 = t1 - t3
        h14 = t1 - t4
        h15 = t1 - t5
        h23 = t2 - t3
        h24 = t2 - t4
        h25 = t2 - t5
        h34 = t3 - t4
        h35 = t3 - t5
        h45 = t4 - t5
        for k in xrange(f.shape[1]):
            dfdt[i, k] = (-((h2 * h4 * h5) / (h12 * h13 * h14 * h15)) * f[i - 2, k]
                          + ((h1 * h4 * h5) / (h12 * h23 * h24 * h25)) * f[i - 1, k]
                          - ((h1 * h2 * h4 + h1 * h2 * h5 + h1 * h4 * h5 + h2 * h4 * h5) / (h13 * h23 * h34 * h35))
                          * f[i, k]
                          + ((h1 * h2 * h5) / (h14 * h24 * h34 * h45)) * f[i + 1, k]
                          - ((h1 * h2 * h4) / (h15 * h25 * h35 * h45)) * f[i + 2, k])
    # Right edge (last two points): one-sided stencil over the final five samples.
    for i in xrange(len(t) - 2, len(t)):
        t_i = t[i]
        t1 = t[-5]
        t2 = t[-4]
        t3 = t[-3]
        t4 = t[-2]
        t5 = t[-1]
        h1 = t1 - t_i
        h2 = t2 - t_i
        h3 = t3 - t_i
        h4 = t4 - t_i
        h5 = t5 - t_i
        h12 = t1 - t2
        h13 = t1 - t3
        h14 = t1 - t4
        h15 = t1 - t5
        h23 = t2 - t3
        h24 = t2 - t4
        h25 = t2 - t5
        h34 = t3 - t4
        h35 = t3 - t5
        h45 = t4 - t5
        for k in xrange(f.shape[1]):
            dfdt[i, k] = (
                -((h2 * h3 * h4 + h2 * h3 * h5 + h2 * h4 * h5 + h3 * h4 * h5) / (h12 * h13 * h14 * h15)) * f[-5, k]
                + ((h1 * h3 * h4 + h1 * h3 * h5 + h1 * h4 * h5 + h3 * h4 * h5) / (h12 * h23 * h24 * h25)) * f[-4, k]
                - ((h1 * h2 * h4 + h1 * h2 * h5 + h1 * h4 * h5 + h2 * h4 * h5) / (h13 * h23 * h34 * h35)) * f[-3, k]
                + ((h1 * h2 * h3 + h1 * h2 * h5 + h1 * h3 * h5 + h2 * h3 * h5) / (h14 * h24 * h34 * h45)) * f[-2, k]
                - ((h1 * h2 * h3 + h1 * h2 * h4 + h1 * h3 * h4 + h2 * h3 * h4) / (h15 * h25 * h35 * h45)) * f[-1, k])
    return
@njit
def _derivative_3d(f, t, dfdt):
    # Same fourth-order non-uniform finite-difference stencil as
    # `_derivative`, applied independently to each trailing element (k, m)
    # of the 3-d samples `f` (time along axis 0).  Requires len(t) >= 5.
    #
    # FIX: the inner loop previously ranged over `f.shape[1]` for the last
    # axis as well, which was only correct when the trailing two axes had
    # equal length (e.g. 3x3 matrices); it now ranges over `f.shape[2]`.
    #
    # Left edge (i = 0, 1): one-sided stencil over the first five samples.
    for i in xrange(2):
        t_i = t[i]
        t1 = t[0]
        t2 = t[1]
        t3 = t[2]
        t4 = t[3]
        t5 = t[4]
        # Node offsets from the evaluation point and pairwise spacings.
        h1 = t1 - t_i
        h2 = t2 - t_i
        h3 = t3 - t_i
        h4 = t4 - t_i
        h5 = t5 - t_i
        h12 = t1 - t2
        h13 = t1 - t3
        h14 = t1 - t4
        h15 = t1 - t5
        h23 = t2 - t3
        h24 = t2 - t4
        h25 = t2 - t5
        h34 = t3 - t4
        h35 = t3 - t5
        h45 = t4 - t5
        for k in xrange(f.shape[1]):
            for m in xrange(f.shape[2]):
                dfdt[i, k, m] = (
                    -((h2 * h3 * h4 + h2 * h3 * h5 + h2 * h4 * h5 + h3 * h4 * h5) / (h12 * h13 * h14 * h15)) * f[0, k, m]
                    + ((h1 * h3 * h4 + h1 * h3 * h5 + h1 * h4 * h5 + h3 * h4 * h5) / (h12 * h23 * h24 * h25)) * f[1, k, m]
                    - ((h1 * h2 * h4 + h1 * h2 * h5 + h1 * h4 * h5 + h2 * h4 * h5) / (h13 * h23 * h34 * h35)) * f[2, k, m]
                    + ((h1 * h2 * h3 + h1 * h2 * h5 + h1 * h3 * h5 + h2 * h3 * h5) / (h14 * h24 * h34 * h45)) * f[3, k, m]
                    - ((h1 * h2 * h3 + h1 * h2 * h4 + h1 * h3 * h4 + h2 * h3 * h4) / (h15 * h25 * h35 * h45)) * f[4, k, m])
    # Interior points: centered stencil; t3 = t[i] so the h3 terms vanish.
    for i in xrange(2, len(t) - 2):
        t1 = t[i - 2]
        t2 = t[i - 1]
        t3 = t[i]
        t4 = t[i + 1]
        t5 = t[i + 2]
        h1 = t1 - t3
        h2 = t2 - t3
        h4 = t4 - t3
        h5 = t5 - t3
        h12 = t1 - t2
        h13 = t1 - t3
        h14 = t1 - t4
        h15 = t1 - t5
        h23 = t2 - t3
        h24 = t2 - t4
        h25 = t2 - t5
        h34 = t3 - t4
        h35 = t3 - t5
        h45 = t4 - t5
        for k in xrange(f.shape[1]):
            for m in xrange(f.shape[2]):
                dfdt[i, k, m] = (-((h2 * h4 * h5) / (h12 * h13 * h14 * h15)) * f[i - 2, k, m]
                                 + ((h1 * h4 * h5) / (h12 * h23 * h24 * h25)) * f[i - 1, k, m]
                                 - ((h1 * h2 * h4 + h1 * h2 * h5 + h1 * h4 * h5 + h2 * h4 * h5) / (h13 * h23 * h34 * h35))
                                 * f[i, k, m]
                                 + ((h1 * h2 * h5) / (h14 * h24 * h34 * h45)) * f[i + 1, k, m]
                                 - ((h1 * h2 * h4) / (h15 * h25 * h35 * h45)) * f[i + 2, k, m])
    # Right edge (last two points): one-sided stencil over the final five samples.
    for i in xrange(len(t) - 2, len(t)):
        t_i = t[i]
        t1 = t[-5]
        t2 = t[-4]
        t3 = t[-3]
        t4 = t[-2]
        t5 = t[-1]
        h1 = t1 - t_i
        h2 = t2 - t_i
        h3 = t3 - t_i
        h4 = t4 - t_i
        h5 = t5 - t_i
        h12 = t1 - t2
        h13 = t1 - t3
        h14 = t1 - t4
        h15 = t1 - t5
        h23 = t2 - t3
        h24 = t2 - t4
        h25 = t2 - t5
        h34 = t3 - t4
        h35 = t3 - t5
        h45 = t4 - t5
        for k in xrange(f.shape[1]):
            for m in xrange(f.shape[2]):
                dfdt[i, k, m] = (
                    -((h2 * h3 * h4 + h2 * h3 * h5 + h2 * h4 * h5 + h3 * h4 * h5) / (h12 * h13 * h14 * h15)) * f[-5, k, m]
                    + ((h1 * h3 * h4 + h1 * h3 * h5 + h1 * h4 * h5 + h3 * h4 * h5) / (h12 * h23 * h24 * h25)) * f[-4, k, m]
                    - ((h1 * h2 * h4 + h1 * h2 * h5 + h1 * h4 * h5 + h2 * h4 * h5) / (h13 * h23 * h34 * h35)) * f[-3, k, m]
                    + ((h1 * h2 * h3 + h1 * h2 * h5 + h1 * h3 * h5 + h2 * h3 * h5) / (h14 * h24 * h34 * h45)) * f[-2, k, m]
                    - ((h1 * h2 * h3 + h1 * h2 * h4 + h1 * h3 * h4 + h2 * h3 * h4) / (h15 * h25 * h35 * h45)) * f[-1, k, m])
    return
# @njit('void(f8[:,:], f8[:], f8[:,:])')
@jit
def indefinite_integral(f, t):
    """Cumulative trapezoid-rule antiderivative of the 2-d samples `f(t)`.

    Returns an array of the same shape as `f`, whose row i holds the
    running integral of each column of `f` from t[0] to t[i] (row 0 is
    zero).  Handles non-uniform time steps.
    """
    running = np.empty_like(f)
    running[0] = 0.0
    for i in xrange(1, len(t)):
        half_dt = (t[i] - t[i - 1]) / 2.0
        for j in xrange(f.shape[1]):
            running[i, j] = running[i - 1, j] + (f[i, j] + f[i - 1, j]) * half_dt
    return running
#@njit('void(f8[:,:], f8[:], f8[:])')
@jit
def definite_integral(f, t):
    """Per-interval trapezoid-rule contributions for the samples `f(t)`.

    Entry i (for i >= 1) holds the trapezoid-rule integral of `f` over
    [t[i-1], t[i]]; entry 0 is zero.  NOTE(review): despite the name, the
    total definite integral is the sum of these entries along axis 0 —
    presumably the caller performs that reduction; confirm against callers.
    """
    increments = np.zeros_like(f)
    for i in xrange(1, f.shape[0]):
        increments[i, ...] = (f[i, ...] + f[i - 1, ...]) * ((t[i] - t[i - 1]) / 2.0)
    return increments
|
ethereum/eth-account
|
eth_account/signers/local.py
|
LocalAccount.encrypt
|
python
|
def encrypt(self, password, kdf=None, iterations=None):
    '''
    Encrypt this account's private key, as in
    :meth:`~eth_account.account.Account.encrypt`, but without needing to
    pass the private key explicitly.
    '''
    key_material = self.privateKey
    return self._publicapi.encrypt(key_material, password, kdf=kdf, iterations=iterations)
|
Generate a string with the encrypted key, as in
:meth:`~eth_account.account.Account.encrypt`, but without a private key argument.
|
train
|
https://github.com/ethereum/eth-account/blob/335199b815ae34fea87f1523e2f29777fd52946e/eth_account/signers/local.py#L51-L56
| null |
class LocalAccount(BaseAccount):
    '''
    A collection of convenience methods to sign and encrypt, with an embedded private key.

    :var bytes privateKey: the 32-byte private key data

    .. code-block:: python

        >>> my_local_account.address
        "0xF0109fC8DF283027b6285cc889F5aA624EaC1F55"
        >>> my_local_account.privateKey
        b"\\x01\\x23..."

    You can also get the private key by casting the account to :class:`bytes`:

    .. code-block:: python

        >>> bytes(my_local_account)
        b"\\x01\\x23..."
    '''
    def __init__(self, key, account):
        '''
        :param eth_keys.PrivateKey key: to prefill in private key execution
        :param ~eth_account.account.Account account: the key-unaware management API
        '''
        self._publicapi = account
        # Cache the checksummed address and raw key bytes up front so the
        # properties below are simple attribute reads.
        self._address = key.public_key.to_checksum_address()
        self._privateKey = key.to_bytes()
        self._key_obj = key

    @property
    def address(self):
        return self._address

    @property
    def privateKey(self):
        '''
        Get the private key.
        '''
        return self._privateKey

    def signHash(self, message_hash):
        # Delegate to the key-unaware API, supplying the embedded key.
        return self._publicapi.signHash(message_hash, private_key=self.privateKey)

    def signTransaction(self, transaction_dict):
        return self._publicapi.signTransaction(transaction_dict, self.privateKey)

    def __bytes__(self):
        return self.privateKey
|
ethereum/eth-account
|
eth_account/messages.py
|
defunct_hash_message
|
python
|
def defunct_hash_message(
        primitive=None,
        *,
        hexstr=None,
        text=None,
        signature_version=b'E',
        version_specific_data=None):
    '''
    Convert the provided message into a message hash, to be signed.

    This provides the same prefix and hashing approach as
    :meth:`w3.eth.sign() <web3.eth.Eth.sign>`.

    Supply exactly one of the three message arguments:
    bytes, a hex string, or a unicode string.

    Two signature versions are currently supported:

    * **Version** ``0x45`` (version ``E``, the default when
      ``signature_version`` is not specified):
      ``b'\\x19Ethereum Signed Message:\\n'`` concatenated with the number
      of bytes in the message, encoded in decimal ascii.  So if the message
      is 'abcde', the length is encoded as the ascii character '5'.  This
      encoding is ambiguous (e.g. when the message '00' is encoded), which
      is one reason this format is not preferred; only use it if you must
      have compatibility with :meth:`w3.eth.sign() <web3.eth.Eth.sign>`.
    * **Version** ``0x00`` (version ``0``): sign data with an intended
      validator (EIP 191).  Here ``version_specific_data`` is a hexstr
      holding the 20-byte account address of the intended validator.

    :param primitive: the binary message to be signed
    :type primitive: bytes or int
    :param str hexstr: the message encoded as hex
    :param str text: the message as a series of unicode characters (a normal Py3 str)
    :param bytes signature_version: a byte indicating which kind of prefix is to be added (EIP 191)
    :param version_specific_data: the data which is related to the prefix (EIP 191)
    :returns: The hash of the message, after adding the prefix
    :rtype: ~hexbytes.main.HexBytes

    .. code-block:: python

        >>> from eth_account.messages import defunct_hash_message
        >>> msg = "I♥SF"
        >>> defunct_hash_message(text=msg)
        HexBytes('0x1476abb745d423bf09273f1afd887d951181d25adc66c4834a70491911b7f750')
    '''
    message_bytes = to_bytes(primitive, hexstr=hexstr, text=text)
    # Build the EIP-191 wrapper for the requested version, apply it to the
    # message, then hash and wrap the digest.  (Equivalent to the previous
    # right-to-left `compose(HexBytes, keccak, signature_wrapper(...))`.)
    wrap_signature = signature_wrapper(
        signature_version=signature_version,
        version_specific_data=version_specific_data,
    )
    return HexBytes(keccak(wrap_signature(message_bytes)))
|
Convert the provided message into a message hash, to be signed.
This provides the same prefix and hashing approach as
:meth:`w3.eth.sign() <web3.eth.Eth.sign>`.
Currently you can only specify the ``signature_version`` as following.
* **Version** ``0x45`` (version ``E``): ``b'\\x19Ethereum Signed Message:\\n'``
concatenated with the number of bytes in the message.
.. note:: This is the default version used when the signature_version is not specified.
* **Version** ``0x00`` (version ``0``): Sign data with intended validator (EIP 191).
Here the version_specific_data would be a hexstr which is the 20 bytes account address
of the intended validator.
For version ``0x45`` (version ``E``), Awkwardly, the number of bytes in the message is
encoded in decimal ascii. So if the message is 'abcde', then the length is encoded as the ascii
character '5'. This is one of the reasons that this message format is not preferred.
There is ambiguity when the message '00' is encoded, for example.
Only use this method with version ``E`` if you must have compatibility with
:meth:`w3.eth.sign() <web3.eth.Eth.sign>`.
Supply exactly one of the three arguments:
bytes, a hex string, or a unicode string.
:param primitive: the binary message to be signed
:type primitive: bytes or int
:param str hexstr: the message encoded as hex
:param str text: the message as a series of unicode characters (a normal Py3 str)
:param bytes signature_version: a byte indicating which kind of prefix is to be added (EIP 191)
:param version_specific_data: the data which is related to the prefix (EIP 191)
:returns: The hash of the message, after adding the prefix
:rtype: ~hexbytes.main.HexBytes
.. code-block:: python
>>> from eth_account.messages import defunct_hash_message
>>> msg = "I♥SF"
>>> defunct_hash_message(text=msg)
HexBytes('0x1476abb745d423bf09273f1afd887d951181d25adc66c4834a70491911b7f750')
# these four also produce the same hash:
>>> defunct_hash_message(w3.toBytes(text=msg))
HexBytes('0x1476abb745d423bf09273f1afd887d951181d25adc66c4834a70491911b7f750')
>>> defunct_hash_message(bytes(msg, encoding='utf-8'))
HexBytes('0x1476abb745d423bf09273f1afd887d951181d25adc66c4834a70491911b7f750')
>>> Web3.toHex(text=msg)
'0x49e299a55346'
>>> defunct_hash_message(hexstr='0x49e299a55346')
HexBytes('0x1476abb745d423bf09273f1afd887d951181d25adc66c4834a70491911b7f750')
>>> defunct_hash_message(0x49e299a55346)
HexBytes('0x1476abb745d423bf09273f1afd887d951181d25adc66c4834a70491911b7f750')
|
train
|
https://github.com/ethereum/eth-account/blob/335199b815ae34fea87f1523e2f29777fd52946e/eth_account/messages.py#L17-L90
| null |
from cytoolz import (
compose,
)
from eth_utils import (
keccak,
to_bytes,
)
from hexbytes import (
HexBytes,
)
from eth_account._utils.signing import (
signature_wrapper,
)
|
ethereum/eth-account
|
eth_account/account.py
|
Account.create
|
python
|
def create(self, extra_entropy=''):
    '''
    Creates a new private key, and returns it as a :class:`~eth_account.local.LocalAccount`.

    The key is derived by hashing 32 bytes of OS randomness together with
    any caller-supplied extra entropy.

    :param extra_entropy: Add extra randomness to whatever randomness your OS can provide
    :type extra_entropy: str or bytes or int
    :returns: an object with private key and convenience methods
    '''
    entropy_bytes = text_if_str(to_bytes, extra_entropy)
    seed = os.urandom(32) + entropy_bytes
    return self.privateKeyToAccount(keccak(seed))
|
Creates a new private key, and returns it as a :class:`~eth_account.local.LocalAccount`.
:param extra_entropy: Add extra randomness to whatever randomness your OS can provide
:type extra_entropy: str or bytes or int
:returns: an object with private key and convenience methods
.. code-block:: python
>>> from eth_account import Account
>>> acct = Account.create('KEYSMASH FJAFJKLDSKF7JKFDJ 1530')
>>> acct.address
'0x5ce9454909639D2D17A3F753ce7d93fa0b9aB12E'
>>> acct.privateKey
b"\\xb2\\}\\xb3\\x1f\\xee\\xd9\\x12''\\xbf\\t9\\xdcv\\x9a\\x96VK-\\xe4\\xc4rm\\x03[6\\xec\\xf1\\xe5\\xb3d"
# These methods are also available: signHash(), signTransaction(), encrypt()
# They correspond to the same-named methods in Account.*
# but without the private key argument
|
train
|
https://github.com/ethereum/eth-account/blob/335199b815ae34fea87f1523e2f29777fd52946e/eth_account/account.py#L69-L92
| null |
class Account(object):
'''
This is the primary entry point for working with Ethereum private keys.
It does **not** require a connection to an Ethereum node.
'''
_keys = keys
default_kdf = os.getenv('ETH_ACCOUNT_KDF', 'scrypt')
'''
The default key deriviation function (KDF) to use when encrypting a private key. If the
environment variable :envvar:`ETH_ACCOUNT_KDF` is set, it's value will be used as the default.
Otherwise, 'scrypt' will be used as the default.
'''
@combomethod
@staticmethod
def decrypt(keyfile_json, password):
    '''
    Decrypts a private key that was encrypted using an Ethereum client or
    :meth:`~Account.encrypt`.

    :param keyfile_json: The encrypted key
    :type keyfile_json: dict or str
    :param str password: The password that was used to encrypt the key
    :returns: the raw private key
    :rtype: ~hexbytes.main.HexBytes
    '''
    # Accept either an already-parsed keyfile dict or its JSON text.
    if is_dict(keyfile_json):
        keyfile = keyfile_json
    elif isinstance(keyfile_json, str):
        keyfile = json.loads(keyfile_json)
    else:
        raise TypeError("The keyfile should be supplied as a JSON string, or a dictionary.")
    password_bytes = text_if_str(to_bytes, password)
    return HexBytes(decode_keyfile_json(keyfile, password_bytes))
@classmethod
def encrypt(cls, private_key, password, kdf=None, iterations=None):
    '''
    Creates a dictionary with an encrypted version of your private key.

    To import this keyfile into Ethereum clients like geth and parity:
    encode this dictionary with :func:`json.dumps` and save it to disk where your
    client keeps key files.

    :param private_key: The raw private key
    :type private_key: hex str, bytes, int or :class:`eth_keys.datatypes.PrivateKey`
    :param str password: The password which you will need to unlock the account in your client
    :param str kdf: The key derivation function to use when encrypting your private key
    :param int iterations: The work factor for the key derivation function
    :returns: The data to use in your encrypted file
    :rtype: dict
    '''
    # Normalize the key argument to its raw 32-byte form.
    if isinstance(private_key, keys.PrivateKey):
        raw_key = private_key.to_bytes()
    else:
        raw_key = HexBytes(private_key)
    chosen_kdf = kdf if kdf is not None else cls.default_kdf
    pw_bytes = text_if_str(to_bytes, password)
    assert len(raw_key) == 32
    return create_keyfile_json(raw_key, pw_bytes, kdf=chosen_kdf, iterations=iterations)
@combomethod
def privateKeyToAccount(self, private_key):
    '''
    Returns a convenient object for working with the given private key.

    :param private_key: The raw private key
    :type private_key: hex str, bytes, int or :class:`eth_keys.datatypes.PrivateKey`
    :return: object with methods for signing and encrypting
    :rtype: LocalAccount
    '''
    # Parse/validate the key first, then wrap it with this API instance.
    return LocalAccount(self._parsePrivateKey(private_key), self)
@combomethod
def recoverHash(self, message_hash, vrs=None, signature=None):
    '''
    Get the address of the account that signed the message with the given hash.

    You must specify exactly one of: vrs or signature

    The hash, the v/r/s components, and the signature may each be supplied
    as a hex str, bytes, or int.

    :param message_hash: the hash of the message that you want to verify
    :type message_hash: hex str or bytes or int
    :param vrs: the three pieces generated by an elliptic curve signature
    :type vrs: tuple(v, r, s), each element is hex str, bytes or int
    :param signature: signature bytes concatenated as r+s+v
    :type signature: hex str or bytes or int
    :returns: address of signer, hex-encoded & checksummed
    :rtype: str
    '''
    hash_bytes = HexBytes(message_hash)
    if len(hash_bytes) != 32:
        raise ValueError("The message hash must be exactly 32-bytes")
    # Build a Signature object from whichever representation was supplied.
    if vrs is not None:
        v_raw, r, s = map(hexstr_if_str(to_int), vrs)
        sig = self._keys.Signature(vrs=(to_standard_v(v_raw), r, s))
    elif signature is not None:
        standard_bytes = to_standard_signature_bytes(HexBytes(signature))
        sig = self._keys.Signature(signature_bytes=standard_bytes)
    else:
        raise TypeError("You must supply the vrs tuple or the signature bytes")
    return sig.recover_public_key_from_msg_hash(hash_bytes).to_checksum_address()
@combomethod
def recoverTransaction(self, serialized_transaction):
    '''
    Get the address of the account that signed this transaction.

    :param serialized_transaction: the complete signed transaction
    :type serialized_transaction: hex str, bytes or int
    :returns: address of signer, hex-encoded & checksummed
    :rtype: str
    '''
    # Decode the RLP transaction, then recover from its signing hash
    # and embedded v/r/s signature components.
    txn = Transaction.from_bytes(HexBytes(serialized_transaction))
    return self.recoverHash(hash_of_signed_transaction(txn), vrs=vrs_from(txn))
def setKeyBackend(self, backend):
    '''
    Change the backend used by the underlying eth-keys library.

    *(The default is fine for most users)*

    :param backend: any backend that works in
        `eth_keys.KeyApi(backend) <https://github.com/ethereum/eth-keys/#keyapibackendnone>`_
    '''
    # Replace the KeyAPI instance used for all subsequent sign/recover calls.
    self._keys = KeyAPI(backend)
@combomethod
def signHash(self, message_hash, private_key):
'''
Sign the hash provided.
.. WARNING:: *Never* sign a hash that you didn't generate,
it can be an arbitrary transaction. For example, it might
send all of your account's ether to an attacker.
If you would like compatibility with
:meth:`w3.eth.sign() <web3.eth.Eth.sign>`
you can use :meth:`~eth_account.messages.defunct_hash_message`.
Several other message standards are proposed, but none have a clear
consensus. You'll need to manually comply with any of those message standards manually.
:param message_hash: the 32-byte message hash to be signed
:type message_hash: hex str, bytes or int
:param private_key: the key to sign the message with
:type private_key: hex str, bytes, int or :class:`eth_keys.datatypes.PrivateKey`
:returns: Various details about the signature - most
importantly the fields: v, r, and s
:rtype: ~eth_account.datastructures.AttributeDict
.. code-block:: python
>>> msg = "I♥SF"
>>> from eth_account.messages import defunct_hash_message
>>> msghash = defunct_hash_message(text=msg)
HexBytes('0x1476abb745d423bf09273f1afd887d951181d25adc66c4834a70491911b7f750')
>>> key = "0xb25c7db31feed9122727bf0939dc769a96564b2de4c4726d035b36ecf1e5b364"
>>> Account.signHash(msghash, key)
{'messageHash': HexBytes('0x1476abb745d423bf09273f1afd887d951181d25adc66c4834a70491911b7f750'), # noqa: E501
'r': 104389933075820307925104709181714897380569894203213074526835978196648170704563,
's': 28205917190874851400050446352651915501321657673772411533993420917949420456142,
'signature': HexBytes('0xe6ca9bba58c88611fad66a6ce8f996908195593807c4b38bd528d2cff09d4eb33e5bfbbf4d3e39b1a2fd816a7680c19ebebaf3a141b239934ad43cb33fcec8ce1c'), # noqa: E501
'v': 28}
# these are equivalent:
>>> Account.signHash(
0x1476abb745d423bf09273f1afd887d951181d25adc66c4834a70491911b7f750,
key
)
>>> Account.signHash(
"0x1476abb745d423bf09273f1afd887d951181d25adc66c4834a70491911b7f750",
key
)
'''
msg_hash_bytes = HexBytes(message_hash)
if len(msg_hash_bytes) != 32:
raise ValueError("The message hash must be exactly 32-bytes")
key = self._parsePrivateKey(private_key)
(v, r, s, eth_signature_bytes) = sign_message_hash(key, msg_hash_bytes)
return AttributeDict({
'messageHash': msg_hash_bytes,
'r': r,
's': s,
'v': v,
'signature': HexBytes(eth_signature_bytes),
})
@combomethod
def signTransaction(self, transaction_dict, private_key):
'''
Sign a transaction using a local private key. Produces signature details
and the hex-encoded transaction suitable for broadcast using
:meth:`w3.eth.sendRawTransaction() <web3.eth.Eth.sendRawTransaction>`.
Create the transaction dict for a contract method with
`my_contract.functions.my_function().buildTransaction()
<http://web3py.readthedocs.io/en/latest/contracts.html#methods>`_
:param dict transaction_dict: the transaction with keys:
nonce, chainId, to, data, value, gas, and gasPrice.
:param private_key: the private key to sign the data with
:type private_key: hex str, bytes, int or :class:`eth_keys.datatypes.PrivateKey`
:returns: Various details about the signature - most
importantly the fields: v, r, and s
:rtype: AttributeDict
.. code-block:: python
>>> transaction = {
# Note that the address must be in checksum format or native bytes:
'to': '0xF0109fC8DF283027b6285cc889F5aA624EaC1F55',
'value': 1000000000,
'gas': 2000000,
'gasPrice': 234567897654321,
'nonce': 0,
'chainId': 1
}
>>> key = '0x4c0883a69102937d6231471b5dbb6204fe5129617082792ae468d01a3f362318'
>>> signed = Account.signTransaction(transaction, key)
{'hash': HexBytes('0x6893a6ee8df79b0f5d64a180cd1ef35d030f3e296a5361cf04d02ce720d32ec5'),
'r': 4487286261793418179817841024889747115779324305375823110249149479905075174044,
'rawTransaction': HexBytes('0xf86a8086d55698372431831e848094f0109fc8df283027b6285cc889f5aa624eac1f55843b9aca008025a009ebb6ca057a0535d6186462bc0b465b561c94a295bdb0621fc19208ab149a9ca0440ffd775ce91a833ab410777204d5341a6f9fa91216a6f3ee2c051fea6a0428'), # noqa: E501
's': 30785525769477805655994251009256770582792548537338581640010273753578382951464,
'v': 37}
>>> w3.eth.sendRawTransaction(signed.rawTransaction)
'''
if not isinstance(transaction_dict, Mapping):
raise TypeError("transaction_dict must be dict-like, got %r" % transaction_dict)
account = self.privateKeyToAccount(private_key)
# allow from field, *only* if it matches the private key
if 'from' in transaction_dict:
if transaction_dict['from'] == account.address:
sanitized_transaction = dissoc(transaction_dict, 'from')
else:
raise TypeError("from field must match key's %s, but it was %s" % (
account.address,
transaction_dict['from'],
))
else:
sanitized_transaction = transaction_dict
# sign transaction
(
v,
r,
s,
rlp_encoded,
) = sign_transaction_dict(account._key_obj, sanitized_transaction)
transaction_hash = keccak(rlp_encoded)
return AttributeDict({
'rawTransaction': HexBytes(rlp_encoded),
'hash': HexBytes(transaction_hash),
'r': r,
's': s,
'v': v,
})
@combomethod
def _parsePrivateKey(self, key):
'''
Generate a :class:`eth_keys.datatypes.PrivateKey` from the provided key. If the
key is already of type :class:`eth_keys.datatypes.PrivateKey`, return the key.
:param key: the private key from which a :class:`eth_keys.datatypes.PrivateKey`
will be generated
:type key: hex str, bytes, int or :class:`eth_keys.datatypes.PrivateKey`
:returns: the provided key represented as a :class:`eth_keys.datatypes.PrivateKey`
'''
if isinstance(key, self._keys.PrivateKey):
return key
try:
return self._keys.PrivateKey(HexBytes(key))
except ValidationError as original_exception:
raise ValueError(
"The private key must be exactly 32 bytes long, instead of "
"%d bytes." % len(key)
) from original_exception
|
ethereum/eth-account
|
eth_account/account.py
|
Account.decrypt
|
python
|
def decrypt(keyfile_json, password):
'''
Decrypts a private key that was encrypted using an Ethereum client or
:meth:`~Account.encrypt`.
:param keyfile_json: The encrypted key
:type keyfile_json: dict or str
:param str password: The password that was used to encrypt the key
:returns: the raw private key
:rtype: ~hexbytes.main.HexBytes
.. code-block:: python
>>> encrypted = {
'address': '5ce9454909639d2d17a3f753ce7d93fa0b9ab12e',
'crypto': {'cipher': 'aes-128-ctr',
'cipherparams': {'iv': '78f214584844e0b241b433d7c3bb8d5f'},
'ciphertext': 'd6dbb56e4f54ba6db2e8dc14df17cb7352fdce03681dd3f90ce4b6c1d5af2c4f',
'kdf': 'pbkdf2',
'kdfparams': {'c': 1000000,
'dklen': 32,
'prf': 'hmac-sha256',
'salt': '45cf943b4de2c05c2c440ef96af914a2'},
'mac': 'f5e1af09df5ded25c96fcf075ada313fb6f79735a914adc8cb02e8ddee7813c3'},
'id': 'b812f3f9-78cc-462a-9e89-74418aa27cb0',
'version': 3}
>>> import getpass
>>> Account.decrypt(encrypted, getpass.getpass())
HexBytes('0xb25c7db31feed9122727bf0939dc769a96564b2de4c4726d035b36ecf1e5b364')
'''
if isinstance(keyfile_json, str):
keyfile = json.loads(keyfile_json)
elif is_dict(keyfile_json):
keyfile = keyfile_json
else:
raise TypeError("The keyfile should be supplied as a JSON string, or a dictionary.")
password_bytes = text_if_str(to_bytes, password)
return HexBytes(decode_keyfile_json(keyfile, password_bytes))
|
Decrypts a private key that was encrypted using an Ethereum client or
:meth:`~Account.encrypt`.
:param keyfile_json: The encrypted key
:type keyfile_json: dict or str
:param str password: The password that was used to encrypt the key
:returns: the raw private key
:rtype: ~hexbytes.main.HexBytes
.. code-block:: python
>>> encrypted = {
'address': '5ce9454909639d2d17a3f753ce7d93fa0b9ab12e',
'crypto': {'cipher': 'aes-128-ctr',
'cipherparams': {'iv': '78f214584844e0b241b433d7c3bb8d5f'},
'ciphertext': 'd6dbb56e4f54ba6db2e8dc14df17cb7352fdce03681dd3f90ce4b6c1d5af2c4f',
'kdf': 'pbkdf2',
'kdfparams': {'c': 1000000,
'dklen': 32,
'prf': 'hmac-sha256',
'salt': '45cf943b4de2c05c2c440ef96af914a2'},
'mac': 'f5e1af09df5ded25c96fcf075ada313fb6f79735a914adc8cb02e8ddee7813c3'},
'id': 'b812f3f9-78cc-462a-9e89-74418aa27cb0',
'version': 3}
>>> import getpass
>>> Account.decrypt(encrypted, getpass.getpass())
HexBytes('0xb25c7db31feed9122727bf0939dc769a96564b2de4c4726d035b36ecf1e5b364')
|
train
|
https://github.com/ethereum/eth-account/blob/335199b815ae34fea87f1523e2f29777fd52946e/eth_account/account.py#L95-L134
| null |
class Account(object):
'''
This is the primary entry point for working with Ethereum private keys.
It does **not** require a connection to an Ethereum node.
'''
_keys = keys
default_kdf = os.getenv('ETH_ACCOUNT_KDF', 'scrypt')
'''
The default key deriviation function (KDF) to use when encrypting a private key. If the
environment variable :envvar:`ETH_ACCOUNT_KDF` is set, it's value will be used as the default.
Otherwise, 'scrypt' will be used as the default.
'''
@combomethod
def create(self, extra_entropy=''):
'''
Creates a new private key, and returns it as a :class:`~eth_account.local.LocalAccount`.
:param extra_entropy: Add extra randomness to whatever randomness your OS can provide
:type extra_entropy: str or bytes or int
:returns: an object with private key and convenience methods
.. code-block:: python
>>> from eth_account import Account
>>> acct = Account.create('KEYSMASH FJAFJKLDSKF7JKFDJ 1530')
>>> acct.address
'0x5ce9454909639D2D17A3F753ce7d93fa0b9aB12E'
>>> acct.privateKey
b"\\xb2\\}\\xb3\\x1f\\xee\\xd9\\x12''\\xbf\\t9\\xdcv\\x9a\\x96VK-\\xe4\\xc4rm\\x03[6\\xec\\xf1\\xe5\\xb3d"
# These methods are also available: signHash(), signTransaction(), encrypt()
# They correspond to the same-named methods in Account.*
# but without the private key argument
'''
extra_key_bytes = text_if_str(to_bytes, extra_entropy)
key_bytes = keccak(os.urandom(32) + extra_key_bytes)
return self.privateKeyToAccount(key_bytes)
@staticmethod
@classmethod
def encrypt(cls, private_key, password, kdf=None, iterations=None):
'''
Creates a dictionary with an encrypted version of your private key.
To import this keyfile into Ethereum clients like geth and parity:
encode this dictionary with :func:`json.dumps` and save it to disk where your
client keeps key files.
:param private_key: The raw private key
:type private_key: hex str, bytes, int or :class:`eth_keys.datatypes.PrivateKey`
:param str password: The password which you will need to unlock the account in your client
:param str kdf: The key derivation function to use when encrypting your private key
:param int iterations: The work factor for the key derivation function
:returns: The data to use in your encrypted file
:rtype: dict
.. code-block:: python
>>> import getpass
>>> encrypted = Account.encrypt(
0xb25c7db31feed9122727bf0939dc769a96564b2de4c4726d035b36ecf1e5b364,
getpass.getpass()
)
{
'address': '5ce9454909639d2d17a3f753ce7d93fa0b9ab12e',
'crypto': {
'cipher': 'aes-128-ctr',
'cipherparams': {
'iv': '0b7845a5c3597d3d378bde9b7c7319b7'
},
'ciphertext': 'a494f1feb3c854e99c1ff01e6aaa17d43c0752009073503b908457dc8de5d2a5', # noqa: E501
'kdf': 'scrypt',
'kdfparams': {
'dklen': 32,
'n': 262144,
'p': 8,
'r': 1,
'salt': '13c4a48123affaa29189e9097726c698'
},
'mac': 'f4cfb027eb0af9bd7a320b4374a3fa7bef02cfbafe0ec5d1fd7ad129401de0b1'
},
'id': 'a60e0578-0e5b-4a75-b991-d55ec6451a6f',
'version': 3
}
>>> with open('my-keyfile', 'w') as f:
f.write(json.dumps(encrypted))
'''
if isinstance(private_key, keys.PrivateKey):
key_bytes = private_key.to_bytes()
else:
key_bytes = HexBytes(private_key)
if kdf is None:
kdf = cls.default_kdf
password_bytes = text_if_str(to_bytes, password)
assert len(key_bytes) == 32
return create_keyfile_json(key_bytes, password_bytes, kdf=kdf, iterations=iterations)
@combomethod
def privateKeyToAccount(self, private_key):
'''
Returns a convenient object for working with the given private key.
:param private_key: The raw private key
:type private_key: hex str, bytes, int or :class:`eth_keys.datatypes.PrivateKey`
:return: object with methods for signing and encrypting
:rtype: LocalAccount
.. code-block:: python
>>> acct = Account.privateKeyToAccount(
0xb25c7db31feed9122727bf0939dc769a96564b2de4c4726d035b36ecf1e5b364)
>>> acct.address
'0x5ce9454909639D2D17A3F753ce7d93fa0b9aB12E'
>>> acct.privateKey
b"\\xb2\\}\\xb3\\x1f\\xee\\xd9\\x12''\\xbf\\t9\\xdcv\\x9a\\x96VK-\\xe4\\xc4rm\\x03[6\\xec\\xf1\\xe5\\xb3d"
# These methods are also available: signHash(), signTransaction(), encrypt()
# They correspond to the same-named methods in Account.*
# but without the private key argument
'''
key = self._parsePrivateKey(private_key)
return LocalAccount(key, self)
@combomethod
def recoverHash(self, message_hash, vrs=None, signature=None):
'''
Get the address of the account that signed the message with the given hash.
You must specify exactly one of: vrs or signature
:param message_hash: the hash of the message that you want to verify
:type message_hash: hex str or bytes or int
:param vrs: the three pieces generated by an elliptic curve signature
:type vrs: tuple(v, r, s), each element is hex str, bytes or int
:param signature: signature bytes concatenated as r+s+v
:type signature: hex str or bytes or int
:returns: address of signer, hex-encoded & checksummed
:rtype: str
.. code-block:: python
>>> msg = "I♥SF"
>>> msghash = '0x1476abb745d423bf09273f1afd887d951181d25adc66c4834a70491911b7f750'
>>> vrs = (
28,
'0xe6ca9bba58c88611fad66a6ce8f996908195593807c4b38bd528d2cff09d4eb3',
'0x3e5bfbbf4d3e39b1a2fd816a7680c19ebebaf3a141b239934ad43cb33fcec8ce')
>>> Account.recoverHash(msghash, vrs=vrs)
'0x5ce9454909639D2D17A3F753ce7d93fa0b9aB12E'
# All of these recover calls are equivalent:
# variations on msghash
>>> msghash = b"\\x14v\\xab\\xb7E\\xd4#\\xbf\\t'?\\x1a\\xfd\\x88}\\x95\\x11\\x81\\xd2Z\\xdcf\\xc4\\x83JpI\\x19\\x11\\xb7\\xf7P" # noqa: E501
>>> Account.recoverHash(msghash, vrs=vrs)
>>> msghash = 0x1476abb745d423bf09273f1afd887d951181d25adc66c4834a70491911b7f750
>>> Account.recoverHash(msghash, vrs=vrs)
# variations on vrs
>>> vrs = (
'0x1c',
'0xe6ca9bba58c88611fad66a6ce8f996908195593807c4b38bd528d2cff09d4eb3',
'0x3e5bfbbf4d3e39b1a2fd816a7680c19ebebaf3a141b239934ad43cb33fcec8ce')
>>> Account.recoverHash(msghash, vrs=vrs)
>>> vrs = (
b'\\x1c',
b'\\xe6\\xca\\x9b\\xbaX\\xc8\\x86\\x11\\xfa\\xd6jl\\xe8\\xf9\\x96\\x90\\x81\\x95Y8\\x07\\xc4\\xb3\\x8b\\xd5(\\xd2\\xcf\\xf0\\x9dN\\xb3', # noqa: E501
b'>[\\xfb\\xbfM>9\\xb1\\xa2\\xfd\\x81jv\\x80\\xc1\\x9e\\xbe\\xba\\xf3\\xa1A\\xb29\\x93J\\xd4<\\xb3?\\xce\\xc8\\xce') # noqa: E501
>>> Account.recoverHash(msghash, vrs=vrs)
>>> vrs = (
0x1c,
0xe6ca9bba58c88611fad66a6ce8f996908195593807c4b38bd528d2cff09d4eb3,
0x3e5bfbbf4d3e39b1a2fd816a7680c19ebebaf3a141b239934ad43cb33fcec8ce)
>>> Account.recoverHash(msghash, vrs=vrs)
# variations on signature
>>> signature = '0xe6ca9bba58c88611fad66a6ce8f996908195593807c4b38bd528d2cff09d4eb33e5bfbbf4d3e39b1a2fd816a7680c19ebebaf3a141b239934ad43cb33fcec8ce1c' # noqa: E501
>>> Account.recoverHash(msghash, signature=signature)
>>> signature = b'\\xe6\\xca\\x9b\\xbaX\\xc8\\x86\\x11\\xfa\\xd6jl\\xe8\\xf9\\x96\\x90\\x81\\x95Y8\\x07\\xc4\\xb3\\x8b\\xd5(\\xd2\\xcf\\xf0\\x9dN\\xb3>[\\xfb\\xbfM>9\\xb1\\xa2\\xfd\\x81jv\\x80\\xc1\\x9e\\xbe\\xba\\xf3\\xa1A\\xb29\\x93J\\xd4<\\xb3?\\xce\\xc8\\xce\\x1c' # noqa: E501
>>> Account.recoverHash(msghash, signature=signature)
>>> signature = 0xe6ca9bba58c88611fad66a6ce8f996908195593807c4b38bd528d2cff09d4eb33e5bfbbf4d3e39b1a2fd816a7680c19ebebaf3a141b239934ad43cb33fcec8ce1c # noqa: E501
>>> Account.recoverHash(msghash, signature=signature)
'''
hash_bytes = HexBytes(message_hash)
if len(hash_bytes) != 32:
raise ValueError("The message hash must be exactly 32-bytes")
if vrs is not None:
v, r, s = map(hexstr_if_str(to_int), vrs)
v_standard = to_standard_v(v)
signature_obj = self._keys.Signature(vrs=(v_standard, r, s))
elif signature is not None:
signature_bytes = HexBytes(signature)
signature_bytes_standard = to_standard_signature_bytes(signature_bytes)
signature_obj = self._keys.Signature(signature_bytes=signature_bytes_standard)
else:
raise TypeError("You must supply the vrs tuple or the signature bytes")
pubkey = signature_obj.recover_public_key_from_msg_hash(hash_bytes)
return pubkey.to_checksum_address()
@combomethod
def recoverTransaction(self, serialized_transaction):
'''
Get the address of the account that signed this transaction.
:param serialized_transaction: the complete signed transaction
:type serialized_transaction: hex str, bytes or int
:returns: address of signer, hex-encoded & checksummed
:rtype: str
.. code-block:: python
>>> raw_transaction = '0xf86a8086d55698372431831e848094f0109fc8df283027b6285cc889f5aa624eac1f55843b9aca008025a009ebb6ca057a0535d6186462bc0b465b561c94a295bdb0621fc19208ab149a9ca0440ffd775ce91a833ab410777204d5341a6f9fa91216a6f3ee2c051fea6a0428', # noqa: E501
>>> Account.recoverTransaction(raw_transaction)
'0x2c7536E3605D9C16a7a3D7b1898e529396a65c23'
'''
txn_bytes = HexBytes(serialized_transaction)
txn = Transaction.from_bytes(txn_bytes)
msg_hash = hash_of_signed_transaction(txn)
return self.recoverHash(msg_hash, vrs=vrs_from(txn))
def setKeyBackend(self, backend):
'''
Change the backend used by the underlying eth-keys library.
*(The default is fine for most users)*
:param backend: any backend that works in
`eth_keys.KeyApi(backend) <https://github.com/ethereum/eth-keys/#keyapibackendnone>`_
'''
self._keys = KeyAPI(backend)
@combomethod
def signHash(self, message_hash, private_key):
'''
Sign the hash provided.
.. WARNING:: *Never* sign a hash that you didn't generate,
it can be an arbitrary transaction. For example, it might
send all of your account's ether to an attacker.
If you would like compatibility with
:meth:`w3.eth.sign() <web3.eth.Eth.sign>`
you can use :meth:`~eth_account.messages.defunct_hash_message`.
Several other message standards are proposed, but none have a clear
consensus. You'll need to manually comply with any of those message standards manually.
:param message_hash: the 32-byte message hash to be signed
:type message_hash: hex str, bytes or int
:param private_key: the key to sign the message with
:type private_key: hex str, bytes, int or :class:`eth_keys.datatypes.PrivateKey`
:returns: Various details about the signature - most
importantly the fields: v, r, and s
:rtype: ~eth_account.datastructures.AttributeDict
.. code-block:: python
>>> msg = "I♥SF"
>>> from eth_account.messages import defunct_hash_message
>>> msghash = defunct_hash_message(text=msg)
HexBytes('0x1476abb745d423bf09273f1afd887d951181d25adc66c4834a70491911b7f750')
>>> key = "0xb25c7db31feed9122727bf0939dc769a96564b2de4c4726d035b36ecf1e5b364"
>>> Account.signHash(msghash, key)
{'messageHash': HexBytes('0x1476abb745d423bf09273f1afd887d951181d25adc66c4834a70491911b7f750'), # noqa: E501
'r': 104389933075820307925104709181714897380569894203213074526835978196648170704563,
's': 28205917190874851400050446352651915501321657673772411533993420917949420456142,
'signature': HexBytes('0xe6ca9bba58c88611fad66a6ce8f996908195593807c4b38bd528d2cff09d4eb33e5bfbbf4d3e39b1a2fd816a7680c19ebebaf3a141b239934ad43cb33fcec8ce1c'), # noqa: E501
'v': 28}
# these are equivalent:
>>> Account.signHash(
0x1476abb745d423bf09273f1afd887d951181d25adc66c4834a70491911b7f750,
key
)
>>> Account.signHash(
"0x1476abb745d423bf09273f1afd887d951181d25adc66c4834a70491911b7f750",
key
)
'''
msg_hash_bytes = HexBytes(message_hash)
if len(msg_hash_bytes) != 32:
raise ValueError("The message hash must be exactly 32-bytes")
key = self._parsePrivateKey(private_key)
(v, r, s, eth_signature_bytes) = sign_message_hash(key, msg_hash_bytes)
return AttributeDict({
'messageHash': msg_hash_bytes,
'r': r,
's': s,
'v': v,
'signature': HexBytes(eth_signature_bytes),
})
@combomethod
def signTransaction(self, transaction_dict, private_key):
'''
Sign a transaction using a local private key. Produces signature details
and the hex-encoded transaction suitable for broadcast using
:meth:`w3.eth.sendRawTransaction() <web3.eth.Eth.sendRawTransaction>`.
Create the transaction dict for a contract method with
`my_contract.functions.my_function().buildTransaction()
<http://web3py.readthedocs.io/en/latest/contracts.html#methods>`_
:param dict transaction_dict: the transaction with keys:
nonce, chainId, to, data, value, gas, and gasPrice.
:param private_key: the private key to sign the data with
:type private_key: hex str, bytes, int or :class:`eth_keys.datatypes.PrivateKey`
:returns: Various details about the signature - most
importantly the fields: v, r, and s
:rtype: AttributeDict
.. code-block:: python
>>> transaction = {
# Note that the address must be in checksum format or native bytes:
'to': '0xF0109fC8DF283027b6285cc889F5aA624EaC1F55',
'value': 1000000000,
'gas': 2000000,
'gasPrice': 234567897654321,
'nonce': 0,
'chainId': 1
}
>>> key = '0x4c0883a69102937d6231471b5dbb6204fe5129617082792ae468d01a3f362318'
>>> signed = Account.signTransaction(transaction, key)
{'hash': HexBytes('0x6893a6ee8df79b0f5d64a180cd1ef35d030f3e296a5361cf04d02ce720d32ec5'),
'r': 4487286261793418179817841024889747115779324305375823110249149479905075174044,
'rawTransaction': HexBytes('0xf86a8086d55698372431831e848094f0109fc8df283027b6285cc889f5aa624eac1f55843b9aca008025a009ebb6ca057a0535d6186462bc0b465b561c94a295bdb0621fc19208ab149a9ca0440ffd775ce91a833ab410777204d5341a6f9fa91216a6f3ee2c051fea6a0428'), # noqa: E501
's': 30785525769477805655994251009256770582792548537338581640010273753578382951464,
'v': 37}
>>> w3.eth.sendRawTransaction(signed.rawTransaction)
'''
if not isinstance(transaction_dict, Mapping):
raise TypeError("transaction_dict must be dict-like, got %r" % transaction_dict)
account = self.privateKeyToAccount(private_key)
# allow from field, *only* if it matches the private key
if 'from' in transaction_dict:
if transaction_dict['from'] == account.address:
sanitized_transaction = dissoc(transaction_dict, 'from')
else:
raise TypeError("from field must match key's %s, but it was %s" % (
account.address,
transaction_dict['from'],
))
else:
sanitized_transaction = transaction_dict
# sign transaction
(
v,
r,
s,
rlp_encoded,
) = sign_transaction_dict(account._key_obj, sanitized_transaction)
transaction_hash = keccak(rlp_encoded)
return AttributeDict({
'rawTransaction': HexBytes(rlp_encoded),
'hash': HexBytes(transaction_hash),
'r': r,
's': s,
'v': v,
})
@combomethod
def _parsePrivateKey(self, key):
'''
Generate a :class:`eth_keys.datatypes.PrivateKey` from the provided key. If the
key is already of type :class:`eth_keys.datatypes.PrivateKey`, return the key.
:param key: the private key from which a :class:`eth_keys.datatypes.PrivateKey`
will be generated
:type key: hex str, bytes, int or :class:`eth_keys.datatypes.PrivateKey`
:returns: the provided key represented as a :class:`eth_keys.datatypes.PrivateKey`
'''
if isinstance(key, self._keys.PrivateKey):
return key
try:
return self._keys.PrivateKey(HexBytes(key))
except ValidationError as original_exception:
raise ValueError(
"The private key must be exactly 32 bytes long, instead of "
"%d bytes." % len(key)
) from original_exception
|
ethereum/eth-account
|
eth_account/account.py
|
Account.encrypt
|
python
|
def encrypt(cls, private_key, password, kdf=None, iterations=None):
'''
Creates a dictionary with an encrypted version of your private key.
To import this keyfile into Ethereum clients like geth and parity:
encode this dictionary with :func:`json.dumps` and save it to disk where your
client keeps key files.
:param private_key: The raw private key
:type private_key: hex str, bytes, int or :class:`eth_keys.datatypes.PrivateKey`
:param str password: The password which you will need to unlock the account in your client
:param str kdf: The key derivation function to use when encrypting your private key
:param int iterations: The work factor for the key derivation function
:returns: The data to use in your encrypted file
:rtype: dict
.. code-block:: python
>>> import getpass
>>> encrypted = Account.encrypt(
0xb25c7db31feed9122727bf0939dc769a96564b2de4c4726d035b36ecf1e5b364,
getpass.getpass()
)
{
'address': '5ce9454909639d2d17a3f753ce7d93fa0b9ab12e',
'crypto': {
'cipher': 'aes-128-ctr',
'cipherparams': {
'iv': '0b7845a5c3597d3d378bde9b7c7319b7'
},
'ciphertext': 'a494f1feb3c854e99c1ff01e6aaa17d43c0752009073503b908457dc8de5d2a5', # noqa: E501
'kdf': 'scrypt',
'kdfparams': {
'dklen': 32,
'n': 262144,
'p': 8,
'r': 1,
'salt': '13c4a48123affaa29189e9097726c698'
},
'mac': 'f4cfb027eb0af9bd7a320b4374a3fa7bef02cfbafe0ec5d1fd7ad129401de0b1'
},
'id': 'a60e0578-0e5b-4a75-b991-d55ec6451a6f',
'version': 3
}
>>> with open('my-keyfile', 'w') as f:
f.write(json.dumps(encrypted))
'''
if isinstance(private_key, keys.PrivateKey):
key_bytes = private_key.to_bytes()
else:
key_bytes = HexBytes(private_key)
if kdf is None:
kdf = cls.default_kdf
password_bytes = text_if_str(to_bytes, password)
assert len(key_bytes) == 32
return create_keyfile_json(key_bytes, password_bytes, kdf=kdf, iterations=iterations)
|
Creates a dictionary with an encrypted version of your private key.
To import this keyfile into Ethereum clients like geth and parity:
encode this dictionary with :func:`json.dumps` and save it to disk where your
client keeps key files.
:param private_key: The raw private key
:type private_key: hex str, bytes, int or :class:`eth_keys.datatypes.PrivateKey`
:param str password: The password which you will need to unlock the account in your client
:param str kdf: The key derivation function to use when encrypting your private key
:param int iterations: The work factor for the key derivation function
:returns: The data to use in your encrypted file
:rtype: dict
.. code-block:: python
>>> import getpass
>>> encrypted = Account.encrypt(
0xb25c7db31feed9122727bf0939dc769a96564b2de4c4726d035b36ecf1e5b364,
getpass.getpass()
)
{
'address': '5ce9454909639d2d17a3f753ce7d93fa0b9ab12e',
'crypto': {
'cipher': 'aes-128-ctr',
'cipherparams': {
'iv': '0b7845a5c3597d3d378bde9b7c7319b7'
},
'ciphertext': 'a494f1feb3c854e99c1ff01e6aaa17d43c0752009073503b908457dc8de5d2a5', # noqa: E501
'kdf': 'scrypt',
'kdfparams': {
'dklen': 32,
'n': 262144,
'p': 8,
'r': 1,
'salt': '13c4a48123affaa29189e9097726c698'
},
'mac': 'f4cfb027eb0af9bd7a320b4374a3fa7bef02cfbafe0ec5d1fd7ad129401de0b1'
},
'id': 'a60e0578-0e5b-4a75-b991-d55ec6451a6f',
'version': 3
}
>>> with open('my-keyfile', 'w') as f:
f.write(json.dumps(encrypted))
|
train
|
https://github.com/ethereum/eth-account/blob/335199b815ae34fea87f1523e2f29777fd52946e/eth_account/account.py#L137-L196
| null |
class Account(object):
'''
This is the primary entry point for working with Ethereum private keys.
It does **not** require a connection to an Ethereum node.
'''
_keys = keys
default_kdf = os.getenv('ETH_ACCOUNT_KDF', 'scrypt')
'''
The default key deriviation function (KDF) to use when encrypting a private key. If the
environment variable :envvar:`ETH_ACCOUNT_KDF` is set, it's value will be used as the default.
Otherwise, 'scrypt' will be used as the default.
'''
@combomethod
def create(self, extra_entropy=''):
'''
Creates a new private key, and returns it as a :class:`~eth_account.local.LocalAccount`.
:param extra_entropy: Add extra randomness to whatever randomness your OS can provide
:type extra_entropy: str or bytes or int
:returns: an object with private key and convenience methods
.. code-block:: python
>>> from eth_account import Account
>>> acct = Account.create('KEYSMASH FJAFJKLDSKF7JKFDJ 1530')
>>> acct.address
'0x5ce9454909639D2D17A3F753ce7d93fa0b9aB12E'
>>> acct.privateKey
b"\\xb2\\}\\xb3\\x1f\\xee\\xd9\\x12''\\xbf\\t9\\xdcv\\x9a\\x96VK-\\xe4\\xc4rm\\x03[6\\xec\\xf1\\xe5\\xb3d"
# These methods are also available: signHash(), signTransaction(), encrypt()
# They correspond to the same-named methods in Account.*
# but without the private key argument
'''
extra_key_bytes = text_if_str(to_bytes, extra_entropy)
key_bytes = keccak(os.urandom(32) + extra_key_bytes)
return self.privateKeyToAccount(key_bytes)
@staticmethod
def decrypt(keyfile_json, password):
'''
Decrypts a private key that was encrypted using an Ethereum client or
:meth:`~Account.encrypt`.
:param keyfile_json: The encrypted key
:type keyfile_json: dict or str
:param str password: The password that was used to encrypt the key
:returns: the raw private key
:rtype: ~hexbytes.main.HexBytes
.. code-block:: python
>>> encrypted = {
'address': '5ce9454909639d2d17a3f753ce7d93fa0b9ab12e',
'crypto': {'cipher': 'aes-128-ctr',
'cipherparams': {'iv': '78f214584844e0b241b433d7c3bb8d5f'},
'ciphertext': 'd6dbb56e4f54ba6db2e8dc14df17cb7352fdce03681dd3f90ce4b6c1d5af2c4f',
'kdf': 'pbkdf2',
'kdfparams': {'c': 1000000,
'dklen': 32,
'prf': 'hmac-sha256',
'salt': '45cf943b4de2c05c2c440ef96af914a2'},
'mac': 'f5e1af09df5ded25c96fcf075ada313fb6f79735a914adc8cb02e8ddee7813c3'},
'id': 'b812f3f9-78cc-462a-9e89-74418aa27cb0',
'version': 3}
>>> import getpass
>>> Account.decrypt(encrypted, getpass.getpass())
HexBytes('0xb25c7db31feed9122727bf0939dc769a96564b2de4c4726d035b36ecf1e5b364')
'''
if isinstance(keyfile_json, str):
keyfile = json.loads(keyfile_json)
elif is_dict(keyfile_json):
keyfile = keyfile_json
else:
raise TypeError("The keyfile should be supplied as a JSON string, or a dictionary.")
password_bytes = text_if_str(to_bytes, password)
return HexBytes(decode_keyfile_json(keyfile, password_bytes))
@classmethod
@combomethod
def privateKeyToAccount(self, private_key):
'''
Returns a convenient object for working with the given private key.
:param private_key: The raw private key
:type private_key: hex str, bytes, int or :class:`eth_keys.datatypes.PrivateKey`
:return: object with methods for signing and encrypting
:rtype: LocalAccount
.. code-block:: python
>>> acct = Account.privateKeyToAccount(
0xb25c7db31feed9122727bf0939dc769a96564b2de4c4726d035b36ecf1e5b364)
>>> acct.address
'0x5ce9454909639D2D17A3F753ce7d93fa0b9aB12E'
>>> acct.privateKey
b"\\xb2\\}\\xb3\\x1f\\xee\\xd9\\x12''\\xbf\\t9\\xdcv\\x9a\\x96VK-\\xe4\\xc4rm\\x03[6\\xec\\xf1\\xe5\\xb3d"
# These methods are also available: signHash(), signTransaction(), encrypt()
# They correspond to the same-named methods in Account.*
# but without the private key argument
'''
key = self._parsePrivateKey(private_key)
return LocalAccount(key, self)
@combomethod
def recoverHash(self, message_hash, vrs=None, signature=None):
'''
Get the address of the account that signed the message with the given hash.
You must specify exactly one of: vrs or signature
:param message_hash: the hash of the message that you want to verify
:type message_hash: hex str or bytes or int
:param vrs: the three pieces generated by an elliptic curve signature
:type vrs: tuple(v, r, s), each element is hex str, bytes or int
:param signature: signature bytes concatenated as r+s+v
:type signature: hex str or bytes or int
:returns: address of signer, hex-encoded & checksummed
:rtype: str
.. code-block:: python
>>> msg = "I♥SF"
>>> msghash = '0x1476abb745d423bf09273f1afd887d951181d25adc66c4834a70491911b7f750'
>>> vrs = (
28,
'0xe6ca9bba58c88611fad66a6ce8f996908195593807c4b38bd528d2cff09d4eb3',
'0x3e5bfbbf4d3e39b1a2fd816a7680c19ebebaf3a141b239934ad43cb33fcec8ce')
>>> Account.recoverHash(msghash, vrs=vrs)
'0x5ce9454909639D2D17A3F753ce7d93fa0b9aB12E'
# All of these recover calls are equivalent:
# variations on msghash
>>> msghash = b"\\x14v\\xab\\xb7E\\xd4#\\xbf\\t'?\\x1a\\xfd\\x88}\\x95\\x11\\x81\\xd2Z\\xdcf\\xc4\\x83JpI\\x19\\x11\\xb7\\xf7P" # noqa: E501
>>> Account.recoverHash(msghash, vrs=vrs)
>>> msghash = 0x1476abb745d423bf09273f1afd887d951181d25adc66c4834a70491911b7f750
>>> Account.recoverHash(msghash, vrs=vrs)
# variations on vrs
>>> vrs = (
'0x1c',
'0xe6ca9bba58c88611fad66a6ce8f996908195593807c4b38bd528d2cff09d4eb3',
'0x3e5bfbbf4d3e39b1a2fd816a7680c19ebebaf3a141b239934ad43cb33fcec8ce')
>>> Account.recoverHash(msghash, vrs=vrs)
>>> vrs = (
b'\\x1c',
b'\\xe6\\xca\\x9b\\xbaX\\xc8\\x86\\x11\\xfa\\xd6jl\\xe8\\xf9\\x96\\x90\\x81\\x95Y8\\x07\\xc4\\xb3\\x8b\\xd5(\\xd2\\xcf\\xf0\\x9dN\\xb3', # noqa: E501
b'>[\\xfb\\xbfM>9\\xb1\\xa2\\xfd\\x81jv\\x80\\xc1\\x9e\\xbe\\xba\\xf3\\xa1A\\xb29\\x93J\\xd4<\\xb3?\\xce\\xc8\\xce') # noqa: E501
>>> Account.recoverHash(msghash, vrs=vrs)
>>> vrs = (
0x1c,
0xe6ca9bba58c88611fad66a6ce8f996908195593807c4b38bd528d2cff09d4eb3,
0x3e5bfbbf4d3e39b1a2fd816a7680c19ebebaf3a141b239934ad43cb33fcec8ce)
>>> Account.recoverHash(msghash, vrs=vrs)
# variations on signature
>>> signature = '0xe6ca9bba58c88611fad66a6ce8f996908195593807c4b38bd528d2cff09d4eb33e5bfbbf4d3e39b1a2fd816a7680c19ebebaf3a141b239934ad43cb33fcec8ce1c' # noqa: E501
>>> Account.recoverHash(msghash, signature=signature)
>>> signature = b'\\xe6\\xca\\x9b\\xbaX\\xc8\\x86\\x11\\xfa\\xd6jl\\xe8\\xf9\\x96\\x90\\x81\\x95Y8\\x07\\xc4\\xb3\\x8b\\xd5(\\xd2\\xcf\\xf0\\x9dN\\xb3>[\\xfb\\xbfM>9\\xb1\\xa2\\xfd\\x81jv\\x80\\xc1\\x9e\\xbe\\xba\\xf3\\xa1A\\xb29\\x93J\\xd4<\\xb3?\\xce\\xc8\\xce\\x1c' # noqa: E501
>>> Account.recoverHash(msghash, signature=signature)
>>> signature = 0xe6ca9bba58c88611fad66a6ce8f996908195593807c4b38bd528d2cff09d4eb33e5bfbbf4d3e39b1a2fd816a7680c19ebebaf3a141b239934ad43cb33fcec8ce1c # noqa: E501
>>> Account.recoverHash(msghash, signature=signature)
'''
hash_bytes = HexBytes(message_hash)
if len(hash_bytes) != 32:
raise ValueError("The message hash must be exactly 32-bytes")
if vrs is not None:
v, r, s = map(hexstr_if_str(to_int), vrs)
v_standard = to_standard_v(v)
signature_obj = self._keys.Signature(vrs=(v_standard, r, s))
elif signature is not None:
signature_bytes = HexBytes(signature)
signature_bytes_standard = to_standard_signature_bytes(signature_bytes)
signature_obj = self._keys.Signature(signature_bytes=signature_bytes_standard)
else:
raise TypeError("You must supply the vrs tuple or the signature bytes")
pubkey = signature_obj.recover_public_key_from_msg_hash(hash_bytes)
return pubkey.to_checksum_address()
@combomethod
def recoverTransaction(self, serialized_transaction):
'''
Get the address of the account that signed this transaction.
:param serialized_transaction: the complete signed transaction
:type serialized_transaction: hex str, bytes or int
:returns: address of signer, hex-encoded & checksummed
:rtype: str
.. code-block:: python
>>> raw_transaction = '0xf86a8086d55698372431831e848094f0109fc8df283027b6285cc889f5aa624eac1f55843b9aca008025a009ebb6ca057a0535d6186462bc0b465b561c94a295bdb0621fc19208ab149a9ca0440ffd775ce91a833ab410777204d5341a6f9fa91216a6f3ee2c051fea6a0428', # noqa: E501
>>> Account.recoverTransaction(raw_transaction)
'0x2c7536E3605D9C16a7a3D7b1898e529396a65c23'
'''
txn_bytes = HexBytes(serialized_transaction)
txn = Transaction.from_bytes(txn_bytes)
msg_hash = hash_of_signed_transaction(txn)
return self.recoverHash(msg_hash, vrs=vrs_from(txn))
def setKeyBackend(self, backend):
'''
Change the backend used by the underlying eth-keys library.
*(The default is fine for most users)*
:param backend: any backend that works in
`eth_keys.KeyApi(backend) <https://github.com/ethereum/eth-keys/#keyapibackendnone>`_
'''
self._keys = KeyAPI(backend)
@combomethod
def signHash(self, message_hash, private_key):
'''
Sign the hash provided.
.. WARNING:: *Never* sign a hash that you didn't generate,
it can be an arbitrary transaction. For example, it might
send all of your account's ether to an attacker.
If you would like compatibility with
:meth:`w3.eth.sign() <web3.eth.Eth.sign>`
you can use :meth:`~eth_account.messages.defunct_hash_message`.
Several other message standards are proposed, but none have a clear
consensus. You'll need to manually comply with any of those message standards manually.
:param message_hash: the 32-byte message hash to be signed
:type message_hash: hex str, bytes or int
:param private_key: the key to sign the message with
:type private_key: hex str, bytes, int or :class:`eth_keys.datatypes.PrivateKey`
:returns: Various details about the signature - most
importantly the fields: v, r, and s
:rtype: ~eth_account.datastructures.AttributeDict
.. code-block:: python
>>> msg = "I♥SF"
>>> from eth_account.messages import defunct_hash_message
>>> msghash = defunct_hash_message(text=msg)
HexBytes('0x1476abb745d423bf09273f1afd887d951181d25adc66c4834a70491911b7f750')
>>> key = "0xb25c7db31feed9122727bf0939dc769a96564b2de4c4726d035b36ecf1e5b364"
>>> Account.signHash(msghash, key)
{'messageHash': HexBytes('0x1476abb745d423bf09273f1afd887d951181d25adc66c4834a70491911b7f750'), # noqa: E501
'r': 104389933075820307925104709181714897380569894203213074526835978196648170704563,
's': 28205917190874851400050446352651915501321657673772411533993420917949420456142,
'signature': HexBytes('0xe6ca9bba58c88611fad66a6ce8f996908195593807c4b38bd528d2cff09d4eb33e5bfbbf4d3e39b1a2fd816a7680c19ebebaf3a141b239934ad43cb33fcec8ce1c'), # noqa: E501
'v': 28}
# these are equivalent:
>>> Account.signHash(
0x1476abb745d423bf09273f1afd887d951181d25adc66c4834a70491911b7f750,
key
)
>>> Account.signHash(
"0x1476abb745d423bf09273f1afd887d951181d25adc66c4834a70491911b7f750",
key
)
'''
msg_hash_bytes = HexBytes(message_hash)
if len(msg_hash_bytes) != 32:
raise ValueError("The message hash must be exactly 32-bytes")
key = self._parsePrivateKey(private_key)
(v, r, s, eth_signature_bytes) = sign_message_hash(key, msg_hash_bytes)
return AttributeDict({
'messageHash': msg_hash_bytes,
'r': r,
's': s,
'v': v,
'signature': HexBytes(eth_signature_bytes),
})
@combomethod
def signTransaction(self, transaction_dict, private_key):
'''
Sign a transaction using a local private key. Produces signature details
and the hex-encoded transaction suitable for broadcast using
:meth:`w3.eth.sendRawTransaction() <web3.eth.Eth.sendRawTransaction>`.
Create the transaction dict for a contract method with
`my_contract.functions.my_function().buildTransaction()
<http://web3py.readthedocs.io/en/latest/contracts.html#methods>`_
:param dict transaction_dict: the transaction with keys:
nonce, chainId, to, data, value, gas, and gasPrice.
:param private_key: the private key to sign the data with
:type private_key: hex str, bytes, int or :class:`eth_keys.datatypes.PrivateKey`
:returns: Various details about the signature - most
importantly the fields: v, r, and s
:rtype: AttributeDict
.. code-block:: python
>>> transaction = {
# Note that the address must be in checksum format or native bytes:
'to': '0xF0109fC8DF283027b6285cc889F5aA624EaC1F55',
'value': 1000000000,
'gas': 2000000,
'gasPrice': 234567897654321,
'nonce': 0,
'chainId': 1
}
>>> key = '0x4c0883a69102937d6231471b5dbb6204fe5129617082792ae468d01a3f362318'
>>> signed = Account.signTransaction(transaction, key)
{'hash': HexBytes('0x6893a6ee8df79b0f5d64a180cd1ef35d030f3e296a5361cf04d02ce720d32ec5'),
'r': 4487286261793418179817841024889747115779324305375823110249149479905075174044,
'rawTransaction': HexBytes('0xf86a8086d55698372431831e848094f0109fc8df283027b6285cc889f5aa624eac1f55843b9aca008025a009ebb6ca057a0535d6186462bc0b465b561c94a295bdb0621fc19208ab149a9ca0440ffd775ce91a833ab410777204d5341a6f9fa91216a6f3ee2c051fea6a0428'), # noqa: E501
's': 30785525769477805655994251009256770582792548537338581640010273753578382951464,
'v': 37}
>>> w3.eth.sendRawTransaction(signed.rawTransaction)
'''
if not isinstance(transaction_dict, Mapping):
raise TypeError("transaction_dict must be dict-like, got %r" % transaction_dict)
account = self.privateKeyToAccount(private_key)
# allow from field, *only* if it matches the private key
if 'from' in transaction_dict:
if transaction_dict['from'] == account.address:
sanitized_transaction = dissoc(transaction_dict, 'from')
else:
raise TypeError("from field must match key's %s, but it was %s" % (
account.address,
transaction_dict['from'],
))
else:
sanitized_transaction = transaction_dict
# sign transaction
(
v,
r,
s,
rlp_encoded,
) = sign_transaction_dict(account._key_obj, sanitized_transaction)
transaction_hash = keccak(rlp_encoded)
return AttributeDict({
'rawTransaction': HexBytes(rlp_encoded),
'hash': HexBytes(transaction_hash),
'r': r,
's': s,
'v': v,
})
@combomethod
def _parsePrivateKey(self, key):
'''
Generate a :class:`eth_keys.datatypes.PrivateKey` from the provided key. If the
key is already of type :class:`eth_keys.datatypes.PrivateKey`, return the key.
:param key: the private key from which a :class:`eth_keys.datatypes.PrivateKey`
will be generated
:type key: hex str, bytes, int or :class:`eth_keys.datatypes.PrivateKey`
:returns: the provided key represented as a :class:`eth_keys.datatypes.PrivateKey`
'''
if isinstance(key, self._keys.PrivateKey):
return key
try:
return self._keys.PrivateKey(HexBytes(key))
except ValidationError as original_exception:
raise ValueError(
"The private key must be exactly 32 bytes long, instead of "
"%d bytes." % len(key)
) from original_exception
|
ethereum/eth-account
|
eth_account/account.py
|
Account.privateKeyToAccount
|
python
|
def privateKeyToAccount(self, private_key):
'''
Returns a convenient object for working with the given private key.
:param private_key: The raw private key
:type private_key: hex str, bytes, int or :class:`eth_keys.datatypes.PrivateKey`
:return: object with methods for signing and encrypting
:rtype: LocalAccount
.. code-block:: python
>>> acct = Account.privateKeyToAccount(
0xb25c7db31feed9122727bf0939dc769a96564b2de4c4726d035b36ecf1e5b364)
>>> acct.address
'0x5ce9454909639D2D17A3F753ce7d93fa0b9aB12E'
>>> acct.privateKey
b"\\xb2\\}\\xb3\\x1f\\xee\\xd9\\x12''\\xbf\\t9\\xdcv\\x9a\\x96VK-\\xe4\\xc4rm\\x03[6\\xec\\xf1\\xe5\\xb3d"
# These methods are also available: signHash(), signTransaction(), encrypt()
# They correspond to the same-named methods in Account.*
# but without the private key argument
'''
key = self._parsePrivateKey(private_key)
return LocalAccount(key, self)
|
Returns a convenient object for working with the given private key.
:param private_key: The raw private key
:type private_key: hex str, bytes, int or :class:`eth_keys.datatypes.PrivateKey`
:return: object with methods for signing and encrypting
:rtype: LocalAccount
.. code-block:: python
>>> acct = Account.privateKeyToAccount(
0xb25c7db31feed9122727bf0939dc769a96564b2de4c4726d035b36ecf1e5b364)
>>> acct.address
'0x5ce9454909639D2D17A3F753ce7d93fa0b9aB12E'
>>> acct.privateKey
b"\\xb2\\}\\xb3\\x1f\\xee\\xd9\\x12''\\xbf\\t9\\xdcv\\x9a\\x96VK-\\xe4\\xc4rm\\x03[6\\xec\\xf1\\xe5\\xb3d"
# These methods are also available: signHash(), signTransaction(), encrypt()
# They correspond to the same-named methods in Account.*
# but without the private key argument
|
train
|
https://github.com/ethereum/eth-account/blob/335199b815ae34fea87f1523e2f29777fd52946e/eth_account/account.py#L199-L222
| null |
class Account(object):
'''
This is the primary entry point for working with Ethereum private keys.
It does **not** require a connection to an Ethereum node.
'''
_keys = keys
default_kdf = os.getenv('ETH_ACCOUNT_KDF', 'scrypt')
'''
The default key deriviation function (KDF) to use when encrypting a private key. If the
environment variable :envvar:`ETH_ACCOUNT_KDF` is set, it's value will be used as the default.
Otherwise, 'scrypt' will be used as the default.
'''
@combomethod
def create(self, extra_entropy=''):
'''
Creates a new private key, and returns it as a :class:`~eth_account.local.LocalAccount`.
:param extra_entropy: Add extra randomness to whatever randomness your OS can provide
:type extra_entropy: str or bytes or int
:returns: an object with private key and convenience methods
.. code-block:: python
>>> from eth_account import Account
>>> acct = Account.create('KEYSMASH FJAFJKLDSKF7JKFDJ 1530')
>>> acct.address
'0x5ce9454909639D2D17A3F753ce7d93fa0b9aB12E'
>>> acct.privateKey
b"\\xb2\\}\\xb3\\x1f\\xee\\xd9\\x12''\\xbf\\t9\\xdcv\\x9a\\x96VK-\\xe4\\xc4rm\\x03[6\\xec\\xf1\\xe5\\xb3d"
# These methods are also available: signHash(), signTransaction(), encrypt()
# They correspond to the same-named methods in Account.*
# but without the private key argument
'''
extra_key_bytes = text_if_str(to_bytes, extra_entropy)
key_bytes = keccak(os.urandom(32) + extra_key_bytes)
return self.privateKeyToAccount(key_bytes)
@staticmethod
def decrypt(keyfile_json, password):
'''
Decrypts a private key that was encrypted using an Ethereum client or
:meth:`~Account.encrypt`.
:param keyfile_json: The encrypted key
:type keyfile_json: dict or str
:param str password: The password that was used to encrypt the key
:returns: the raw private key
:rtype: ~hexbytes.main.HexBytes
.. code-block:: python
>>> encrypted = {
'address': '5ce9454909639d2d17a3f753ce7d93fa0b9ab12e',
'crypto': {'cipher': 'aes-128-ctr',
'cipherparams': {'iv': '78f214584844e0b241b433d7c3bb8d5f'},
'ciphertext': 'd6dbb56e4f54ba6db2e8dc14df17cb7352fdce03681dd3f90ce4b6c1d5af2c4f',
'kdf': 'pbkdf2',
'kdfparams': {'c': 1000000,
'dklen': 32,
'prf': 'hmac-sha256',
'salt': '45cf943b4de2c05c2c440ef96af914a2'},
'mac': 'f5e1af09df5ded25c96fcf075ada313fb6f79735a914adc8cb02e8ddee7813c3'},
'id': 'b812f3f9-78cc-462a-9e89-74418aa27cb0',
'version': 3}
>>> import getpass
>>> Account.decrypt(encrypted, getpass.getpass())
HexBytes('0xb25c7db31feed9122727bf0939dc769a96564b2de4c4726d035b36ecf1e5b364')
'''
if isinstance(keyfile_json, str):
keyfile = json.loads(keyfile_json)
elif is_dict(keyfile_json):
keyfile = keyfile_json
else:
raise TypeError("The keyfile should be supplied as a JSON string, or a dictionary.")
password_bytes = text_if_str(to_bytes, password)
return HexBytes(decode_keyfile_json(keyfile, password_bytes))
@classmethod
def encrypt(cls, private_key, password, kdf=None, iterations=None):
'''
Creates a dictionary with an encrypted version of your private key.
To import this keyfile into Ethereum clients like geth and parity:
encode this dictionary with :func:`json.dumps` and save it to disk where your
client keeps key files.
:param private_key: The raw private key
:type private_key: hex str, bytes, int or :class:`eth_keys.datatypes.PrivateKey`
:param str password: The password which you will need to unlock the account in your client
:param str kdf: The key derivation function to use when encrypting your private key
:param int iterations: The work factor for the key derivation function
:returns: The data to use in your encrypted file
:rtype: dict
.. code-block:: python
>>> import getpass
>>> encrypted = Account.encrypt(
0xb25c7db31feed9122727bf0939dc769a96564b2de4c4726d035b36ecf1e5b364,
getpass.getpass()
)
{
'address': '5ce9454909639d2d17a3f753ce7d93fa0b9ab12e',
'crypto': {
'cipher': 'aes-128-ctr',
'cipherparams': {
'iv': '0b7845a5c3597d3d378bde9b7c7319b7'
},
'ciphertext': 'a494f1feb3c854e99c1ff01e6aaa17d43c0752009073503b908457dc8de5d2a5', # noqa: E501
'kdf': 'scrypt',
'kdfparams': {
'dklen': 32,
'n': 262144,
'p': 8,
'r': 1,
'salt': '13c4a48123affaa29189e9097726c698'
},
'mac': 'f4cfb027eb0af9bd7a320b4374a3fa7bef02cfbafe0ec5d1fd7ad129401de0b1'
},
'id': 'a60e0578-0e5b-4a75-b991-d55ec6451a6f',
'version': 3
}
>>> with open('my-keyfile', 'w') as f:
f.write(json.dumps(encrypted))
'''
if isinstance(private_key, keys.PrivateKey):
key_bytes = private_key.to_bytes()
else:
key_bytes = HexBytes(private_key)
if kdf is None:
kdf = cls.default_kdf
password_bytes = text_if_str(to_bytes, password)
assert len(key_bytes) == 32
return create_keyfile_json(key_bytes, password_bytes, kdf=kdf, iterations=iterations)
@combomethod
@combomethod
def recoverHash(self, message_hash, vrs=None, signature=None):
'''
Get the address of the account that signed the message with the given hash.
You must specify exactly one of: vrs or signature
:param message_hash: the hash of the message that you want to verify
:type message_hash: hex str or bytes or int
:param vrs: the three pieces generated by an elliptic curve signature
:type vrs: tuple(v, r, s), each element is hex str, bytes or int
:param signature: signature bytes concatenated as r+s+v
:type signature: hex str or bytes or int
:returns: address of signer, hex-encoded & checksummed
:rtype: str
.. code-block:: python
>>> msg = "I♥SF"
>>> msghash = '0x1476abb745d423bf09273f1afd887d951181d25adc66c4834a70491911b7f750'
>>> vrs = (
28,
'0xe6ca9bba58c88611fad66a6ce8f996908195593807c4b38bd528d2cff09d4eb3',
'0x3e5bfbbf4d3e39b1a2fd816a7680c19ebebaf3a141b239934ad43cb33fcec8ce')
>>> Account.recoverHash(msghash, vrs=vrs)
'0x5ce9454909639D2D17A3F753ce7d93fa0b9aB12E'
# All of these recover calls are equivalent:
# variations on msghash
>>> msghash = b"\\x14v\\xab\\xb7E\\xd4#\\xbf\\t'?\\x1a\\xfd\\x88}\\x95\\x11\\x81\\xd2Z\\xdcf\\xc4\\x83JpI\\x19\\x11\\xb7\\xf7P" # noqa: E501
>>> Account.recoverHash(msghash, vrs=vrs)
>>> msghash = 0x1476abb745d423bf09273f1afd887d951181d25adc66c4834a70491911b7f750
>>> Account.recoverHash(msghash, vrs=vrs)
# variations on vrs
>>> vrs = (
'0x1c',
'0xe6ca9bba58c88611fad66a6ce8f996908195593807c4b38bd528d2cff09d4eb3',
'0x3e5bfbbf4d3e39b1a2fd816a7680c19ebebaf3a141b239934ad43cb33fcec8ce')
>>> Account.recoverHash(msghash, vrs=vrs)
>>> vrs = (
b'\\x1c',
b'\\xe6\\xca\\x9b\\xbaX\\xc8\\x86\\x11\\xfa\\xd6jl\\xe8\\xf9\\x96\\x90\\x81\\x95Y8\\x07\\xc4\\xb3\\x8b\\xd5(\\xd2\\xcf\\xf0\\x9dN\\xb3', # noqa: E501
b'>[\\xfb\\xbfM>9\\xb1\\xa2\\xfd\\x81jv\\x80\\xc1\\x9e\\xbe\\xba\\xf3\\xa1A\\xb29\\x93J\\xd4<\\xb3?\\xce\\xc8\\xce') # noqa: E501
>>> Account.recoverHash(msghash, vrs=vrs)
>>> vrs = (
0x1c,
0xe6ca9bba58c88611fad66a6ce8f996908195593807c4b38bd528d2cff09d4eb3,
0x3e5bfbbf4d3e39b1a2fd816a7680c19ebebaf3a141b239934ad43cb33fcec8ce)
>>> Account.recoverHash(msghash, vrs=vrs)
# variations on signature
>>> signature = '0xe6ca9bba58c88611fad66a6ce8f996908195593807c4b38bd528d2cff09d4eb33e5bfbbf4d3e39b1a2fd816a7680c19ebebaf3a141b239934ad43cb33fcec8ce1c' # noqa: E501
>>> Account.recoverHash(msghash, signature=signature)
>>> signature = b'\\xe6\\xca\\x9b\\xbaX\\xc8\\x86\\x11\\xfa\\xd6jl\\xe8\\xf9\\x96\\x90\\x81\\x95Y8\\x07\\xc4\\xb3\\x8b\\xd5(\\xd2\\xcf\\xf0\\x9dN\\xb3>[\\xfb\\xbfM>9\\xb1\\xa2\\xfd\\x81jv\\x80\\xc1\\x9e\\xbe\\xba\\xf3\\xa1A\\xb29\\x93J\\xd4<\\xb3?\\xce\\xc8\\xce\\x1c' # noqa: E501
>>> Account.recoverHash(msghash, signature=signature)
>>> signature = 0xe6ca9bba58c88611fad66a6ce8f996908195593807c4b38bd528d2cff09d4eb33e5bfbbf4d3e39b1a2fd816a7680c19ebebaf3a141b239934ad43cb33fcec8ce1c # noqa: E501
>>> Account.recoverHash(msghash, signature=signature)
'''
hash_bytes = HexBytes(message_hash)
if len(hash_bytes) != 32:
raise ValueError("The message hash must be exactly 32-bytes")
if vrs is not None:
v, r, s = map(hexstr_if_str(to_int), vrs)
v_standard = to_standard_v(v)
signature_obj = self._keys.Signature(vrs=(v_standard, r, s))
elif signature is not None:
signature_bytes = HexBytes(signature)
signature_bytes_standard = to_standard_signature_bytes(signature_bytes)
signature_obj = self._keys.Signature(signature_bytes=signature_bytes_standard)
else:
raise TypeError("You must supply the vrs tuple or the signature bytes")
pubkey = signature_obj.recover_public_key_from_msg_hash(hash_bytes)
return pubkey.to_checksum_address()
@combomethod
def recoverTransaction(self, serialized_transaction):
'''
Get the address of the account that signed this transaction.
:param serialized_transaction: the complete signed transaction
:type serialized_transaction: hex str, bytes or int
:returns: address of signer, hex-encoded & checksummed
:rtype: str
.. code-block:: python
>>> raw_transaction = '0xf86a8086d55698372431831e848094f0109fc8df283027b6285cc889f5aa624eac1f55843b9aca008025a009ebb6ca057a0535d6186462bc0b465b561c94a295bdb0621fc19208ab149a9ca0440ffd775ce91a833ab410777204d5341a6f9fa91216a6f3ee2c051fea6a0428', # noqa: E501
>>> Account.recoverTransaction(raw_transaction)
'0x2c7536E3605D9C16a7a3D7b1898e529396a65c23'
'''
txn_bytes = HexBytes(serialized_transaction)
txn = Transaction.from_bytes(txn_bytes)
msg_hash = hash_of_signed_transaction(txn)
return self.recoverHash(msg_hash, vrs=vrs_from(txn))
def setKeyBackend(self, backend):
'''
Change the backend used by the underlying eth-keys library.
*(The default is fine for most users)*
:param backend: any backend that works in
`eth_keys.KeyApi(backend) <https://github.com/ethereum/eth-keys/#keyapibackendnone>`_
'''
self._keys = KeyAPI(backend)
@combomethod
def signHash(self, message_hash, private_key):
'''
Sign the hash provided.
.. WARNING:: *Never* sign a hash that you didn't generate,
it can be an arbitrary transaction. For example, it might
send all of your account's ether to an attacker.
If you would like compatibility with
:meth:`w3.eth.sign() <web3.eth.Eth.sign>`
you can use :meth:`~eth_account.messages.defunct_hash_message`.
Several other message standards are proposed, but none have a clear
consensus. You'll need to manually comply with any of those message standards manually.
:param message_hash: the 32-byte message hash to be signed
:type message_hash: hex str, bytes or int
:param private_key: the key to sign the message with
:type private_key: hex str, bytes, int or :class:`eth_keys.datatypes.PrivateKey`
:returns: Various details about the signature - most
importantly the fields: v, r, and s
:rtype: ~eth_account.datastructures.AttributeDict
.. code-block:: python
>>> msg = "I♥SF"
>>> from eth_account.messages import defunct_hash_message
>>> msghash = defunct_hash_message(text=msg)
HexBytes('0x1476abb745d423bf09273f1afd887d951181d25adc66c4834a70491911b7f750')
>>> key = "0xb25c7db31feed9122727bf0939dc769a96564b2de4c4726d035b36ecf1e5b364"
>>> Account.signHash(msghash, key)
{'messageHash': HexBytes('0x1476abb745d423bf09273f1afd887d951181d25adc66c4834a70491911b7f750'), # noqa: E501
'r': 104389933075820307925104709181714897380569894203213074526835978196648170704563,
's': 28205917190874851400050446352651915501321657673772411533993420917949420456142,
'signature': HexBytes('0xe6ca9bba58c88611fad66a6ce8f996908195593807c4b38bd528d2cff09d4eb33e5bfbbf4d3e39b1a2fd816a7680c19ebebaf3a141b239934ad43cb33fcec8ce1c'), # noqa: E501
'v': 28}
# these are equivalent:
>>> Account.signHash(
0x1476abb745d423bf09273f1afd887d951181d25adc66c4834a70491911b7f750,
key
)
>>> Account.signHash(
"0x1476abb745d423bf09273f1afd887d951181d25adc66c4834a70491911b7f750",
key
)
'''
msg_hash_bytes = HexBytes(message_hash)
if len(msg_hash_bytes) != 32:
raise ValueError("The message hash must be exactly 32-bytes")
key = self._parsePrivateKey(private_key)
(v, r, s, eth_signature_bytes) = sign_message_hash(key, msg_hash_bytes)
return AttributeDict({
'messageHash': msg_hash_bytes,
'r': r,
's': s,
'v': v,
'signature': HexBytes(eth_signature_bytes),
})
@combomethod
def signTransaction(self, transaction_dict, private_key):
'''
Sign a transaction using a local private key. Produces signature details
and the hex-encoded transaction suitable for broadcast using
:meth:`w3.eth.sendRawTransaction() <web3.eth.Eth.sendRawTransaction>`.
Create the transaction dict for a contract method with
`my_contract.functions.my_function().buildTransaction()
<http://web3py.readthedocs.io/en/latest/contracts.html#methods>`_
:param dict transaction_dict: the transaction with keys:
nonce, chainId, to, data, value, gas, and gasPrice.
:param private_key: the private key to sign the data with
:type private_key: hex str, bytes, int or :class:`eth_keys.datatypes.PrivateKey`
:returns: Various details about the signature - most
importantly the fields: v, r, and s
:rtype: AttributeDict
.. code-block:: python
>>> transaction = {
# Note that the address must be in checksum format or native bytes:
'to': '0xF0109fC8DF283027b6285cc889F5aA624EaC1F55',
'value': 1000000000,
'gas': 2000000,
'gasPrice': 234567897654321,
'nonce': 0,
'chainId': 1
}
>>> key = '0x4c0883a69102937d6231471b5dbb6204fe5129617082792ae468d01a3f362318'
>>> signed = Account.signTransaction(transaction, key)
{'hash': HexBytes('0x6893a6ee8df79b0f5d64a180cd1ef35d030f3e296a5361cf04d02ce720d32ec5'),
'r': 4487286261793418179817841024889747115779324305375823110249149479905075174044,
'rawTransaction': HexBytes('0xf86a8086d55698372431831e848094f0109fc8df283027b6285cc889f5aa624eac1f55843b9aca008025a009ebb6ca057a0535d6186462bc0b465b561c94a295bdb0621fc19208ab149a9ca0440ffd775ce91a833ab410777204d5341a6f9fa91216a6f3ee2c051fea6a0428'), # noqa: E501
's': 30785525769477805655994251009256770582792548537338581640010273753578382951464,
'v': 37}
>>> w3.eth.sendRawTransaction(signed.rawTransaction)
'''
if not isinstance(transaction_dict, Mapping):
raise TypeError("transaction_dict must be dict-like, got %r" % transaction_dict)
account = self.privateKeyToAccount(private_key)
# allow from field, *only* if it matches the private key
if 'from' in transaction_dict:
if transaction_dict['from'] == account.address:
sanitized_transaction = dissoc(transaction_dict, 'from')
else:
raise TypeError("from field must match key's %s, but it was %s" % (
account.address,
transaction_dict['from'],
))
else:
sanitized_transaction = transaction_dict
# sign transaction
(
v,
r,
s,
rlp_encoded,
) = sign_transaction_dict(account._key_obj, sanitized_transaction)
transaction_hash = keccak(rlp_encoded)
return AttributeDict({
'rawTransaction': HexBytes(rlp_encoded),
'hash': HexBytes(transaction_hash),
'r': r,
's': s,
'v': v,
})
@combomethod
def _parsePrivateKey(self, key):
'''
Generate a :class:`eth_keys.datatypes.PrivateKey` from the provided key. If the
key is already of type :class:`eth_keys.datatypes.PrivateKey`, return the key.
:param key: the private key from which a :class:`eth_keys.datatypes.PrivateKey`
will be generated
:type key: hex str, bytes, int or :class:`eth_keys.datatypes.PrivateKey`
:returns: the provided key represented as a :class:`eth_keys.datatypes.PrivateKey`
'''
if isinstance(key, self._keys.PrivateKey):
return key
try:
return self._keys.PrivateKey(HexBytes(key))
except ValidationError as original_exception:
raise ValueError(
"The private key must be exactly 32 bytes long, instead of "
"%d bytes." % len(key)
) from original_exception
|
ethereum/eth-account
|
eth_account/account.py
|
Account.recoverHash
|
python
|
def recoverHash(self, message_hash, vrs=None, signature=None):
'''
Get the address of the account that signed the message with the given hash.
You must specify exactly one of: vrs or signature
:param message_hash: the hash of the message that you want to verify
:type message_hash: hex str or bytes or int
:param vrs: the three pieces generated by an elliptic curve signature
:type vrs: tuple(v, r, s), each element is hex str, bytes or int
:param signature: signature bytes concatenated as r+s+v
:type signature: hex str or bytes or int
:returns: address of signer, hex-encoded & checksummed
:rtype: str
.. code-block:: python
>>> msg = "I♥SF"
>>> msghash = '0x1476abb745d423bf09273f1afd887d951181d25adc66c4834a70491911b7f750'
>>> vrs = (
28,
'0xe6ca9bba58c88611fad66a6ce8f996908195593807c4b38bd528d2cff09d4eb3',
'0x3e5bfbbf4d3e39b1a2fd816a7680c19ebebaf3a141b239934ad43cb33fcec8ce')
>>> Account.recoverHash(msghash, vrs=vrs)
'0x5ce9454909639D2D17A3F753ce7d93fa0b9aB12E'
# All of these recover calls are equivalent:
# variations on msghash
>>> msghash = b"\\x14v\\xab\\xb7E\\xd4#\\xbf\\t'?\\x1a\\xfd\\x88}\\x95\\x11\\x81\\xd2Z\\xdcf\\xc4\\x83JpI\\x19\\x11\\xb7\\xf7P" # noqa: E501
>>> Account.recoverHash(msghash, vrs=vrs)
>>> msghash = 0x1476abb745d423bf09273f1afd887d951181d25adc66c4834a70491911b7f750
>>> Account.recoverHash(msghash, vrs=vrs)
# variations on vrs
>>> vrs = (
'0x1c',
'0xe6ca9bba58c88611fad66a6ce8f996908195593807c4b38bd528d2cff09d4eb3',
'0x3e5bfbbf4d3e39b1a2fd816a7680c19ebebaf3a141b239934ad43cb33fcec8ce')
>>> Account.recoverHash(msghash, vrs=vrs)
>>> vrs = (
b'\\x1c',
b'\\xe6\\xca\\x9b\\xbaX\\xc8\\x86\\x11\\xfa\\xd6jl\\xe8\\xf9\\x96\\x90\\x81\\x95Y8\\x07\\xc4\\xb3\\x8b\\xd5(\\xd2\\xcf\\xf0\\x9dN\\xb3', # noqa: E501
b'>[\\xfb\\xbfM>9\\xb1\\xa2\\xfd\\x81jv\\x80\\xc1\\x9e\\xbe\\xba\\xf3\\xa1A\\xb29\\x93J\\xd4<\\xb3?\\xce\\xc8\\xce') # noqa: E501
>>> Account.recoverHash(msghash, vrs=vrs)
>>> vrs = (
0x1c,
0xe6ca9bba58c88611fad66a6ce8f996908195593807c4b38bd528d2cff09d4eb3,
0x3e5bfbbf4d3e39b1a2fd816a7680c19ebebaf3a141b239934ad43cb33fcec8ce)
>>> Account.recoverHash(msghash, vrs=vrs)
# variations on signature
>>> signature = '0xe6ca9bba58c88611fad66a6ce8f996908195593807c4b38bd528d2cff09d4eb33e5bfbbf4d3e39b1a2fd816a7680c19ebebaf3a141b239934ad43cb33fcec8ce1c' # noqa: E501
>>> Account.recoverHash(msghash, signature=signature)
>>> signature = b'\\xe6\\xca\\x9b\\xbaX\\xc8\\x86\\x11\\xfa\\xd6jl\\xe8\\xf9\\x96\\x90\\x81\\x95Y8\\x07\\xc4\\xb3\\x8b\\xd5(\\xd2\\xcf\\xf0\\x9dN\\xb3>[\\xfb\\xbfM>9\\xb1\\xa2\\xfd\\x81jv\\x80\\xc1\\x9e\\xbe\\xba\\xf3\\xa1A\\xb29\\x93J\\xd4<\\xb3?\\xce\\xc8\\xce\\x1c' # noqa: E501
>>> Account.recoverHash(msghash, signature=signature)
>>> signature = 0xe6ca9bba58c88611fad66a6ce8f996908195593807c4b38bd528d2cff09d4eb33e5bfbbf4d3e39b1a2fd816a7680c19ebebaf3a141b239934ad43cb33fcec8ce1c # noqa: E501
>>> Account.recoverHash(msghash, signature=signature)
'''
hash_bytes = HexBytes(message_hash)
if len(hash_bytes) != 32:
raise ValueError("The message hash must be exactly 32-bytes")
if vrs is not None:
v, r, s = map(hexstr_if_str(to_int), vrs)
v_standard = to_standard_v(v)
signature_obj = self._keys.Signature(vrs=(v_standard, r, s))
elif signature is not None:
signature_bytes = HexBytes(signature)
signature_bytes_standard = to_standard_signature_bytes(signature_bytes)
signature_obj = self._keys.Signature(signature_bytes=signature_bytes_standard)
else:
raise TypeError("You must supply the vrs tuple or the signature bytes")
pubkey = signature_obj.recover_public_key_from_msg_hash(hash_bytes)
return pubkey.to_checksum_address()
|
Get the address of the account that signed the message with the given hash.
You must specify exactly one of: vrs or signature
:param message_hash: the hash of the message that you want to verify
:type message_hash: hex str or bytes or int
:param vrs: the three pieces generated by an elliptic curve signature
:type vrs: tuple(v, r, s), each element is hex str, bytes or int
:param signature: signature bytes concatenated as r+s+v
:type signature: hex str or bytes or int
:returns: address of signer, hex-encoded & checksummed
:rtype: str
.. code-block:: python
>>> msg = "I♥SF"
>>> msghash = '0x1476abb745d423bf09273f1afd887d951181d25adc66c4834a70491911b7f750'
>>> vrs = (
28,
'0xe6ca9bba58c88611fad66a6ce8f996908195593807c4b38bd528d2cff09d4eb3',
'0x3e5bfbbf4d3e39b1a2fd816a7680c19ebebaf3a141b239934ad43cb33fcec8ce')
>>> Account.recoverHash(msghash, vrs=vrs)
'0x5ce9454909639D2D17A3F753ce7d93fa0b9aB12E'
# All of these recover calls are equivalent:
# variations on msghash
>>> msghash = b"\\x14v\\xab\\xb7E\\xd4#\\xbf\\t'?\\x1a\\xfd\\x88}\\x95\\x11\\x81\\xd2Z\\xdcf\\xc4\\x83JpI\\x19\\x11\\xb7\\xf7P" # noqa: E501
>>> Account.recoverHash(msghash, vrs=vrs)
>>> msghash = 0x1476abb745d423bf09273f1afd887d951181d25adc66c4834a70491911b7f750
>>> Account.recoverHash(msghash, vrs=vrs)
# variations on vrs
>>> vrs = (
'0x1c',
'0xe6ca9bba58c88611fad66a6ce8f996908195593807c4b38bd528d2cff09d4eb3',
'0x3e5bfbbf4d3e39b1a2fd816a7680c19ebebaf3a141b239934ad43cb33fcec8ce')
>>> Account.recoverHash(msghash, vrs=vrs)
>>> vrs = (
b'\\x1c',
b'\\xe6\\xca\\x9b\\xbaX\\xc8\\x86\\x11\\xfa\\xd6jl\\xe8\\xf9\\x96\\x90\\x81\\x95Y8\\x07\\xc4\\xb3\\x8b\\xd5(\\xd2\\xcf\\xf0\\x9dN\\xb3', # noqa: E501
b'>[\\xfb\\xbfM>9\\xb1\\xa2\\xfd\\x81jv\\x80\\xc1\\x9e\\xbe\\xba\\xf3\\xa1A\\xb29\\x93J\\xd4<\\xb3?\\xce\\xc8\\xce') # noqa: E501
>>> Account.recoverHash(msghash, vrs=vrs)
>>> vrs = (
0x1c,
0xe6ca9bba58c88611fad66a6ce8f996908195593807c4b38bd528d2cff09d4eb3,
0x3e5bfbbf4d3e39b1a2fd816a7680c19ebebaf3a141b239934ad43cb33fcec8ce)
>>> Account.recoverHash(msghash, vrs=vrs)
# variations on signature
>>> signature = '0xe6ca9bba58c88611fad66a6ce8f996908195593807c4b38bd528d2cff09d4eb33e5bfbbf4d3e39b1a2fd816a7680c19ebebaf3a141b239934ad43cb33fcec8ce1c' # noqa: E501
>>> Account.recoverHash(msghash, signature=signature)
>>> signature = b'\\xe6\\xca\\x9b\\xbaX\\xc8\\x86\\x11\\xfa\\xd6jl\\xe8\\xf9\\x96\\x90\\x81\\x95Y8\\x07\\xc4\\xb3\\x8b\\xd5(\\xd2\\xcf\\xf0\\x9dN\\xb3>[\\xfb\\xbfM>9\\xb1\\xa2\\xfd\\x81jv\\x80\\xc1\\x9e\\xbe\\xba\\xf3\\xa1A\\xb29\\x93J\\xd4<\\xb3?\\xce\\xc8\\xce\\x1c' # noqa: E501
>>> Account.recoverHash(msghash, signature=signature)
>>> signature = 0xe6ca9bba58c88611fad66a6ce8f996908195593807c4b38bd528d2cff09d4eb33e5bfbbf4d3e39b1a2fd816a7680c19ebebaf3a141b239934ad43cb33fcec8ce1c # noqa: E501
>>> Account.recoverHash(msghash, signature=signature)
|
train
|
https://github.com/ethereum/eth-account/blob/335199b815ae34fea87f1523e2f29777fd52946e/eth_account/account.py#L225-L297
|
[
"def to_standard_signature_bytes(ethereum_signature_bytes):\n rs = ethereum_signature_bytes[:-1]\n v = to_int(ethereum_signature_bytes[-1])\n standard_v = to_standard_v(v)\n return rs + to_bytes(standard_v)\n",
"def to_standard_v(enhanced_v):\n (_chain, chain_naive_v) = extract_chain_id(enhanced_v)\n v_standard = chain_naive_v - V_OFFSET\n assert v_standard in {0, 1}\n return v_standard\n"
] |
class Account(object):
'''
This is the primary entry point for working with Ethereum private keys.
It does **not** require a connection to an Ethereum node.
'''
_keys = keys
default_kdf = os.getenv('ETH_ACCOUNT_KDF', 'scrypt')
'''
The default key deriviation function (KDF) to use when encrypting a private key. If the
environment variable :envvar:`ETH_ACCOUNT_KDF` is set, it's value will be used as the default.
Otherwise, 'scrypt' will be used as the default.
'''
@combomethod
def create(self, extra_entropy=''):
'''
Creates a new private key, and returns it as a :class:`~eth_account.local.LocalAccount`.
:param extra_entropy: Add extra randomness to whatever randomness your OS can provide
:type extra_entropy: str or bytes or int
:returns: an object with private key and convenience methods
.. code-block:: python
>>> from eth_account import Account
>>> acct = Account.create('KEYSMASH FJAFJKLDSKF7JKFDJ 1530')
>>> acct.address
'0x5ce9454909639D2D17A3F753ce7d93fa0b9aB12E'
>>> acct.privateKey
b"\\xb2\\}\\xb3\\x1f\\xee\\xd9\\x12''\\xbf\\t9\\xdcv\\x9a\\x96VK-\\xe4\\xc4rm\\x03[6\\xec\\xf1\\xe5\\xb3d"
# These methods are also available: signHash(), signTransaction(), encrypt()
# They correspond to the same-named methods in Account.*
# but without the private key argument
'''
extra_key_bytes = text_if_str(to_bytes, extra_entropy)
key_bytes = keccak(os.urandom(32) + extra_key_bytes)
return self.privateKeyToAccount(key_bytes)
@staticmethod
def decrypt(keyfile_json, password):
'''
Decrypts a private key that was encrypted using an Ethereum client or
:meth:`~Account.encrypt`.
:param keyfile_json: The encrypted key
:type keyfile_json: dict or str
:param str password: The password that was used to encrypt the key
:returns: the raw private key
:rtype: ~hexbytes.main.HexBytes
.. code-block:: python
>>> encrypted = {
'address': '5ce9454909639d2d17a3f753ce7d93fa0b9ab12e',
'crypto': {'cipher': 'aes-128-ctr',
'cipherparams': {'iv': '78f214584844e0b241b433d7c3bb8d5f'},
'ciphertext': 'd6dbb56e4f54ba6db2e8dc14df17cb7352fdce03681dd3f90ce4b6c1d5af2c4f',
'kdf': 'pbkdf2',
'kdfparams': {'c': 1000000,
'dklen': 32,
'prf': 'hmac-sha256',
'salt': '45cf943b4de2c05c2c440ef96af914a2'},
'mac': 'f5e1af09df5ded25c96fcf075ada313fb6f79735a914adc8cb02e8ddee7813c3'},
'id': 'b812f3f9-78cc-462a-9e89-74418aa27cb0',
'version': 3}
>>> import getpass
>>> Account.decrypt(encrypted, getpass.getpass())
HexBytes('0xb25c7db31feed9122727bf0939dc769a96564b2de4c4726d035b36ecf1e5b364')
'''
if isinstance(keyfile_json, str):
keyfile = json.loads(keyfile_json)
elif is_dict(keyfile_json):
keyfile = keyfile_json
else:
raise TypeError("The keyfile should be supplied as a JSON string, or a dictionary.")
password_bytes = text_if_str(to_bytes, password)
return HexBytes(decode_keyfile_json(keyfile, password_bytes))
@classmethod
def encrypt(cls, private_key, password, kdf=None, iterations=None):
'''
Creates a dictionary with an encrypted version of your private key.
To import this keyfile into Ethereum clients like geth and parity:
encode this dictionary with :func:`json.dumps` and save it to disk where your
client keeps key files.
:param private_key: The raw private key
:type private_key: hex str, bytes, int or :class:`eth_keys.datatypes.PrivateKey`
:param str password: The password which you will need to unlock the account in your client
:param str kdf: The key derivation function to use when encrypting your private key
:param int iterations: The work factor for the key derivation function
:returns: The data to use in your encrypted file
:rtype: dict
.. code-block:: python
>>> import getpass
>>> encrypted = Account.encrypt(
0xb25c7db31feed9122727bf0939dc769a96564b2de4c4726d035b36ecf1e5b364,
getpass.getpass()
)
{
'address': '5ce9454909639d2d17a3f753ce7d93fa0b9ab12e',
'crypto': {
'cipher': 'aes-128-ctr',
'cipherparams': {
'iv': '0b7845a5c3597d3d378bde9b7c7319b7'
},
'ciphertext': 'a494f1feb3c854e99c1ff01e6aaa17d43c0752009073503b908457dc8de5d2a5', # noqa: E501
'kdf': 'scrypt',
'kdfparams': {
'dklen': 32,
'n': 262144,
'p': 8,
'r': 1,
'salt': '13c4a48123affaa29189e9097726c698'
},
'mac': 'f4cfb027eb0af9bd7a320b4374a3fa7bef02cfbafe0ec5d1fd7ad129401de0b1'
},
'id': 'a60e0578-0e5b-4a75-b991-d55ec6451a6f',
'version': 3
}
>>> with open('my-keyfile', 'w') as f:
f.write(json.dumps(encrypted))
'''
if isinstance(private_key, keys.PrivateKey):
key_bytes = private_key.to_bytes()
else:
key_bytes = HexBytes(private_key)
if kdf is None:
kdf = cls.default_kdf
password_bytes = text_if_str(to_bytes, password)
assert len(key_bytes) == 32
return create_keyfile_json(key_bytes, password_bytes, kdf=kdf, iterations=iterations)
@combomethod
def privateKeyToAccount(self, private_key):
'''
Returns a convenient object for working with the given private key.
:param private_key: The raw private key
:type private_key: hex str, bytes, int or :class:`eth_keys.datatypes.PrivateKey`
:return: object with methods for signing and encrypting
:rtype: LocalAccount
.. code-block:: python
>>> acct = Account.privateKeyToAccount(
0xb25c7db31feed9122727bf0939dc769a96564b2de4c4726d035b36ecf1e5b364)
>>> acct.address
'0x5ce9454909639D2D17A3F753ce7d93fa0b9aB12E'
>>> acct.privateKey
b"\\xb2\\}\\xb3\\x1f\\xee\\xd9\\x12''\\xbf\\t9\\xdcv\\x9a\\x96VK-\\xe4\\xc4rm\\x03[6\\xec\\xf1\\xe5\\xb3d"
# These methods are also available: signHash(), signTransaction(), encrypt()
# They correspond to the same-named methods in Account.*
# but without the private key argument
'''
key = self._parsePrivateKey(private_key)
return LocalAccount(key, self)
@combomethod
@combomethod
def recoverTransaction(self, serialized_transaction):
'''
Get the address of the account that signed this transaction.
:param serialized_transaction: the complete signed transaction
:type serialized_transaction: hex str, bytes or int
:returns: address of signer, hex-encoded & checksummed
:rtype: str
.. code-block:: python
>>> raw_transaction = '0xf86a8086d55698372431831e848094f0109fc8df283027b6285cc889f5aa624eac1f55843b9aca008025a009ebb6ca057a0535d6186462bc0b465b561c94a295bdb0621fc19208ab149a9ca0440ffd775ce91a833ab410777204d5341a6f9fa91216a6f3ee2c051fea6a0428', # noqa: E501
>>> Account.recoverTransaction(raw_transaction)
'0x2c7536E3605D9C16a7a3D7b1898e529396a65c23'
'''
txn_bytes = HexBytes(serialized_transaction)
txn = Transaction.from_bytes(txn_bytes)
msg_hash = hash_of_signed_transaction(txn)
return self.recoverHash(msg_hash, vrs=vrs_from(txn))
def setKeyBackend(self, backend):
'''
Change the backend used by the underlying eth-keys library.
*(The default is fine for most users)*
:param backend: any backend that works in
`eth_keys.KeyApi(backend) <https://github.com/ethereum/eth-keys/#keyapibackendnone>`_
'''
self._keys = KeyAPI(backend)
@combomethod
def signHash(self, message_hash, private_key):
'''
Sign the hash provided.
.. WARNING:: *Never* sign a hash that you didn't generate,
it can be an arbitrary transaction. For example, it might
send all of your account's ether to an attacker.
If you would like compatibility with
:meth:`w3.eth.sign() <web3.eth.Eth.sign>`
you can use :meth:`~eth_account.messages.defunct_hash_message`.
Several other message standards are proposed, but none have a clear
consensus. You'll need to manually comply with any of those message standards manually.
:param message_hash: the 32-byte message hash to be signed
:type message_hash: hex str, bytes or int
:param private_key: the key to sign the message with
:type private_key: hex str, bytes, int or :class:`eth_keys.datatypes.PrivateKey`
:returns: Various details about the signature - most
importantly the fields: v, r, and s
:rtype: ~eth_account.datastructures.AttributeDict
.. code-block:: python
>>> msg = "I♥SF"
>>> from eth_account.messages import defunct_hash_message
>>> msghash = defunct_hash_message(text=msg)
HexBytes('0x1476abb745d423bf09273f1afd887d951181d25adc66c4834a70491911b7f750')
>>> key = "0xb25c7db31feed9122727bf0939dc769a96564b2de4c4726d035b36ecf1e5b364"
>>> Account.signHash(msghash, key)
{'messageHash': HexBytes('0x1476abb745d423bf09273f1afd887d951181d25adc66c4834a70491911b7f750'), # noqa: E501
'r': 104389933075820307925104709181714897380569894203213074526835978196648170704563,
's': 28205917190874851400050446352651915501321657673772411533993420917949420456142,
'signature': HexBytes('0xe6ca9bba58c88611fad66a6ce8f996908195593807c4b38bd528d2cff09d4eb33e5bfbbf4d3e39b1a2fd816a7680c19ebebaf3a141b239934ad43cb33fcec8ce1c'), # noqa: E501
'v': 28}
# these are equivalent:
>>> Account.signHash(
0x1476abb745d423bf09273f1afd887d951181d25adc66c4834a70491911b7f750,
key
)
>>> Account.signHash(
"0x1476abb745d423bf09273f1afd887d951181d25adc66c4834a70491911b7f750",
key
)
'''
msg_hash_bytes = HexBytes(message_hash)
if len(msg_hash_bytes) != 32:
raise ValueError("The message hash must be exactly 32-bytes")
key = self._parsePrivateKey(private_key)
(v, r, s, eth_signature_bytes) = sign_message_hash(key, msg_hash_bytes)
return AttributeDict({
'messageHash': msg_hash_bytes,
'r': r,
's': s,
'v': v,
'signature': HexBytes(eth_signature_bytes),
})
@combomethod
def signTransaction(self, transaction_dict, private_key):
'''
Sign a transaction using a local private key. Produces signature details
and the hex-encoded transaction suitable for broadcast using
:meth:`w3.eth.sendRawTransaction() <web3.eth.Eth.sendRawTransaction>`.
Create the transaction dict for a contract method with
`my_contract.functions.my_function().buildTransaction()
<http://web3py.readthedocs.io/en/latest/contracts.html#methods>`_
:param dict transaction_dict: the transaction with keys:
nonce, chainId, to, data, value, gas, and gasPrice.
:param private_key: the private key to sign the data with
:type private_key: hex str, bytes, int or :class:`eth_keys.datatypes.PrivateKey`
:returns: Various details about the signature - most
importantly the fields: v, r, and s
:rtype: AttributeDict
.. code-block:: python
>>> transaction = {
# Note that the address must be in checksum format or native bytes:
'to': '0xF0109fC8DF283027b6285cc889F5aA624EaC1F55',
'value': 1000000000,
'gas': 2000000,
'gasPrice': 234567897654321,
'nonce': 0,
'chainId': 1
}
>>> key = '0x4c0883a69102937d6231471b5dbb6204fe5129617082792ae468d01a3f362318'
>>> signed = Account.signTransaction(transaction, key)
{'hash': HexBytes('0x6893a6ee8df79b0f5d64a180cd1ef35d030f3e296a5361cf04d02ce720d32ec5'),
'r': 4487286261793418179817841024889747115779324305375823110249149479905075174044,
'rawTransaction': HexBytes('0xf86a8086d55698372431831e848094f0109fc8df283027b6285cc889f5aa624eac1f55843b9aca008025a009ebb6ca057a0535d6186462bc0b465b561c94a295bdb0621fc19208ab149a9ca0440ffd775ce91a833ab410777204d5341a6f9fa91216a6f3ee2c051fea6a0428'), # noqa: E501
's': 30785525769477805655994251009256770582792548537338581640010273753578382951464,
'v': 37}
>>> w3.eth.sendRawTransaction(signed.rawTransaction)
'''
if not isinstance(transaction_dict, Mapping):
raise TypeError("transaction_dict must be dict-like, got %r" % transaction_dict)
account = self.privateKeyToAccount(private_key)
# allow from field, *only* if it matches the private key
if 'from' in transaction_dict:
if transaction_dict['from'] == account.address:
sanitized_transaction = dissoc(transaction_dict, 'from')
else:
raise TypeError("from field must match key's %s, but it was %s" % (
account.address,
transaction_dict['from'],
))
else:
sanitized_transaction = transaction_dict
# sign transaction
(
v,
r,
s,
rlp_encoded,
) = sign_transaction_dict(account._key_obj, sanitized_transaction)
transaction_hash = keccak(rlp_encoded)
return AttributeDict({
'rawTransaction': HexBytes(rlp_encoded),
'hash': HexBytes(transaction_hash),
'r': r,
's': s,
'v': v,
})
@combomethod
def _parsePrivateKey(self, key):
'''
Generate a :class:`eth_keys.datatypes.PrivateKey` from the provided key. If the
key is already of type :class:`eth_keys.datatypes.PrivateKey`, return the key.
:param key: the private key from which a :class:`eth_keys.datatypes.PrivateKey`
will be generated
:type key: hex str, bytes, int or :class:`eth_keys.datatypes.PrivateKey`
:returns: the provided key represented as a :class:`eth_keys.datatypes.PrivateKey`
'''
if isinstance(key, self._keys.PrivateKey):
return key
try:
return self._keys.PrivateKey(HexBytes(key))
except ValidationError as original_exception:
raise ValueError(
"The private key must be exactly 32 bytes long, instead of "
"%d bytes." % len(key)
) from original_exception
|
ethereum/eth-account
|
eth_account/account.py
|
Account.recoverTransaction
|
python
|
def recoverTransaction(self, serialized_transaction):
'''
Get the address of the account that signed this transaction.
:param serialized_transaction: the complete signed transaction
:type serialized_transaction: hex str, bytes or int
:returns: address of signer, hex-encoded & checksummed
:rtype: str
.. code-block:: python
>>> raw_transaction = '0xf86a8086d55698372431831e848094f0109fc8df283027b6285cc889f5aa624eac1f55843b9aca008025a009ebb6ca057a0535d6186462bc0b465b561c94a295bdb0621fc19208ab149a9ca0440ffd775ce91a833ab410777204d5341a6f9fa91216a6f3ee2c051fea6a0428', # noqa: E501
>>> Account.recoverTransaction(raw_transaction)
'0x2c7536E3605D9C16a7a3D7b1898e529396a65c23'
'''
txn_bytes = HexBytes(serialized_transaction)
txn = Transaction.from_bytes(txn_bytes)
msg_hash = hash_of_signed_transaction(txn)
return self.recoverHash(msg_hash, vrs=vrs_from(txn))
|
Get the address of the account that signed this transaction.
:param serialized_transaction: the complete signed transaction
:type serialized_transaction: hex str, bytes or int
:returns: address of signer, hex-encoded & checksummed
:rtype: str
.. code-block:: python
>>> raw_transaction = '0xf86a8086d55698372431831e848094f0109fc8df283027b6285cc889f5aa624eac1f55843b9aca008025a009ebb6ca057a0535d6186462bc0b465b561c94a295bdb0621fc19208ab149a9ca0440ffd775ce91a833ab410777204d5341a6f9fa91216a6f3ee2c051fea6a0428', # noqa: E501
>>> Account.recoverTransaction(raw_transaction)
'0x2c7536E3605D9C16a7a3D7b1898e529396a65c23'
|
train
|
https://github.com/ethereum/eth-account/blob/335199b815ae34fea87f1523e2f29777fd52946e/eth_account/account.py#L300-L318
|
[
"def hash_of_signed_transaction(txn_obj):\n '''\n Regenerate the hash of the signed transaction object.\n\n 1. Infer the chain ID from the signature\n 2. Strip out signature from transaction\n 3. Annotate the transaction with that ID, if available\n 4. Take the hash of the serialized, unsigned, chain-aware transaction\n\n Chain ID inference and annotation is according to EIP-155\n See details at https://github.com/ethereum/EIPs/blob/master/EIPS/eip-155.md\n\n :return: the hash of the provided transaction, to be signed\n '''\n (chain_id, _v) = extract_chain_id(txn_obj.v)\n unsigned_parts = strip_signature(txn_obj)\n if chain_id is None:\n signable_transaction = UnsignedTransaction(*unsigned_parts)\n else:\n extended_transaction = unsigned_parts + [chain_id, 0, 0]\n signable_transaction = ChainAwareUnsignedTransaction(*extended_transaction)\n return signable_transaction.hash()\n",
"def vrs_from(transaction):\n return (getattr(transaction, part) for part in 'vrs')\n"
] |
class Account(object):
'''
This is the primary entry point for working with Ethereum private keys.
It does **not** require a connection to an Ethereum node.
'''
_keys = keys
default_kdf = os.getenv('ETH_ACCOUNT_KDF', 'scrypt')
'''
The default key deriviation function (KDF) to use when encrypting a private key. If the
environment variable :envvar:`ETH_ACCOUNT_KDF` is set, it's value will be used as the default.
Otherwise, 'scrypt' will be used as the default.
'''
@combomethod
def create(self, extra_entropy=''):
'''
Creates a new private key, and returns it as a :class:`~eth_account.local.LocalAccount`.
:param extra_entropy: Add extra randomness to whatever randomness your OS can provide
:type extra_entropy: str or bytes or int
:returns: an object with private key and convenience methods
.. code-block:: python
>>> from eth_account import Account
>>> acct = Account.create('KEYSMASH FJAFJKLDSKF7JKFDJ 1530')
>>> acct.address
'0x5ce9454909639D2D17A3F753ce7d93fa0b9aB12E'
>>> acct.privateKey
b"\\xb2\\}\\xb3\\x1f\\xee\\xd9\\x12''\\xbf\\t9\\xdcv\\x9a\\x96VK-\\xe4\\xc4rm\\x03[6\\xec\\xf1\\xe5\\xb3d"
# These methods are also available: signHash(), signTransaction(), encrypt()
# They correspond to the same-named methods in Account.*
# but without the private key argument
'''
extra_key_bytes = text_if_str(to_bytes, extra_entropy)
key_bytes = keccak(os.urandom(32) + extra_key_bytes)
return self.privateKeyToAccount(key_bytes)
@staticmethod
def decrypt(keyfile_json, password):
'''
Decrypts a private key that was encrypted using an Ethereum client or
:meth:`~Account.encrypt`.
:param keyfile_json: The encrypted key
:type keyfile_json: dict or str
:param str password: The password that was used to encrypt the key
:returns: the raw private key
:rtype: ~hexbytes.main.HexBytes
.. code-block:: python
>>> encrypted = {
'address': '5ce9454909639d2d17a3f753ce7d93fa0b9ab12e',
'crypto': {'cipher': 'aes-128-ctr',
'cipherparams': {'iv': '78f214584844e0b241b433d7c3bb8d5f'},
'ciphertext': 'd6dbb56e4f54ba6db2e8dc14df17cb7352fdce03681dd3f90ce4b6c1d5af2c4f',
'kdf': 'pbkdf2',
'kdfparams': {'c': 1000000,
'dklen': 32,
'prf': 'hmac-sha256',
'salt': '45cf943b4de2c05c2c440ef96af914a2'},
'mac': 'f5e1af09df5ded25c96fcf075ada313fb6f79735a914adc8cb02e8ddee7813c3'},
'id': 'b812f3f9-78cc-462a-9e89-74418aa27cb0',
'version': 3}
>>> import getpass
>>> Account.decrypt(encrypted, getpass.getpass())
HexBytes('0xb25c7db31feed9122727bf0939dc769a96564b2de4c4726d035b36ecf1e5b364')
'''
if isinstance(keyfile_json, str):
keyfile = json.loads(keyfile_json)
elif is_dict(keyfile_json):
keyfile = keyfile_json
else:
raise TypeError("The keyfile should be supplied as a JSON string, or a dictionary.")
password_bytes = text_if_str(to_bytes, password)
return HexBytes(decode_keyfile_json(keyfile, password_bytes))
@classmethod
def encrypt(cls, private_key, password, kdf=None, iterations=None):
'''
Creates a dictionary with an encrypted version of your private key.
To import this keyfile into Ethereum clients like geth and parity:
encode this dictionary with :func:`json.dumps` and save it to disk where your
client keeps key files.
:param private_key: The raw private key
:type private_key: hex str, bytes, int or :class:`eth_keys.datatypes.PrivateKey`
:param str password: The password which you will need to unlock the account in your client
:param str kdf: The key derivation function to use when encrypting your private key
:param int iterations: The work factor for the key derivation function
:returns: The data to use in your encrypted file
:rtype: dict
.. code-block:: python
>>> import getpass
>>> encrypted = Account.encrypt(
0xb25c7db31feed9122727bf0939dc769a96564b2de4c4726d035b36ecf1e5b364,
getpass.getpass()
)
{
'address': '5ce9454909639d2d17a3f753ce7d93fa0b9ab12e',
'crypto': {
'cipher': 'aes-128-ctr',
'cipherparams': {
'iv': '0b7845a5c3597d3d378bde9b7c7319b7'
},
'ciphertext': 'a494f1feb3c854e99c1ff01e6aaa17d43c0752009073503b908457dc8de5d2a5', # noqa: E501
'kdf': 'scrypt',
'kdfparams': {
'dklen': 32,
'n': 262144,
'p': 8,
'r': 1,
'salt': '13c4a48123affaa29189e9097726c698'
},
'mac': 'f4cfb027eb0af9bd7a320b4374a3fa7bef02cfbafe0ec5d1fd7ad129401de0b1'
},
'id': 'a60e0578-0e5b-4a75-b991-d55ec6451a6f',
'version': 3
}
>>> with open('my-keyfile', 'w') as f:
f.write(json.dumps(encrypted))
'''
if isinstance(private_key, keys.PrivateKey):
key_bytes = private_key.to_bytes()
else:
key_bytes = HexBytes(private_key)
if kdf is None:
kdf = cls.default_kdf
password_bytes = text_if_str(to_bytes, password)
assert len(key_bytes) == 32
return create_keyfile_json(key_bytes, password_bytes, kdf=kdf, iterations=iterations)
@combomethod
def privateKeyToAccount(self, private_key):
'''
Returns a convenient object for working with the given private key.
:param private_key: The raw private key
:type private_key: hex str, bytes, int or :class:`eth_keys.datatypes.PrivateKey`
:return: object with methods for signing and encrypting
:rtype: LocalAccount
.. code-block:: python
>>> acct = Account.privateKeyToAccount(
0xb25c7db31feed9122727bf0939dc769a96564b2de4c4726d035b36ecf1e5b364)
>>> acct.address
'0x5ce9454909639D2D17A3F753ce7d93fa0b9aB12E'
>>> acct.privateKey
b"\\xb2\\}\\xb3\\x1f\\xee\\xd9\\x12''\\xbf\\t9\\xdcv\\x9a\\x96VK-\\xe4\\xc4rm\\x03[6\\xec\\xf1\\xe5\\xb3d"
# These methods are also available: signHash(), signTransaction(), encrypt()
# They correspond to the same-named methods in Account.*
# but without the private key argument
'''
key = self._parsePrivateKey(private_key)
return LocalAccount(key, self)
@combomethod
def recoverHash(self, message_hash, vrs=None, signature=None):
'''
Get the address of the account that signed the message with the given hash.
You must specify exactly one of: vrs or signature
:param message_hash: the hash of the message that you want to verify
:type message_hash: hex str or bytes or int
:param vrs: the three pieces generated by an elliptic curve signature
:type vrs: tuple(v, r, s), each element is hex str, bytes or int
:param signature: signature bytes concatenated as r+s+v
:type signature: hex str or bytes or int
:returns: address of signer, hex-encoded & checksummed
:rtype: str
.. code-block:: python
>>> msg = "I♥SF"
>>> msghash = '0x1476abb745d423bf09273f1afd887d951181d25adc66c4834a70491911b7f750'
>>> vrs = (
28,
'0xe6ca9bba58c88611fad66a6ce8f996908195593807c4b38bd528d2cff09d4eb3',
'0x3e5bfbbf4d3e39b1a2fd816a7680c19ebebaf3a141b239934ad43cb33fcec8ce')
>>> Account.recoverHash(msghash, vrs=vrs)
'0x5ce9454909639D2D17A3F753ce7d93fa0b9aB12E'
# All of these recover calls are equivalent:
# variations on msghash
>>> msghash = b"\\x14v\\xab\\xb7E\\xd4#\\xbf\\t'?\\x1a\\xfd\\x88}\\x95\\x11\\x81\\xd2Z\\xdcf\\xc4\\x83JpI\\x19\\x11\\xb7\\xf7P" # noqa: E501
>>> Account.recoverHash(msghash, vrs=vrs)
>>> msghash = 0x1476abb745d423bf09273f1afd887d951181d25adc66c4834a70491911b7f750
>>> Account.recoverHash(msghash, vrs=vrs)
# variations on vrs
>>> vrs = (
'0x1c',
'0xe6ca9bba58c88611fad66a6ce8f996908195593807c4b38bd528d2cff09d4eb3',
'0x3e5bfbbf4d3e39b1a2fd816a7680c19ebebaf3a141b239934ad43cb33fcec8ce')
>>> Account.recoverHash(msghash, vrs=vrs)
>>> vrs = (
b'\\x1c',
b'\\xe6\\xca\\x9b\\xbaX\\xc8\\x86\\x11\\xfa\\xd6jl\\xe8\\xf9\\x96\\x90\\x81\\x95Y8\\x07\\xc4\\xb3\\x8b\\xd5(\\xd2\\xcf\\xf0\\x9dN\\xb3', # noqa: E501
b'>[\\xfb\\xbfM>9\\xb1\\xa2\\xfd\\x81jv\\x80\\xc1\\x9e\\xbe\\xba\\xf3\\xa1A\\xb29\\x93J\\xd4<\\xb3?\\xce\\xc8\\xce') # noqa: E501
>>> Account.recoverHash(msghash, vrs=vrs)
>>> vrs = (
0x1c,
0xe6ca9bba58c88611fad66a6ce8f996908195593807c4b38bd528d2cff09d4eb3,
0x3e5bfbbf4d3e39b1a2fd816a7680c19ebebaf3a141b239934ad43cb33fcec8ce)
>>> Account.recoverHash(msghash, vrs=vrs)
# variations on signature
>>> signature = '0xe6ca9bba58c88611fad66a6ce8f996908195593807c4b38bd528d2cff09d4eb33e5bfbbf4d3e39b1a2fd816a7680c19ebebaf3a141b239934ad43cb33fcec8ce1c' # noqa: E501
>>> Account.recoverHash(msghash, signature=signature)
>>> signature = b'\\xe6\\xca\\x9b\\xbaX\\xc8\\x86\\x11\\xfa\\xd6jl\\xe8\\xf9\\x96\\x90\\x81\\x95Y8\\x07\\xc4\\xb3\\x8b\\xd5(\\xd2\\xcf\\xf0\\x9dN\\xb3>[\\xfb\\xbfM>9\\xb1\\xa2\\xfd\\x81jv\\x80\\xc1\\x9e\\xbe\\xba\\xf3\\xa1A\\xb29\\x93J\\xd4<\\xb3?\\xce\\xc8\\xce\\x1c' # noqa: E501
>>> Account.recoverHash(msghash, signature=signature)
>>> signature = 0xe6ca9bba58c88611fad66a6ce8f996908195593807c4b38bd528d2cff09d4eb33e5bfbbf4d3e39b1a2fd816a7680c19ebebaf3a141b239934ad43cb33fcec8ce1c # noqa: E501
>>> Account.recoverHash(msghash, signature=signature)
'''
hash_bytes = HexBytes(message_hash)
if len(hash_bytes) != 32:
raise ValueError("The message hash must be exactly 32-bytes")
if vrs is not None:
v, r, s = map(hexstr_if_str(to_int), vrs)
v_standard = to_standard_v(v)
signature_obj = self._keys.Signature(vrs=(v_standard, r, s))
elif signature is not None:
signature_bytes = HexBytes(signature)
signature_bytes_standard = to_standard_signature_bytes(signature_bytes)
signature_obj = self._keys.Signature(signature_bytes=signature_bytes_standard)
else:
raise TypeError("You must supply the vrs tuple or the signature bytes")
pubkey = signature_obj.recover_public_key_from_msg_hash(hash_bytes)
return pubkey.to_checksum_address()
@combomethod
def setKeyBackend(self, backend):
'''
Change the backend used by the underlying eth-keys library.
*(The default is fine for most users)*
:param backend: any backend that works in
`eth_keys.KeyApi(backend) <https://github.com/ethereum/eth-keys/#keyapibackendnone>`_
'''
self._keys = KeyAPI(backend)
@combomethod
def signHash(self, message_hash, private_key):
'''
Sign the hash provided.
.. WARNING:: *Never* sign a hash that you didn't generate,
it can be an arbitrary transaction. For example, it might
send all of your account's ether to an attacker.
If you would like compatibility with
:meth:`w3.eth.sign() <web3.eth.Eth.sign>`
you can use :meth:`~eth_account.messages.defunct_hash_message`.
Several other message standards are proposed, but none have a clear
consensus. You'll need to manually comply with any of those message standards manually.
:param message_hash: the 32-byte message hash to be signed
:type message_hash: hex str, bytes or int
:param private_key: the key to sign the message with
:type private_key: hex str, bytes, int or :class:`eth_keys.datatypes.PrivateKey`
:returns: Various details about the signature - most
importantly the fields: v, r, and s
:rtype: ~eth_account.datastructures.AttributeDict
.. code-block:: python
>>> msg = "I♥SF"
>>> from eth_account.messages import defunct_hash_message
>>> msghash = defunct_hash_message(text=msg)
HexBytes('0x1476abb745d423bf09273f1afd887d951181d25adc66c4834a70491911b7f750')
>>> key = "0xb25c7db31feed9122727bf0939dc769a96564b2de4c4726d035b36ecf1e5b364"
>>> Account.signHash(msghash, key)
{'messageHash': HexBytes('0x1476abb745d423bf09273f1afd887d951181d25adc66c4834a70491911b7f750'), # noqa: E501
'r': 104389933075820307925104709181714897380569894203213074526835978196648170704563,
's': 28205917190874851400050446352651915501321657673772411533993420917949420456142,
'signature': HexBytes('0xe6ca9bba58c88611fad66a6ce8f996908195593807c4b38bd528d2cff09d4eb33e5bfbbf4d3e39b1a2fd816a7680c19ebebaf3a141b239934ad43cb33fcec8ce1c'), # noqa: E501
'v': 28}
# these are equivalent:
>>> Account.signHash(
0x1476abb745d423bf09273f1afd887d951181d25adc66c4834a70491911b7f750,
key
)
>>> Account.signHash(
"0x1476abb745d423bf09273f1afd887d951181d25adc66c4834a70491911b7f750",
key
)
'''
msg_hash_bytes = HexBytes(message_hash)
if len(msg_hash_bytes) != 32:
raise ValueError("The message hash must be exactly 32-bytes")
key = self._parsePrivateKey(private_key)
(v, r, s, eth_signature_bytes) = sign_message_hash(key, msg_hash_bytes)
return AttributeDict({
'messageHash': msg_hash_bytes,
'r': r,
's': s,
'v': v,
'signature': HexBytes(eth_signature_bytes),
})
@combomethod
def signTransaction(self, transaction_dict, private_key):
'''
Sign a transaction using a local private key. Produces signature details
and the hex-encoded transaction suitable for broadcast using
:meth:`w3.eth.sendRawTransaction() <web3.eth.Eth.sendRawTransaction>`.
Create the transaction dict for a contract method with
`my_contract.functions.my_function().buildTransaction()
<http://web3py.readthedocs.io/en/latest/contracts.html#methods>`_
:param dict transaction_dict: the transaction with keys:
nonce, chainId, to, data, value, gas, and gasPrice.
:param private_key: the private key to sign the data with
:type private_key: hex str, bytes, int or :class:`eth_keys.datatypes.PrivateKey`
:returns: Various details about the signature - most
importantly the fields: v, r, and s
:rtype: AttributeDict
.. code-block:: python
>>> transaction = {
# Note that the address must be in checksum format or native bytes:
'to': '0xF0109fC8DF283027b6285cc889F5aA624EaC1F55',
'value': 1000000000,
'gas': 2000000,
'gasPrice': 234567897654321,
'nonce': 0,
'chainId': 1
}
>>> key = '0x4c0883a69102937d6231471b5dbb6204fe5129617082792ae468d01a3f362318'
>>> signed = Account.signTransaction(transaction, key)
{'hash': HexBytes('0x6893a6ee8df79b0f5d64a180cd1ef35d030f3e296a5361cf04d02ce720d32ec5'),
'r': 4487286261793418179817841024889747115779324305375823110249149479905075174044,
'rawTransaction': HexBytes('0xf86a8086d55698372431831e848094f0109fc8df283027b6285cc889f5aa624eac1f55843b9aca008025a009ebb6ca057a0535d6186462bc0b465b561c94a295bdb0621fc19208ab149a9ca0440ffd775ce91a833ab410777204d5341a6f9fa91216a6f3ee2c051fea6a0428'), # noqa: E501
's': 30785525769477805655994251009256770582792548537338581640010273753578382951464,
'v': 37}
>>> w3.eth.sendRawTransaction(signed.rawTransaction)
'''
if not isinstance(transaction_dict, Mapping):
raise TypeError("transaction_dict must be dict-like, got %r" % transaction_dict)
account = self.privateKeyToAccount(private_key)
# allow from field, *only* if it matches the private key
if 'from' in transaction_dict:
if transaction_dict['from'] == account.address:
sanitized_transaction = dissoc(transaction_dict, 'from')
else:
raise TypeError("from field must match key's %s, but it was %s" % (
account.address,
transaction_dict['from'],
))
else:
sanitized_transaction = transaction_dict
# sign transaction
(
v,
r,
s,
rlp_encoded,
) = sign_transaction_dict(account._key_obj, sanitized_transaction)
transaction_hash = keccak(rlp_encoded)
return AttributeDict({
'rawTransaction': HexBytes(rlp_encoded),
'hash': HexBytes(transaction_hash),
'r': r,
's': s,
'v': v,
})
@combomethod
def _parsePrivateKey(self, key):
'''
Generate a :class:`eth_keys.datatypes.PrivateKey` from the provided key. If the
key is already of type :class:`eth_keys.datatypes.PrivateKey`, return the key.
:param key: the private key from which a :class:`eth_keys.datatypes.PrivateKey`
will be generated
:type key: hex str, bytes, int or :class:`eth_keys.datatypes.PrivateKey`
:returns: the provided key represented as a :class:`eth_keys.datatypes.PrivateKey`
'''
if isinstance(key, self._keys.PrivateKey):
return key
try:
return self._keys.PrivateKey(HexBytes(key))
except ValidationError as original_exception:
raise ValueError(
"The private key must be exactly 32 bytes long, instead of "
"%d bytes." % len(key)
) from original_exception
|
ethereum/eth-account
|
eth_account/account.py
|
Account.signHash
|
python
|
def signHash(self, message_hash, private_key):
'''
Sign the hash provided.
.. WARNING:: *Never* sign a hash that you didn't generate,
it can be an arbitrary transaction. For example, it might
send all of your account's ether to an attacker.
If you would like compatibility with
:meth:`w3.eth.sign() <web3.eth.Eth.sign>`
you can use :meth:`~eth_account.messages.defunct_hash_message`.
Several other message standards are proposed, but none have a clear
consensus. You'll need to manually comply with any of those message standards manually.
:param message_hash: the 32-byte message hash to be signed
:type message_hash: hex str, bytes or int
:param private_key: the key to sign the message with
:type private_key: hex str, bytes, int or :class:`eth_keys.datatypes.PrivateKey`
:returns: Various details about the signature - most
importantly the fields: v, r, and s
:rtype: ~eth_account.datastructures.AttributeDict
.. code-block:: python
>>> msg = "I♥SF"
>>> from eth_account.messages import defunct_hash_message
>>> msghash = defunct_hash_message(text=msg)
HexBytes('0x1476abb745d423bf09273f1afd887d951181d25adc66c4834a70491911b7f750')
>>> key = "0xb25c7db31feed9122727bf0939dc769a96564b2de4c4726d035b36ecf1e5b364"
>>> Account.signHash(msghash, key)
{'messageHash': HexBytes('0x1476abb745d423bf09273f1afd887d951181d25adc66c4834a70491911b7f750'), # noqa: E501
'r': 104389933075820307925104709181714897380569894203213074526835978196648170704563,
's': 28205917190874851400050446352651915501321657673772411533993420917949420456142,
'signature': HexBytes('0xe6ca9bba58c88611fad66a6ce8f996908195593807c4b38bd528d2cff09d4eb33e5bfbbf4d3e39b1a2fd816a7680c19ebebaf3a141b239934ad43cb33fcec8ce1c'), # noqa: E501
'v': 28}
# these are equivalent:
>>> Account.signHash(
0x1476abb745d423bf09273f1afd887d951181d25adc66c4834a70491911b7f750,
key
)
>>> Account.signHash(
"0x1476abb745d423bf09273f1afd887d951181d25adc66c4834a70491911b7f750",
key
)
'''
msg_hash_bytes = HexBytes(message_hash)
if len(msg_hash_bytes) != 32:
raise ValueError("The message hash must be exactly 32-bytes")
key = self._parsePrivateKey(private_key)
(v, r, s, eth_signature_bytes) = sign_message_hash(key, msg_hash_bytes)
return AttributeDict({
'messageHash': msg_hash_bytes,
'r': r,
's': s,
'v': v,
'signature': HexBytes(eth_signature_bytes),
})
|
Sign the hash provided.
.. WARNING:: *Never* sign a hash that you didn't generate,
it can be an arbitrary transaction. For example, it might
send all of your account's ether to an attacker.
If you would like compatibility with
:meth:`w3.eth.sign() <web3.eth.Eth.sign>`
you can use :meth:`~eth_account.messages.defunct_hash_message`.
Several other message standards are proposed, but none have a clear
consensus. You'll need to manually comply with any of those message standards manually.
:param message_hash: the 32-byte message hash to be signed
:type message_hash: hex str, bytes or int
:param private_key: the key to sign the message with
:type private_key: hex str, bytes, int or :class:`eth_keys.datatypes.PrivateKey`
:returns: Various details about the signature - most
importantly the fields: v, r, and s
:rtype: ~eth_account.datastructures.AttributeDict
.. code-block:: python
>>> msg = "I♥SF"
>>> from eth_account.messages import defunct_hash_message
>>> msghash = defunct_hash_message(text=msg)
HexBytes('0x1476abb745d423bf09273f1afd887d951181d25adc66c4834a70491911b7f750')
>>> key = "0xb25c7db31feed9122727bf0939dc769a96564b2de4c4726d035b36ecf1e5b364"
>>> Account.signHash(msghash, key)
{'messageHash': HexBytes('0x1476abb745d423bf09273f1afd887d951181d25adc66c4834a70491911b7f750'), # noqa: E501
'r': 104389933075820307925104709181714897380569894203213074526835978196648170704563,
's': 28205917190874851400050446352651915501321657673772411533993420917949420456142,
'signature': HexBytes('0xe6ca9bba58c88611fad66a6ce8f996908195593807c4b38bd528d2cff09d4eb33e5bfbbf4d3e39b1a2fd816a7680c19ebebaf3a141b239934ad43cb33fcec8ce1c'), # noqa: E501
'v': 28}
# these are equivalent:
>>> Account.signHash(
0x1476abb745d423bf09273f1afd887d951181d25adc66c4834a70491911b7f750,
key
)
>>> Account.signHash(
"0x1476abb745d423bf09273f1afd887d951181d25adc66c4834a70491911b7f750",
key
)
|
train
|
https://github.com/ethereum/eth-account/blob/335199b815ae34fea87f1523e2f29777fd52946e/eth_account/account.py#L333-L393
|
[
"def sign_message_hash(key, msg_hash):\n signature = key.sign_msg_hash(msg_hash)\n (v_raw, r, s) = signature.vrs\n v = to_eth_v(v_raw)\n eth_signature_bytes = to_bytes32(r) + to_bytes32(s) + to_bytes(v)\n return (v, r, s, eth_signature_bytes)\n"
] |
class Account(object):
'''
This is the primary entry point for working with Ethereum private keys.
It does **not** require a connection to an Ethereum node.
'''
_keys = keys
default_kdf = os.getenv('ETH_ACCOUNT_KDF', 'scrypt')
'''
The default key deriviation function (KDF) to use when encrypting a private key. If the
environment variable :envvar:`ETH_ACCOUNT_KDF` is set, it's value will be used as the default.
Otherwise, 'scrypt' will be used as the default.
'''
@combomethod
def create(self, extra_entropy=''):
'''
Creates a new private key, and returns it as a :class:`~eth_account.local.LocalAccount`.
:param extra_entropy: Add extra randomness to whatever randomness your OS can provide
:type extra_entropy: str or bytes or int
:returns: an object with private key and convenience methods
.. code-block:: python
>>> from eth_account import Account
>>> acct = Account.create('KEYSMASH FJAFJKLDSKF7JKFDJ 1530')
>>> acct.address
'0x5ce9454909639D2D17A3F753ce7d93fa0b9aB12E'
>>> acct.privateKey
b"\\xb2\\}\\xb3\\x1f\\xee\\xd9\\x12''\\xbf\\t9\\xdcv\\x9a\\x96VK-\\xe4\\xc4rm\\x03[6\\xec\\xf1\\xe5\\xb3d"
# These methods are also available: signHash(), signTransaction(), encrypt()
# They correspond to the same-named methods in Account.*
# but without the private key argument
'''
extra_key_bytes = text_if_str(to_bytes, extra_entropy)
key_bytes = keccak(os.urandom(32) + extra_key_bytes)
return self.privateKeyToAccount(key_bytes)
@staticmethod
def decrypt(keyfile_json, password):
'''
Decrypts a private key that was encrypted using an Ethereum client or
:meth:`~Account.encrypt`.
:param keyfile_json: The encrypted key
:type keyfile_json: dict or str
:param str password: The password that was used to encrypt the key
:returns: the raw private key
:rtype: ~hexbytes.main.HexBytes
.. code-block:: python
>>> encrypted = {
'address': '5ce9454909639d2d17a3f753ce7d93fa0b9ab12e',
'crypto': {'cipher': 'aes-128-ctr',
'cipherparams': {'iv': '78f214584844e0b241b433d7c3bb8d5f'},
'ciphertext': 'd6dbb56e4f54ba6db2e8dc14df17cb7352fdce03681dd3f90ce4b6c1d5af2c4f',
'kdf': 'pbkdf2',
'kdfparams': {'c': 1000000,
'dklen': 32,
'prf': 'hmac-sha256',
'salt': '45cf943b4de2c05c2c440ef96af914a2'},
'mac': 'f5e1af09df5ded25c96fcf075ada313fb6f79735a914adc8cb02e8ddee7813c3'},
'id': 'b812f3f9-78cc-462a-9e89-74418aa27cb0',
'version': 3}
>>> import getpass
>>> Account.decrypt(encrypted, getpass.getpass())
HexBytes('0xb25c7db31feed9122727bf0939dc769a96564b2de4c4726d035b36ecf1e5b364')
'''
if isinstance(keyfile_json, str):
keyfile = json.loads(keyfile_json)
elif is_dict(keyfile_json):
keyfile = keyfile_json
else:
raise TypeError("The keyfile should be supplied as a JSON string, or a dictionary.")
password_bytes = text_if_str(to_bytes, password)
return HexBytes(decode_keyfile_json(keyfile, password_bytes))
@classmethod
def encrypt(cls, private_key, password, kdf=None, iterations=None):
'''
Creates a dictionary with an encrypted version of your private key.
To import this keyfile into Ethereum clients like geth and parity:
encode this dictionary with :func:`json.dumps` and save it to disk where your
client keeps key files.
:param private_key: The raw private key
:type private_key: hex str, bytes, int or :class:`eth_keys.datatypes.PrivateKey`
:param str password: The password which you will need to unlock the account in your client
:param str kdf: The key derivation function to use when encrypting your private key
:param int iterations: The work factor for the key derivation function
:returns: The data to use in your encrypted file
:rtype: dict
.. code-block:: python
>>> import getpass
>>> encrypted = Account.encrypt(
0xb25c7db31feed9122727bf0939dc769a96564b2de4c4726d035b36ecf1e5b364,
getpass.getpass()
)
{
'address': '5ce9454909639d2d17a3f753ce7d93fa0b9ab12e',
'crypto': {
'cipher': 'aes-128-ctr',
'cipherparams': {
'iv': '0b7845a5c3597d3d378bde9b7c7319b7'
},
'ciphertext': 'a494f1feb3c854e99c1ff01e6aaa17d43c0752009073503b908457dc8de5d2a5', # noqa: E501
'kdf': 'scrypt',
'kdfparams': {
'dklen': 32,
'n': 262144,
'p': 8,
'r': 1,
'salt': '13c4a48123affaa29189e9097726c698'
},
'mac': 'f4cfb027eb0af9bd7a320b4374a3fa7bef02cfbafe0ec5d1fd7ad129401de0b1'
},
'id': 'a60e0578-0e5b-4a75-b991-d55ec6451a6f',
'version': 3
}
>>> with open('my-keyfile', 'w') as f:
f.write(json.dumps(encrypted))
'''
if isinstance(private_key, keys.PrivateKey):
key_bytes = private_key.to_bytes()
else:
key_bytes = HexBytes(private_key)
if kdf is None:
kdf = cls.default_kdf
password_bytes = text_if_str(to_bytes, password)
assert len(key_bytes) == 32
return create_keyfile_json(key_bytes, password_bytes, kdf=kdf, iterations=iterations)
@combomethod
def privateKeyToAccount(self, private_key):
'''
Returns a convenient object for working with the given private key.
:param private_key: The raw private key
:type private_key: hex str, bytes, int or :class:`eth_keys.datatypes.PrivateKey`
:return: object with methods for signing and encrypting
:rtype: LocalAccount
.. code-block:: python
>>> acct = Account.privateKeyToAccount(
0xb25c7db31feed9122727bf0939dc769a96564b2de4c4726d035b36ecf1e5b364)
>>> acct.address
'0x5ce9454909639D2D17A3F753ce7d93fa0b9aB12E'
>>> acct.privateKey
b"\\xb2\\}\\xb3\\x1f\\xee\\xd9\\x12''\\xbf\\t9\\xdcv\\x9a\\x96VK-\\xe4\\xc4rm\\x03[6\\xec\\xf1\\xe5\\xb3d"
# These methods are also available: signHash(), signTransaction(), encrypt()
# They correspond to the same-named methods in Account.*
# but without the private key argument
'''
key = self._parsePrivateKey(private_key)
return LocalAccount(key, self)
@combomethod
def recoverHash(self, message_hash, vrs=None, signature=None):
'''
Get the address of the account that signed the message with the given hash.
You must specify exactly one of: vrs or signature
:param message_hash: the hash of the message that you want to verify
:type message_hash: hex str or bytes or int
:param vrs: the three pieces generated by an elliptic curve signature
:type vrs: tuple(v, r, s), each element is hex str, bytes or int
:param signature: signature bytes concatenated as r+s+v
:type signature: hex str or bytes or int
:returns: address of signer, hex-encoded & checksummed
:rtype: str
.. code-block:: python
>>> msg = "I♥SF"
>>> msghash = '0x1476abb745d423bf09273f1afd887d951181d25adc66c4834a70491911b7f750'
>>> vrs = (
28,
'0xe6ca9bba58c88611fad66a6ce8f996908195593807c4b38bd528d2cff09d4eb3',
'0x3e5bfbbf4d3e39b1a2fd816a7680c19ebebaf3a141b239934ad43cb33fcec8ce')
>>> Account.recoverHash(msghash, vrs=vrs)
'0x5ce9454909639D2D17A3F753ce7d93fa0b9aB12E'
# All of these recover calls are equivalent:
# variations on msghash
>>> msghash = b"\\x14v\\xab\\xb7E\\xd4#\\xbf\\t'?\\x1a\\xfd\\x88}\\x95\\x11\\x81\\xd2Z\\xdcf\\xc4\\x83JpI\\x19\\x11\\xb7\\xf7P" # noqa: E501
>>> Account.recoverHash(msghash, vrs=vrs)
>>> msghash = 0x1476abb745d423bf09273f1afd887d951181d25adc66c4834a70491911b7f750
>>> Account.recoverHash(msghash, vrs=vrs)
# variations on vrs
>>> vrs = (
'0x1c',
'0xe6ca9bba58c88611fad66a6ce8f996908195593807c4b38bd528d2cff09d4eb3',
'0x3e5bfbbf4d3e39b1a2fd816a7680c19ebebaf3a141b239934ad43cb33fcec8ce')
>>> Account.recoverHash(msghash, vrs=vrs)
>>> vrs = (
b'\\x1c',
b'\\xe6\\xca\\x9b\\xbaX\\xc8\\x86\\x11\\xfa\\xd6jl\\xe8\\xf9\\x96\\x90\\x81\\x95Y8\\x07\\xc4\\xb3\\x8b\\xd5(\\xd2\\xcf\\xf0\\x9dN\\xb3', # noqa: E501
b'>[\\xfb\\xbfM>9\\xb1\\xa2\\xfd\\x81jv\\x80\\xc1\\x9e\\xbe\\xba\\xf3\\xa1A\\xb29\\x93J\\xd4<\\xb3?\\xce\\xc8\\xce') # noqa: E501
>>> Account.recoverHash(msghash, vrs=vrs)
>>> vrs = (
0x1c,
0xe6ca9bba58c88611fad66a6ce8f996908195593807c4b38bd528d2cff09d4eb3,
0x3e5bfbbf4d3e39b1a2fd816a7680c19ebebaf3a141b239934ad43cb33fcec8ce)
>>> Account.recoverHash(msghash, vrs=vrs)
# variations on signature
>>> signature = '0xe6ca9bba58c88611fad66a6ce8f996908195593807c4b38bd528d2cff09d4eb33e5bfbbf4d3e39b1a2fd816a7680c19ebebaf3a141b239934ad43cb33fcec8ce1c' # noqa: E501
>>> Account.recoverHash(msghash, signature=signature)
>>> signature = b'\\xe6\\xca\\x9b\\xbaX\\xc8\\x86\\x11\\xfa\\xd6jl\\xe8\\xf9\\x96\\x90\\x81\\x95Y8\\x07\\xc4\\xb3\\x8b\\xd5(\\xd2\\xcf\\xf0\\x9dN\\xb3>[\\xfb\\xbfM>9\\xb1\\xa2\\xfd\\x81jv\\x80\\xc1\\x9e\\xbe\\xba\\xf3\\xa1A\\xb29\\x93J\\xd4<\\xb3?\\xce\\xc8\\xce\\x1c' # noqa: E501
>>> Account.recoverHash(msghash, signature=signature)
>>> signature = 0xe6ca9bba58c88611fad66a6ce8f996908195593807c4b38bd528d2cff09d4eb33e5bfbbf4d3e39b1a2fd816a7680c19ebebaf3a141b239934ad43cb33fcec8ce1c # noqa: E501
>>> Account.recoverHash(msghash, signature=signature)
'''
hash_bytes = HexBytes(message_hash)
if len(hash_bytes) != 32:
raise ValueError("The message hash must be exactly 32-bytes")
if vrs is not None:
v, r, s = map(hexstr_if_str(to_int), vrs)
v_standard = to_standard_v(v)
signature_obj = self._keys.Signature(vrs=(v_standard, r, s))
elif signature is not None:
signature_bytes = HexBytes(signature)
signature_bytes_standard = to_standard_signature_bytes(signature_bytes)
signature_obj = self._keys.Signature(signature_bytes=signature_bytes_standard)
else:
raise TypeError("You must supply the vrs tuple or the signature bytes")
pubkey = signature_obj.recover_public_key_from_msg_hash(hash_bytes)
return pubkey.to_checksum_address()
@combomethod
def recoverTransaction(self, serialized_transaction):
'''
Get the address of the account that signed this transaction.
:param serialized_transaction: the complete signed transaction
:type serialized_transaction: hex str, bytes or int
:returns: address of signer, hex-encoded & checksummed
:rtype: str
.. code-block:: python
>>> raw_transaction = '0xf86a8086d55698372431831e848094f0109fc8df283027b6285cc889f5aa624eac1f55843b9aca008025a009ebb6ca057a0535d6186462bc0b465b561c94a295bdb0621fc19208ab149a9ca0440ffd775ce91a833ab410777204d5341a6f9fa91216a6f3ee2c051fea6a0428', # noqa: E501
>>> Account.recoverTransaction(raw_transaction)
'0x2c7536E3605D9C16a7a3D7b1898e529396a65c23'
'''
txn_bytes = HexBytes(serialized_transaction)
txn = Transaction.from_bytes(txn_bytes)
msg_hash = hash_of_signed_transaction(txn)
return self.recoverHash(msg_hash, vrs=vrs_from(txn))
def setKeyBackend(self, backend):
'''
Change the backend used by the underlying eth-keys library.
*(The default is fine for most users)*
:param backend: any backend that works in
`eth_keys.KeyApi(backend) <https://github.com/ethereum/eth-keys/#keyapibackendnone>`_
'''
self._keys = KeyAPI(backend)
@combomethod
@combomethod
def signTransaction(self, transaction_dict, private_key):
'''
Sign a transaction using a local private key. Produces signature details
and the hex-encoded transaction suitable for broadcast using
:meth:`w3.eth.sendRawTransaction() <web3.eth.Eth.sendRawTransaction>`.
Create the transaction dict for a contract method with
`my_contract.functions.my_function().buildTransaction()
<http://web3py.readthedocs.io/en/latest/contracts.html#methods>`_
:param dict transaction_dict: the transaction with keys:
nonce, chainId, to, data, value, gas, and gasPrice.
:param private_key: the private key to sign the data with
:type private_key: hex str, bytes, int or :class:`eth_keys.datatypes.PrivateKey`
:returns: Various details about the signature - most
importantly the fields: v, r, and s
:rtype: AttributeDict
.. code-block:: python
>>> transaction = {
# Note that the address must be in checksum format or native bytes:
'to': '0xF0109fC8DF283027b6285cc889F5aA624EaC1F55',
'value': 1000000000,
'gas': 2000000,
'gasPrice': 234567897654321,
'nonce': 0,
'chainId': 1
}
>>> key = '0x4c0883a69102937d6231471b5dbb6204fe5129617082792ae468d01a3f362318'
>>> signed = Account.signTransaction(transaction, key)
{'hash': HexBytes('0x6893a6ee8df79b0f5d64a180cd1ef35d030f3e296a5361cf04d02ce720d32ec5'),
'r': 4487286261793418179817841024889747115779324305375823110249149479905075174044,
'rawTransaction': HexBytes('0xf86a8086d55698372431831e848094f0109fc8df283027b6285cc889f5aa624eac1f55843b9aca008025a009ebb6ca057a0535d6186462bc0b465b561c94a295bdb0621fc19208ab149a9ca0440ffd775ce91a833ab410777204d5341a6f9fa91216a6f3ee2c051fea6a0428'), # noqa: E501
's': 30785525769477805655994251009256770582792548537338581640010273753578382951464,
'v': 37}
>>> w3.eth.sendRawTransaction(signed.rawTransaction)
'''
if not isinstance(transaction_dict, Mapping):
raise TypeError("transaction_dict must be dict-like, got %r" % transaction_dict)
account = self.privateKeyToAccount(private_key)
# allow from field, *only* if it matches the private key
if 'from' in transaction_dict:
if transaction_dict['from'] == account.address:
sanitized_transaction = dissoc(transaction_dict, 'from')
else:
raise TypeError("from field must match key's %s, but it was %s" % (
account.address,
transaction_dict['from'],
))
else:
sanitized_transaction = transaction_dict
# sign transaction
(
v,
r,
s,
rlp_encoded,
) = sign_transaction_dict(account._key_obj, sanitized_transaction)
transaction_hash = keccak(rlp_encoded)
return AttributeDict({
'rawTransaction': HexBytes(rlp_encoded),
'hash': HexBytes(transaction_hash),
'r': r,
's': s,
'v': v,
})
@combomethod
def _parsePrivateKey(self, key):
'''
Generate a :class:`eth_keys.datatypes.PrivateKey` from the provided key. If the
key is already of type :class:`eth_keys.datatypes.PrivateKey`, return the key.
:param key: the private key from which a :class:`eth_keys.datatypes.PrivateKey`
will be generated
:type key: hex str, bytes, int or :class:`eth_keys.datatypes.PrivateKey`
:returns: the provided key represented as a :class:`eth_keys.datatypes.PrivateKey`
'''
if isinstance(key, self._keys.PrivateKey):
return key
try:
return self._keys.PrivateKey(HexBytes(key))
except ValidationError as original_exception:
raise ValueError(
"The private key must be exactly 32 bytes long, instead of "
"%d bytes." % len(key)
) from original_exception
|
ethereum/eth-account
|
eth_account/account.py
|
Account.signTransaction
|
python
|
def signTransaction(self, transaction_dict, private_key):
'''
Sign a transaction using a local private key. Produces signature details
and the hex-encoded transaction suitable for broadcast using
:meth:`w3.eth.sendRawTransaction() <web3.eth.Eth.sendRawTransaction>`.
Create the transaction dict for a contract method with
`my_contract.functions.my_function().buildTransaction()
<http://web3py.readthedocs.io/en/latest/contracts.html#methods>`_
:param dict transaction_dict: the transaction with keys:
nonce, chainId, to, data, value, gas, and gasPrice.
:param private_key: the private key to sign the data with
:type private_key: hex str, bytes, int or :class:`eth_keys.datatypes.PrivateKey`
:returns: Various details about the signature - most
importantly the fields: v, r, and s
:rtype: AttributeDict
.. code-block:: python
>>> transaction = {
# Note that the address must be in checksum format or native bytes:
'to': '0xF0109fC8DF283027b6285cc889F5aA624EaC1F55',
'value': 1000000000,
'gas': 2000000,
'gasPrice': 234567897654321,
'nonce': 0,
'chainId': 1
}
>>> key = '0x4c0883a69102937d6231471b5dbb6204fe5129617082792ae468d01a3f362318'
>>> signed = Account.signTransaction(transaction, key)
{'hash': HexBytes('0x6893a6ee8df79b0f5d64a180cd1ef35d030f3e296a5361cf04d02ce720d32ec5'),
'r': 4487286261793418179817841024889747115779324305375823110249149479905075174044,
'rawTransaction': HexBytes('0xf86a8086d55698372431831e848094f0109fc8df283027b6285cc889f5aa624eac1f55843b9aca008025a009ebb6ca057a0535d6186462bc0b465b561c94a295bdb0621fc19208ab149a9ca0440ffd775ce91a833ab410777204d5341a6f9fa91216a6f3ee2c051fea6a0428'), # noqa: E501
's': 30785525769477805655994251009256770582792548537338581640010273753578382951464,
'v': 37}
>>> w3.eth.sendRawTransaction(signed.rawTransaction)
'''
if not isinstance(transaction_dict, Mapping):
raise TypeError("transaction_dict must be dict-like, got %r" % transaction_dict)
account = self.privateKeyToAccount(private_key)
# allow from field, *only* if it matches the private key
if 'from' in transaction_dict:
if transaction_dict['from'] == account.address:
sanitized_transaction = dissoc(transaction_dict, 'from')
else:
raise TypeError("from field must match key's %s, but it was %s" % (
account.address,
transaction_dict['from'],
))
else:
sanitized_transaction = transaction_dict
# sign transaction
(
v,
r,
s,
rlp_encoded,
) = sign_transaction_dict(account._key_obj, sanitized_transaction)
transaction_hash = keccak(rlp_encoded)
return AttributeDict({
'rawTransaction': HexBytes(rlp_encoded),
'hash': HexBytes(transaction_hash),
'r': r,
's': s,
'v': v,
})
|
Sign a transaction using a local private key. Produces signature details
and the hex-encoded transaction suitable for broadcast using
:meth:`w3.eth.sendRawTransaction() <web3.eth.Eth.sendRawTransaction>`.
Create the transaction dict for a contract method with
`my_contract.functions.my_function().buildTransaction()
<http://web3py.readthedocs.io/en/latest/contracts.html#methods>`_
:param dict transaction_dict: the transaction with keys:
nonce, chainId, to, data, value, gas, and gasPrice.
:param private_key: the private key to sign the data with
:type private_key: hex str, bytes, int or :class:`eth_keys.datatypes.PrivateKey`
:returns: Various details about the signature - most
importantly the fields: v, r, and s
:rtype: AttributeDict
.. code-block:: python
>>> transaction = {
# Note that the address must be in checksum format or native bytes:
'to': '0xF0109fC8DF283027b6285cc889F5aA624EaC1F55',
'value': 1000000000,
'gas': 2000000,
'gasPrice': 234567897654321,
'nonce': 0,
'chainId': 1
}
>>> key = '0x4c0883a69102937d6231471b5dbb6204fe5129617082792ae468d01a3f362318'
>>> signed = Account.signTransaction(transaction, key)
{'hash': HexBytes('0x6893a6ee8df79b0f5d64a180cd1ef35d030f3e296a5361cf04d02ce720d32ec5'),
'r': 4487286261793418179817841024889747115779324305375823110249149479905075174044,
'rawTransaction': HexBytes('0xf86a8086d55698372431831e848094f0109fc8df283027b6285cc889f5aa624eac1f55843b9aca008025a009ebb6ca057a0535d6186462bc0b465b561c94a295bdb0621fc19208ab149a9ca0440ffd775ce91a833ab410777204d5341a6f9fa91216a6f3ee2c051fea6a0428'), # noqa: E501
's': 30785525769477805655994251009256770582792548537338581640010273753578382951464,
'v': 37}
>>> w3.eth.sendRawTransaction(signed.rawTransaction)
|
train
|
https://github.com/ethereum/eth-account/blob/335199b815ae34fea87f1523e2f29777fd52946e/eth_account/account.py#L396-L467
| null |
class Account(object):
'''
This is the primary entry point for working with Ethereum private keys.
It does **not** require a connection to an Ethereum node.
'''
_keys = keys
default_kdf = os.getenv('ETH_ACCOUNT_KDF', 'scrypt')
'''
The default key deriviation function (KDF) to use when encrypting a private key. If the
environment variable :envvar:`ETH_ACCOUNT_KDF` is set, it's value will be used as the default.
Otherwise, 'scrypt' will be used as the default.
'''
@combomethod
def create(self, extra_entropy=''):
'''
Creates a new private key, and returns it as a :class:`~eth_account.local.LocalAccount`.
:param extra_entropy: Add extra randomness to whatever randomness your OS can provide
:type extra_entropy: str or bytes or int
:returns: an object with private key and convenience methods
.. code-block:: python
>>> from eth_account import Account
>>> acct = Account.create('KEYSMASH FJAFJKLDSKF7JKFDJ 1530')
>>> acct.address
'0x5ce9454909639D2D17A3F753ce7d93fa0b9aB12E'
>>> acct.privateKey
b"\\xb2\\}\\xb3\\x1f\\xee\\xd9\\x12''\\xbf\\t9\\xdcv\\x9a\\x96VK-\\xe4\\xc4rm\\x03[6\\xec\\xf1\\xe5\\xb3d"
# These methods are also available: signHash(), signTransaction(), encrypt()
# They correspond to the same-named methods in Account.*
# but without the private key argument
'''
extra_key_bytes = text_if_str(to_bytes, extra_entropy)
key_bytes = keccak(os.urandom(32) + extra_key_bytes)
return self.privateKeyToAccount(key_bytes)
@staticmethod
def decrypt(keyfile_json, password):
'''
Decrypts a private key that was encrypted using an Ethereum client or
:meth:`~Account.encrypt`.
:param keyfile_json: The encrypted key
:type keyfile_json: dict or str
:param str password: The password that was used to encrypt the key
:returns: the raw private key
:rtype: ~hexbytes.main.HexBytes
.. code-block:: python
>>> encrypted = {
'address': '5ce9454909639d2d17a3f753ce7d93fa0b9ab12e',
'crypto': {'cipher': 'aes-128-ctr',
'cipherparams': {'iv': '78f214584844e0b241b433d7c3bb8d5f'},
'ciphertext': 'd6dbb56e4f54ba6db2e8dc14df17cb7352fdce03681dd3f90ce4b6c1d5af2c4f',
'kdf': 'pbkdf2',
'kdfparams': {'c': 1000000,
'dklen': 32,
'prf': 'hmac-sha256',
'salt': '45cf943b4de2c05c2c440ef96af914a2'},
'mac': 'f5e1af09df5ded25c96fcf075ada313fb6f79735a914adc8cb02e8ddee7813c3'},
'id': 'b812f3f9-78cc-462a-9e89-74418aa27cb0',
'version': 3}
>>> import getpass
>>> Account.decrypt(encrypted, getpass.getpass())
HexBytes('0xb25c7db31feed9122727bf0939dc769a96564b2de4c4726d035b36ecf1e5b364')
'''
if isinstance(keyfile_json, str):
keyfile = json.loads(keyfile_json)
elif is_dict(keyfile_json):
keyfile = keyfile_json
else:
raise TypeError("The keyfile should be supplied as a JSON string, or a dictionary.")
password_bytes = text_if_str(to_bytes, password)
return HexBytes(decode_keyfile_json(keyfile, password_bytes))
@classmethod
def encrypt(cls, private_key, password, kdf=None, iterations=None):
'''
Creates a dictionary with an encrypted version of your private key.
To import this keyfile into Ethereum clients like geth and parity:
encode this dictionary with :func:`json.dumps` and save it to disk where your
client keeps key files.
:param private_key: The raw private key
:type private_key: hex str, bytes, int or :class:`eth_keys.datatypes.PrivateKey`
:param str password: The password which you will need to unlock the account in your client
:param str kdf: The key derivation function to use when encrypting your private key
:param int iterations: The work factor for the key derivation function
:returns: The data to use in your encrypted file
:rtype: dict
.. code-block:: python
>>> import getpass
>>> encrypted = Account.encrypt(
0xb25c7db31feed9122727bf0939dc769a96564b2de4c4726d035b36ecf1e5b364,
getpass.getpass()
)
{
'address': '5ce9454909639d2d17a3f753ce7d93fa0b9ab12e',
'crypto': {
'cipher': 'aes-128-ctr',
'cipherparams': {
'iv': '0b7845a5c3597d3d378bde9b7c7319b7'
},
'ciphertext': 'a494f1feb3c854e99c1ff01e6aaa17d43c0752009073503b908457dc8de5d2a5', # noqa: E501
'kdf': 'scrypt',
'kdfparams': {
'dklen': 32,
'n': 262144,
'p': 8,
'r': 1,
'salt': '13c4a48123affaa29189e9097726c698'
},
'mac': 'f4cfb027eb0af9bd7a320b4374a3fa7bef02cfbafe0ec5d1fd7ad129401de0b1'
},
'id': 'a60e0578-0e5b-4a75-b991-d55ec6451a6f',
'version': 3
}
>>> with open('my-keyfile', 'w') as f:
f.write(json.dumps(encrypted))
'''
if isinstance(private_key, keys.PrivateKey):
key_bytes = private_key.to_bytes()
else:
key_bytes = HexBytes(private_key)
if kdf is None:
kdf = cls.default_kdf
password_bytes = text_if_str(to_bytes, password)
assert len(key_bytes) == 32
return create_keyfile_json(key_bytes, password_bytes, kdf=kdf, iterations=iterations)
@combomethod
def privateKeyToAccount(self, private_key):
'''
Returns a convenient object for working with the given private key.
:param private_key: The raw private key
:type private_key: hex str, bytes, int or :class:`eth_keys.datatypes.PrivateKey`
:return: object with methods for signing and encrypting
:rtype: LocalAccount
.. code-block:: python
>>> acct = Account.privateKeyToAccount(
0xb25c7db31feed9122727bf0939dc769a96564b2de4c4726d035b36ecf1e5b364)
>>> acct.address
'0x5ce9454909639D2D17A3F753ce7d93fa0b9aB12E'
>>> acct.privateKey
b"\\xb2\\}\\xb3\\x1f\\xee\\xd9\\x12''\\xbf\\t9\\xdcv\\x9a\\x96VK-\\xe4\\xc4rm\\x03[6\\xec\\xf1\\xe5\\xb3d"
# These methods are also available: signHash(), signTransaction(), encrypt()
# They correspond to the same-named methods in Account.*
# but without the private key argument
'''
key = self._parsePrivateKey(private_key)
return LocalAccount(key, self)
@combomethod
def recoverHash(self, message_hash, vrs=None, signature=None):
'''
Get the address of the account that signed the message with the given hash.
You must specify exactly one of: vrs or signature
:param message_hash: the hash of the message that you want to verify
:type message_hash: hex str or bytes or int
:param vrs: the three pieces generated by an elliptic curve signature
:type vrs: tuple(v, r, s), each element is hex str, bytes or int
:param signature: signature bytes concatenated as r+s+v
:type signature: hex str or bytes or int
:returns: address of signer, hex-encoded & checksummed
:rtype: str
.. code-block:: python
>>> msg = "I♥SF"
>>> msghash = '0x1476abb745d423bf09273f1afd887d951181d25adc66c4834a70491911b7f750'
>>> vrs = (
28,
'0xe6ca9bba58c88611fad66a6ce8f996908195593807c4b38bd528d2cff09d4eb3',
'0x3e5bfbbf4d3e39b1a2fd816a7680c19ebebaf3a141b239934ad43cb33fcec8ce')
>>> Account.recoverHash(msghash, vrs=vrs)
'0x5ce9454909639D2D17A3F753ce7d93fa0b9aB12E'
# All of these recover calls are equivalent:
# variations on msghash
>>> msghash = b"\\x14v\\xab\\xb7E\\xd4#\\xbf\\t'?\\x1a\\xfd\\x88}\\x95\\x11\\x81\\xd2Z\\xdcf\\xc4\\x83JpI\\x19\\x11\\xb7\\xf7P" # noqa: E501
>>> Account.recoverHash(msghash, vrs=vrs)
>>> msghash = 0x1476abb745d423bf09273f1afd887d951181d25adc66c4834a70491911b7f750
>>> Account.recoverHash(msghash, vrs=vrs)
# variations on vrs
>>> vrs = (
'0x1c',
'0xe6ca9bba58c88611fad66a6ce8f996908195593807c4b38bd528d2cff09d4eb3',
'0x3e5bfbbf4d3e39b1a2fd816a7680c19ebebaf3a141b239934ad43cb33fcec8ce')
>>> Account.recoverHash(msghash, vrs=vrs)
>>> vrs = (
b'\\x1c',
b'\\xe6\\xca\\x9b\\xbaX\\xc8\\x86\\x11\\xfa\\xd6jl\\xe8\\xf9\\x96\\x90\\x81\\x95Y8\\x07\\xc4\\xb3\\x8b\\xd5(\\xd2\\xcf\\xf0\\x9dN\\xb3', # noqa: E501
b'>[\\xfb\\xbfM>9\\xb1\\xa2\\xfd\\x81jv\\x80\\xc1\\x9e\\xbe\\xba\\xf3\\xa1A\\xb29\\x93J\\xd4<\\xb3?\\xce\\xc8\\xce') # noqa: E501
>>> Account.recoverHash(msghash, vrs=vrs)
>>> vrs = (
0x1c,
0xe6ca9bba58c88611fad66a6ce8f996908195593807c4b38bd528d2cff09d4eb3,
0x3e5bfbbf4d3e39b1a2fd816a7680c19ebebaf3a141b239934ad43cb33fcec8ce)
>>> Account.recoverHash(msghash, vrs=vrs)
# variations on signature
>>> signature = '0xe6ca9bba58c88611fad66a6ce8f996908195593807c4b38bd528d2cff09d4eb33e5bfbbf4d3e39b1a2fd816a7680c19ebebaf3a141b239934ad43cb33fcec8ce1c' # noqa: E501
>>> Account.recoverHash(msghash, signature=signature)
>>> signature = b'\\xe6\\xca\\x9b\\xbaX\\xc8\\x86\\x11\\xfa\\xd6jl\\xe8\\xf9\\x96\\x90\\x81\\x95Y8\\x07\\xc4\\xb3\\x8b\\xd5(\\xd2\\xcf\\xf0\\x9dN\\xb3>[\\xfb\\xbfM>9\\xb1\\xa2\\xfd\\x81jv\\x80\\xc1\\x9e\\xbe\\xba\\xf3\\xa1A\\xb29\\x93J\\xd4<\\xb3?\\xce\\xc8\\xce\\x1c' # noqa: E501
>>> Account.recoverHash(msghash, signature=signature)
>>> signature = 0xe6ca9bba58c88611fad66a6ce8f996908195593807c4b38bd528d2cff09d4eb33e5bfbbf4d3e39b1a2fd816a7680c19ebebaf3a141b239934ad43cb33fcec8ce1c # noqa: E501
>>> Account.recoverHash(msghash, signature=signature)
'''
hash_bytes = HexBytes(message_hash)
if len(hash_bytes) != 32:
raise ValueError("The message hash must be exactly 32-bytes")
if vrs is not None:
v, r, s = map(hexstr_if_str(to_int), vrs)
v_standard = to_standard_v(v)
signature_obj = self._keys.Signature(vrs=(v_standard, r, s))
elif signature is not None:
signature_bytes = HexBytes(signature)
signature_bytes_standard = to_standard_signature_bytes(signature_bytes)
signature_obj = self._keys.Signature(signature_bytes=signature_bytes_standard)
else:
raise TypeError("You must supply the vrs tuple or the signature bytes")
pubkey = signature_obj.recover_public_key_from_msg_hash(hash_bytes)
return pubkey.to_checksum_address()
@combomethod
def recoverTransaction(self, serialized_transaction):
'''
Get the address of the account that signed this transaction.
:param serialized_transaction: the complete signed transaction
:type serialized_transaction: hex str, bytes or int
:returns: address of signer, hex-encoded & checksummed
:rtype: str
.. code-block:: python
>>> raw_transaction = '0xf86a8086d55698372431831e848094f0109fc8df283027b6285cc889f5aa624eac1f55843b9aca008025a009ebb6ca057a0535d6186462bc0b465b561c94a295bdb0621fc19208ab149a9ca0440ffd775ce91a833ab410777204d5341a6f9fa91216a6f3ee2c051fea6a0428', # noqa: E501
>>> Account.recoverTransaction(raw_transaction)
'0x2c7536E3605D9C16a7a3D7b1898e529396a65c23'
'''
txn_bytes = HexBytes(serialized_transaction)
txn = Transaction.from_bytes(txn_bytes)
msg_hash = hash_of_signed_transaction(txn)
return self.recoverHash(msg_hash, vrs=vrs_from(txn))
def setKeyBackend(self, backend):
'''
Change the backend used by the underlying eth-keys library.
*(The default is fine for most users)*
:param backend: any backend that works in
`eth_keys.KeyApi(backend) <https://github.com/ethereum/eth-keys/#keyapibackendnone>`_
'''
self._keys = KeyAPI(backend)
@combomethod
def signHash(self, message_hash, private_key):
'''
Sign the hash provided.
.. WARNING:: *Never* sign a hash that you didn't generate,
it can be an arbitrary transaction. For example, it might
send all of your account's ether to an attacker.
If you would like compatibility with
:meth:`w3.eth.sign() <web3.eth.Eth.sign>`
you can use :meth:`~eth_account.messages.defunct_hash_message`.
Several other message standards are proposed, but none have a clear
consensus. You'll need to manually comply with any of those message standards manually.
:param message_hash: the 32-byte message hash to be signed
:type message_hash: hex str, bytes or int
:param private_key: the key to sign the message with
:type private_key: hex str, bytes, int or :class:`eth_keys.datatypes.PrivateKey`
:returns: Various details about the signature - most
importantly the fields: v, r, and s
:rtype: ~eth_account.datastructures.AttributeDict
.. code-block:: python
>>> msg = "I♥SF"
>>> from eth_account.messages import defunct_hash_message
>>> msghash = defunct_hash_message(text=msg)
HexBytes('0x1476abb745d423bf09273f1afd887d951181d25adc66c4834a70491911b7f750')
>>> key = "0xb25c7db31feed9122727bf0939dc769a96564b2de4c4726d035b36ecf1e5b364"
>>> Account.signHash(msghash, key)
{'messageHash': HexBytes('0x1476abb745d423bf09273f1afd887d951181d25adc66c4834a70491911b7f750'), # noqa: E501
'r': 104389933075820307925104709181714897380569894203213074526835978196648170704563,
's': 28205917190874851400050446352651915501321657673772411533993420917949420456142,
'signature': HexBytes('0xe6ca9bba58c88611fad66a6ce8f996908195593807c4b38bd528d2cff09d4eb33e5bfbbf4d3e39b1a2fd816a7680c19ebebaf3a141b239934ad43cb33fcec8ce1c'), # noqa: E501
'v': 28}
# these are equivalent:
>>> Account.signHash(
0x1476abb745d423bf09273f1afd887d951181d25adc66c4834a70491911b7f750,
key
)
>>> Account.signHash(
"0x1476abb745d423bf09273f1afd887d951181d25adc66c4834a70491911b7f750",
key
)
'''
msg_hash_bytes = HexBytes(message_hash)
if len(msg_hash_bytes) != 32:
raise ValueError("The message hash must be exactly 32-bytes")
key = self._parsePrivateKey(private_key)
(v, r, s, eth_signature_bytes) = sign_message_hash(key, msg_hash_bytes)
return AttributeDict({
'messageHash': msg_hash_bytes,
'r': r,
's': s,
'v': v,
'signature': HexBytes(eth_signature_bytes),
})
@combomethod
@combomethod
def _parsePrivateKey(self, key):
'''
Generate a :class:`eth_keys.datatypes.PrivateKey` from the provided key. If the
key is already of type :class:`eth_keys.datatypes.PrivateKey`, return the key.
:param key: the private key from which a :class:`eth_keys.datatypes.PrivateKey`
will be generated
:type key: hex str, bytes, int or :class:`eth_keys.datatypes.PrivateKey`
:returns: the provided key represented as a :class:`eth_keys.datatypes.PrivateKey`
'''
if isinstance(key, self._keys.PrivateKey):
return key
try:
return self._keys.PrivateKey(HexBytes(key))
except ValidationError as original_exception:
raise ValueError(
"The private key must be exactly 32 bytes long, instead of "
"%d bytes." % len(key)
) from original_exception
|
ethereum/eth-account
|
eth_account/account.py
|
Account._parsePrivateKey
|
python
|
def _parsePrivateKey(self, key):
'''
Generate a :class:`eth_keys.datatypes.PrivateKey` from the provided key. If the
key is already of type :class:`eth_keys.datatypes.PrivateKey`, return the key.
:param key: the private key from which a :class:`eth_keys.datatypes.PrivateKey`
will be generated
:type key: hex str, bytes, int or :class:`eth_keys.datatypes.PrivateKey`
:returns: the provided key represented as a :class:`eth_keys.datatypes.PrivateKey`
'''
if isinstance(key, self._keys.PrivateKey):
return key
try:
return self._keys.PrivateKey(HexBytes(key))
except ValidationError as original_exception:
raise ValueError(
"The private key must be exactly 32 bytes long, instead of "
"%d bytes." % len(key)
) from original_exception
|
Generate a :class:`eth_keys.datatypes.PrivateKey` from the provided key. If the
key is already of type :class:`eth_keys.datatypes.PrivateKey`, return the key.
:param key: the private key from which a :class:`eth_keys.datatypes.PrivateKey`
will be generated
:type key: hex str, bytes, int or :class:`eth_keys.datatypes.PrivateKey`
:returns: the provided key represented as a :class:`eth_keys.datatypes.PrivateKey`
|
train
|
https://github.com/ethereum/eth-account/blob/335199b815ae34fea87f1523e2f29777fd52946e/eth_account/account.py#L470-L489
| null |
class Account(object):
'''
This is the primary entry point for working with Ethereum private keys.
It does **not** require a connection to an Ethereum node.
'''
_keys = keys
default_kdf = os.getenv('ETH_ACCOUNT_KDF', 'scrypt')
'''
The default key deriviation function (KDF) to use when encrypting a private key. If the
environment variable :envvar:`ETH_ACCOUNT_KDF` is set, it's value will be used as the default.
Otherwise, 'scrypt' will be used as the default.
'''
@combomethod
def create(self, extra_entropy=''):
'''
Creates a new private key, and returns it as a :class:`~eth_account.local.LocalAccount`.
:param extra_entropy: Add extra randomness to whatever randomness your OS can provide
:type extra_entropy: str or bytes or int
:returns: an object with private key and convenience methods
.. code-block:: python
>>> from eth_account import Account
>>> acct = Account.create('KEYSMASH FJAFJKLDSKF7JKFDJ 1530')
>>> acct.address
'0x5ce9454909639D2D17A3F753ce7d93fa0b9aB12E'
>>> acct.privateKey
b"\\xb2\\}\\xb3\\x1f\\xee\\xd9\\x12''\\xbf\\t9\\xdcv\\x9a\\x96VK-\\xe4\\xc4rm\\x03[6\\xec\\xf1\\xe5\\xb3d"
# These methods are also available: signHash(), signTransaction(), encrypt()
# They correspond to the same-named methods in Account.*
# but without the private key argument
'''
extra_key_bytes = text_if_str(to_bytes, extra_entropy)
key_bytes = keccak(os.urandom(32) + extra_key_bytes)
return self.privateKeyToAccount(key_bytes)
@staticmethod
def decrypt(keyfile_json, password):
'''
Decrypts a private key that was encrypted using an Ethereum client or
:meth:`~Account.encrypt`.
:param keyfile_json: The encrypted key
:type keyfile_json: dict or str
:param str password: The password that was used to encrypt the key
:returns: the raw private key
:rtype: ~hexbytes.main.HexBytes
.. code-block:: python
>>> encrypted = {
'address': '5ce9454909639d2d17a3f753ce7d93fa0b9ab12e',
'crypto': {'cipher': 'aes-128-ctr',
'cipherparams': {'iv': '78f214584844e0b241b433d7c3bb8d5f'},
'ciphertext': 'd6dbb56e4f54ba6db2e8dc14df17cb7352fdce03681dd3f90ce4b6c1d5af2c4f',
'kdf': 'pbkdf2',
'kdfparams': {'c': 1000000,
'dklen': 32,
'prf': 'hmac-sha256',
'salt': '45cf943b4de2c05c2c440ef96af914a2'},
'mac': 'f5e1af09df5ded25c96fcf075ada313fb6f79735a914adc8cb02e8ddee7813c3'},
'id': 'b812f3f9-78cc-462a-9e89-74418aa27cb0',
'version': 3}
>>> import getpass
>>> Account.decrypt(encrypted, getpass.getpass())
HexBytes('0xb25c7db31feed9122727bf0939dc769a96564b2de4c4726d035b36ecf1e5b364')
'''
if isinstance(keyfile_json, str):
keyfile = json.loads(keyfile_json)
elif is_dict(keyfile_json):
keyfile = keyfile_json
else:
raise TypeError("The keyfile should be supplied as a JSON string, or a dictionary.")
password_bytes = text_if_str(to_bytes, password)
return HexBytes(decode_keyfile_json(keyfile, password_bytes))
@classmethod
def encrypt(cls, private_key, password, kdf=None, iterations=None):
'''
Creates a dictionary with an encrypted version of your private key.
To import this keyfile into Ethereum clients like geth and parity:
encode this dictionary with :func:`json.dumps` and save it to disk where your
client keeps key files.
:param private_key: The raw private key
:type private_key: hex str, bytes, int or :class:`eth_keys.datatypes.PrivateKey`
:param str password: The password which you will need to unlock the account in your client
:param str kdf: The key derivation function to use when encrypting your private key
:param int iterations: The work factor for the key derivation function
:returns: The data to use in your encrypted file
:rtype: dict
.. code-block:: python
>>> import getpass
>>> encrypted = Account.encrypt(
0xb25c7db31feed9122727bf0939dc769a96564b2de4c4726d035b36ecf1e5b364,
getpass.getpass()
)
{
'address': '5ce9454909639d2d17a3f753ce7d93fa0b9ab12e',
'crypto': {
'cipher': 'aes-128-ctr',
'cipherparams': {
'iv': '0b7845a5c3597d3d378bde9b7c7319b7'
},
'ciphertext': 'a494f1feb3c854e99c1ff01e6aaa17d43c0752009073503b908457dc8de5d2a5', # noqa: E501
'kdf': 'scrypt',
'kdfparams': {
'dklen': 32,
'n': 262144,
'p': 8,
'r': 1,
'salt': '13c4a48123affaa29189e9097726c698'
},
'mac': 'f4cfb027eb0af9bd7a320b4374a3fa7bef02cfbafe0ec5d1fd7ad129401de0b1'
},
'id': 'a60e0578-0e5b-4a75-b991-d55ec6451a6f',
'version': 3
}
>>> with open('my-keyfile', 'w') as f:
f.write(json.dumps(encrypted))
'''
if isinstance(private_key, keys.PrivateKey):
key_bytes = private_key.to_bytes()
else:
key_bytes = HexBytes(private_key)
if kdf is None:
kdf = cls.default_kdf
password_bytes = text_if_str(to_bytes, password)
assert len(key_bytes) == 32
return create_keyfile_json(key_bytes, password_bytes, kdf=kdf, iterations=iterations)
@combomethod
def privateKeyToAccount(self, private_key):
'''
Returns a convenient object for working with the given private key.
:param private_key: The raw private key
:type private_key: hex str, bytes, int or :class:`eth_keys.datatypes.PrivateKey`
:return: object with methods for signing and encrypting
:rtype: LocalAccount
.. code-block:: python
>>> acct = Account.privateKeyToAccount(
0xb25c7db31feed9122727bf0939dc769a96564b2de4c4726d035b36ecf1e5b364)
>>> acct.address
'0x5ce9454909639D2D17A3F753ce7d93fa0b9aB12E'
>>> acct.privateKey
b"\\xb2\\}\\xb3\\x1f\\xee\\xd9\\x12''\\xbf\\t9\\xdcv\\x9a\\x96VK-\\xe4\\xc4rm\\x03[6\\xec\\xf1\\xe5\\xb3d"
# These methods are also available: signHash(), signTransaction(), encrypt()
# They correspond to the same-named methods in Account.*
# but without the private key argument
'''
key = self._parsePrivateKey(private_key)
return LocalAccount(key, self)
@combomethod
def recoverHash(self, message_hash, vrs=None, signature=None):
'''
Get the address of the account that signed the message with the given hash.
You must specify exactly one of: vrs or signature
:param message_hash: the hash of the message that you want to verify
:type message_hash: hex str or bytes or int
:param vrs: the three pieces generated by an elliptic curve signature
:type vrs: tuple(v, r, s), each element is hex str, bytes or int
:param signature: signature bytes concatenated as r+s+v
:type signature: hex str or bytes or int
:returns: address of signer, hex-encoded & checksummed
:rtype: str
.. code-block:: python
>>> msg = "I♥SF"
>>> msghash = '0x1476abb745d423bf09273f1afd887d951181d25adc66c4834a70491911b7f750'
>>> vrs = (
28,
'0xe6ca9bba58c88611fad66a6ce8f996908195593807c4b38bd528d2cff09d4eb3',
'0x3e5bfbbf4d3e39b1a2fd816a7680c19ebebaf3a141b239934ad43cb33fcec8ce')
>>> Account.recoverHash(msghash, vrs=vrs)
'0x5ce9454909639D2D17A3F753ce7d93fa0b9aB12E'
# All of these recover calls are equivalent:
# variations on msghash
>>> msghash = b"\\x14v\\xab\\xb7E\\xd4#\\xbf\\t'?\\x1a\\xfd\\x88}\\x95\\x11\\x81\\xd2Z\\xdcf\\xc4\\x83JpI\\x19\\x11\\xb7\\xf7P" # noqa: E501
>>> Account.recoverHash(msghash, vrs=vrs)
>>> msghash = 0x1476abb745d423bf09273f1afd887d951181d25adc66c4834a70491911b7f750
>>> Account.recoverHash(msghash, vrs=vrs)
# variations on vrs
>>> vrs = (
'0x1c',
'0xe6ca9bba58c88611fad66a6ce8f996908195593807c4b38bd528d2cff09d4eb3',
'0x3e5bfbbf4d3e39b1a2fd816a7680c19ebebaf3a141b239934ad43cb33fcec8ce')
>>> Account.recoverHash(msghash, vrs=vrs)
>>> vrs = (
b'\\x1c',
b'\\xe6\\xca\\x9b\\xbaX\\xc8\\x86\\x11\\xfa\\xd6jl\\xe8\\xf9\\x96\\x90\\x81\\x95Y8\\x07\\xc4\\xb3\\x8b\\xd5(\\xd2\\xcf\\xf0\\x9dN\\xb3', # noqa: E501
b'>[\\xfb\\xbfM>9\\xb1\\xa2\\xfd\\x81jv\\x80\\xc1\\x9e\\xbe\\xba\\xf3\\xa1A\\xb29\\x93J\\xd4<\\xb3?\\xce\\xc8\\xce') # noqa: E501
>>> Account.recoverHash(msghash, vrs=vrs)
>>> vrs = (
0x1c,
0xe6ca9bba58c88611fad66a6ce8f996908195593807c4b38bd528d2cff09d4eb3,
0x3e5bfbbf4d3e39b1a2fd816a7680c19ebebaf3a141b239934ad43cb33fcec8ce)
>>> Account.recoverHash(msghash, vrs=vrs)
# variations on signature
>>> signature = '0xe6ca9bba58c88611fad66a6ce8f996908195593807c4b38bd528d2cff09d4eb33e5bfbbf4d3e39b1a2fd816a7680c19ebebaf3a141b239934ad43cb33fcec8ce1c' # noqa: E501
>>> Account.recoverHash(msghash, signature=signature)
>>> signature = b'\\xe6\\xca\\x9b\\xbaX\\xc8\\x86\\x11\\xfa\\xd6jl\\xe8\\xf9\\x96\\x90\\x81\\x95Y8\\x07\\xc4\\xb3\\x8b\\xd5(\\xd2\\xcf\\xf0\\x9dN\\xb3>[\\xfb\\xbfM>9\\xb1\\xa2\\xfd\\x81jv\\x80\\xc1\\x9e\\xbe\\xba\\xf3\\xa1A\\xb29\\x93J\\xd4<\\xb3?\\xce\\xc8\\xce\\x1c' # noqa: E501
>>> Account.recoverHash(msghash, signature=signature)
>>> signature = 0xe6ca9bba58c88611fad66a6ce8f996908195593807c4b38bd528d2cff09d4eb33e5bfbbf4d3e39b1a2fd816a7680c19ebebaf3a141b239934ad43cb33fcec8ce1c # noqa: E501
>>> Account.recoverHash(msghash, signature=signature)
'''
hash_bytes = HexBytes(message_hash)
if len(hash_bytes) != 32:
raise ValueError("The message hash must be exactly 32-bytes")
if vrs is not None:
v, r, s = map(hexstr_if_str(to_int), vrs)
v_standard = to_standard_v(v)
signature_obj = self._keys.Signature(vrs=(v_standard, r, s))
elif signature is not None:
signature_bytes = HexBytes(signature)
signature_bytes_standard = to_standard_signature_bytes(signature_bytes)
signature_obj = self._keys.Signature(signature_bytes=signature_bytes_standard)
else:
raise TypeError("You must supply the vrs tuple or the signature bytes")
pubkey = signature_obj.recover_public_key_from_msg_hash(hash_bytes)
return pubkey.to_checksum_address()
@combomethod
def recoverTransaction(self, serialized_transaction):
'''
Get the address of the account that signed this transaction.
:param serialized_transaction: the complete signed transaction
:type serialized_transaction: hex str, bytes or int
:returns: address of signer, hex-encoded & checksummed
:rtype: str
.. code-block:: python
>>> raw_transaction = '0xf86a8086d55698372431831e848094f0109fc8df283027b6285cc889f5aa624eac1f55843b9aca008025a009ebb6ca057a0535d6186462bc0b465b561c94a295bdb0621fc19208ab149a9ca0440ffd775ce91a833ab410777204d5341a6f9fa91216a6f3ee2c051fea6a0428', # noqa: E501
>>> Account.recoverTransaction(raw_transaction)
'0x2c7536E3605D9C16a7a3D7b1898e529396a65c23'
'''
txn_bytes = HexBytes(serialized_transaction)
txn = Transaction.from_bytes(txn_bytes)
msg_hash = hash_of_signed_transaction(txn)
return self.recoverHash(msg_hash, vrs=vrs_from(txn))
def setKeyBackend(self, backend):
'''
Change the backend used by the underlying eth-keys library.
*(The default is fine for most users)*
:param backend: any backend that works in
`eth_keys.KeyApi(backend) <https://github.com/ethereum/eth-keys/#keyapibackendnone>`_
'''
self._keys = KeyAPI(backend)
@combomethod
def signHash(self, message_hash, private_key):
'''
Sign the hash provided.
.. WARNING:: *Never* sign a hash that you didn't generate,
it can be an arbitrary transaction. For example, it might
send all of your account's ether to an attacker.
If you would like compatibility with
:meth:`w3.eth.sign() <web3.eth.Eth.sign>`
you can use :meth:`~eth_account.messages.defunct_hash_message`.
Several other message standards are proposed, but none have a clear
consensus. You'll need to manually comply with any of those message standards manually.
:param message_hash: the 32-byte message hash to be signed
:type message_hash: hex str, bytes or int
:param private_key: the key to sign the message with
:type private_key: hex str, bytes, int or :class:`eth_keys.datatypes.PrivateKey`
:returns: Various details about the signature - most
importantly the fields: v, r, and s
:rtype: ~eth_account.datastructures.AttributeDict
.. code-block:: python
>>> msg = "I♥SF"
>>> from eth_account.messages import defunct_hash_message
>>> msghash = defunct_hash_message(text=msg)
HexBytes('0x1476abb745d423bf09273f1afd887d951181d25adc66c4834a70491911b7f750')
>>> key = "0xb25c7db31feed9122727bf0939dc769a96564b2de4c4726d035b36ecf1e5b364"
>>> Account.signHash(msghash, key)
{'messageHash': HexBytes('0x1476abb745d423bf09273f1afd887d951181d25adc66c4834a70491911b7f750'), # noqa: E501
'r': 104389933075820307925104709181714897380569894203213074526835978196648170704563,
's': 28205917190874851400050446352651915501321657673772411533993420917949420456142,
'signature': HexBytes('0xe6ca9bba58c88611fad66a6ce8f996908195593807c4b38bd528d2cff09d4eb33e5bfbbf4d3e39b1a2fd816a7680c19ebebaf3a141b239934ad43cb33fcec8ce1c'), # noqa: E501
'v': 28}
# these are equivalent:
>>> Account.signHash(
0x1476abb745d423bf09273f1afd887d951181d25adc66c4834a70491911b7f750,
key
)
>>> Account.signHash(
"0x1476abb745d423bf09273f1afd887d951181d25adc66c4834a70491911b7f750",
key
)
'''
msg_hash_bytes = HexBytes(message_hash)
if len(msg_hash_bytes) != 32:
raise ValueError("The message hash must be exactly 32-bytes")
key = self._parsePrivateKey(private_key)
(v, r, s, eth_signature_bytes) = sign_message_hash(key, msg_hash_bytes)
return AttributeDict({
'messageHash': msg_hash_bytes,
'r': r,
's': s,
'v': v,
'signature': HexBytes(eth_signature_bytes),
})
@combomethod
def signTransaction(self, transaction_dict, private_key):
'''
Sign a transaction using a local private key. Produces signature details
and the hex-encoded transaction suitable for broadcast using
:meth:`w3.eth.sendRawTransaction() <web3.eth.Eth.sendRawTransaction>`.
Create the transaction dict for a contract method with
`my_contract.functions.my_function().buildTransaction()
<http://web3py.readthedocs.io/en/latest/contracts.html#methods>`_
:param dict transaction_dict: the transaction with keys:
nonce, chainId, to, data, value, gas, and gasPrice.
:param private_key: the private key to sign the data with
:type private_key: hex str, bytes, int or :class:`eth_keys.datatypes.PrivateKey`
:returns: Various details about the signature - most
importantly the fields: v, r, and s
:rtype: AttributeDict
.. code-block:: python
>>> transaction = {
# Note that the address must be in checksum format or native bytes:
'to': '0xF0109fC8DF283027b6285cc889F5aA624EaC1F55',
'value': 1000000000,
'gas': 2000000,
'gasPrice': 234567897654321,
'nonce': 0,
'chainId': 1
}
>>> key = '0x4c0883a69102937d6231471b5dbb6204fe5129617082792ae468d01a3f362318'
>>> signed = Account.signTransaction(transaction, key)
{'hash': HexBytes('0x6893a6ee8df79b0f5d64a180cd1ef35d030f3e296a5361cf04d02ce720d32ec5'),
'r': 4487286261793418179817841024889747115779324305375823110249149479905075174044,
'rawTransaction': HexBytes('0xf86a8086d55698372431831e848094f0109fc8df283027b6285cc889f5aa624eac1f55843b9aca008025a009ebb6ca057a0535d6186462bc0b465b561c94a295bdb0621fc19208ab149a9ca0440ffd775ce91a833ab410777204d5341a6f9fa91216a6f3ee2c051fea6a0428'), # noqa: E501
's': 30785525769477805655994251009256770582792548537338581640010273753578382951464,
'v': 37}
>>> w3.eth.sendRawTransaction(signed.rawTransaction)
'''
if not isinstance(transaction_dict, Mapping):
raise TypeError("transaction_dict must be dict-like, got %r" % transaction_dict)
account = self.privateKeyToAccount(private_key)
# allow from field, *only* if it matches the private key
if 'from' in transaction_dict:
if transaction_dict['from'] == account.address:
sanitized_transaction = dissoc(transaction_dict, 'from')
else:
raise TypeError("from field must match key's %s, but it was %s" % (
account.address,
transaction_dict['from'],
))
else:
sanitized_transaction = transaction_dict
# sign transaction
(
v,
r,
s,
rlp_encoded,
) = sign_transaction_dict(account._key_obj, sanitized_transaction)
transaction_hash = keccak(rlp_encoded)
return AttributeDict({
'rawTransaction': HexBytes(rlp_encoded),
'hash': HexBytes(transaction_hash),
'r': r,
's': s,
'v': v,
})
@combomethod
|
ethereum/eth-account
|
eth_account/_utils/structured_data/hashing.py
|
get_dependencies
|
python
|
def get_dependencies(primary_type, types):
deps = set()
struct_names_yet_to_be_expanded = [primary_type]
while len(struct_names_yet_to_be_expanded) > 0:
struct_name = struct_names_yet_to_be_expanded.pop()
deps.add(struct_name)
fields = types[struct_name]
for field in fields:
if field["type"] not in types:
# We don't need to expand types that are not user defined (customized)
continue
elif field["type"] in deps:
# skip types that we have already encountered
continue
else:
# Custom Struct Type
struct_names_yet_to_be_expanded.append(field["type"])
# Don't need to make a struct as dependency of itself
deps.remove(primary_type)
return tuple(deps)
|
Perform DFS to get all the dependencies of the primary_type
|
train
|
https://github.com/ethereum/eth-account/blob/335199b815ae34fea87f1523e2f29777fd52946e/eth_account/_utils/structured_data/hashing.py#L28-L54
| null |
from itertools import (
groupby,
)
import json
from operator import (
itemgetter,
)
from eth_abi import (
encode_abi,
is_encodable,
)
from eth_abi.grammar import (
parse,
)
from eth_utils import (
ValidationError,
keccak,
to_tuple,
toolz,
)
from .validation import (
validate_structured_data,
)
def field_identifier(field):
"""
Given a ``field`` of the format {'name': NAME, 'type': TYPE},
this function converts it to ``TYPE NAME``
"""
return "{0} {1}".format(field["type"], field["name"])
def encode_struct(struct_name, struct_field_types):
return "{0}({1})".format(
struct_name,
','.join(map(field_identifier, struct_field_types)),
)
def encode_type(primary_type, types):
"""
The type of a struct is encoded as name ‖ "(" ‖ member₁ ‖ "," ‖ member₂ ‖ "," ‖ … ‖ memberₙ ")"
where each member is written as type ‖ " " ‖ name.
"""
# Getting the dependencies and sorting them alphabetically as per EIP712
deps = get_dependencies(primary_type, types)
sorted_deps = (primary_type,) + tuple(sorted(deps))
result = ''.join(
[
encode_struct(struct_name, types[struct_name])
for struct_name in sorted_deps
]
)
return result
def hash_struct_type(primary_type, types):
return keccak(text=encode_type(primary_type, types))
def is_valid_abi_type(type_name):
"""
This function is used to make sure that the ``type_name`` is a valid ABI Type.
Please note that this is a temporary function and should be replaced by the corresponding
ABI function, once the following issue has been resolved.
https://github.com/ethereum/eth-abi/issues/125
"""
valid_abi_types = {"address", "bool", "bytes", "int", "string", "uint"}
is_bytesN = type_name.startswith("bytes") and 1 <= int(type_name[5:]) <= 32
is_intN = (
type_name.startswith("int") and
8 <= int(type_name[3:]) <= 256 and
int(type_name[3:]) % 8 == 0
)
is_uintN = (
type_name.startswith("uint") and
8 <= int(type_name[4:]) <= 256 and
int(type_name[4:]) % 8 == 0
)
if type_name in valid_abi_types:
return True
elif is_bytesN:
# bytes1 to bytes32
return True
elif is_intN:
# int8 to int256
return True
elif is_uintN:
# uint8 to uint256
return True
return False
def is_array_type(type):
# Identify if type such as "person[]" or "person[2]" is an array
abi_type = parse(type)
return abi_type.is_array
@to_tuple
def get_depths_and_dimensions(data, depth):
"""
Yields 2-length tuples of depth and dimension of each element at that depth
"""
if not isinstance(data, (list, tuple)):
# Not checking for Iterable instance, because even Dictionaries and strings
# are considered as iterables, but that's not what we want the condition to be.
return ()
yield depth, len(data)
for item in data:
# iterating over all 1 dimension less sub-data items
yield from get_depths_and_dimensions(item, depth + 1)
def get_array_dimensions(data):
"""
Given an array type data item, check that it is an array and
return the dimensions as a tuple.
Ex: get_array_dimensions([[1, 2, 3], [4, 5, 6]]) returns (2, 3)
"""
depths_and_dimensions = get_depths_and_dimensions(data, 0)
# re-form as a dictionary with `depth` as key, and all of the dimensions found at that depth.
grouped_by_depth = {
depth: tuple(dimension for depth, dimension in group)
for depth, group in groupby(depths_and_dimensions, itemgetter(0))
}
# validate that there is only one dimension for any given depth.
invalid_depths_dimensions = tuple(
(depth, dimensions)
for depth, dimensions in grouped_by_depth.items()
if len(set(dimensions)) != 1
)
if invalid_depths_dimensions:
raise ValidationError(
'\n'.join(
[
"Depth {0} of array data has more than one dimensions: {1}".
format(depth, dimensions)
for depth, dimensions in invalid_depths_dimensions
]
)
)
dimensions = tuple(
toolz.first(set(dimensions))
for depth, dimensions in sorted(grouped_by_depth.items())
)
return dimensions
@to_tuple
def flatten_multidimensional_array(array):
for item in array:
if not isinstance(item, (list, tuple)):
# Not checking for Iterable instance, because even Dictionaries and strings
# are considered as iterables, but that's not what we want the condition to be.
yield from flatten_multidimensional_array(item)
else:
yield item
@to_tuple
def _encode_data(primary_type, types, data):
# Add typehash
yield "bytes32", hash_struct_type(primary_type, types)
# Add field contents
for field in types[primary_type]:
value = data[field["name"]]
if field["type"] == "string":
if not isinstance(value, str):
raise TypeError(
"Value of `{0}` ({2}) in the struct `{1}` is of the type `{3}`, but expected "
"string value".format(
field["name"],
primary_type,
value,
type(value),
)
)
# Special case where the values need to be keccak hashed before they are encoded
hashed_value = keccak(text=value)
yield "bytes32", hashed_value
elif field["type"] == "bytes":
if not isinstance(value, bytes):
raise TypeError(
"Value of `{0}` ({2}) in the struct `{1}` is of the type `{3}`, but expected "
"bytes value".format(
field["name"],
primary_type,
value,
type(value),
)
)
# Special case where the values need to be keccak hashed before they are encoded
hashed_value = keccak(primitive=value)
yield "bytes32", hashed_value
elif field["type"] in types:
# This means that this type is a user defined type
hashed_value = keccak(primitive=encode_data(field["type"], types, value))
yield "bytes32", hashed_value
elif is_array_type(field["type"]):
# Get the dimensions from the value
array_dimensions = get_array_dimensions(value)
# Get the dimensions from what was declared in the schema
parsed_type = parse(field["type"])
for i in range(len(array_dimensions)):
if len(parsed_type.arrlist[i]) == 0:
# Skip empty or dynamically declared dimensions
continue
if array_dimensions[i] != parsed_type.arrlist[i][0]:
# Dimensions should match with declared schema
raise TypeError(
"Array data `{0}` has dimensions `{1}` whereas the "
"schema has dimensions `{2}`".format(
value,
array_dimensions,
tuple(map(lambda x: x[0], parsed_type.arrlist)),
)
)
array_items = flatten_multidimensional_array(value)
array_items_encoding = [
encode_data(parsed_type.base, types, array_item)
for array_item in array_items
]
concatenated_array_encodings = ''.join(array_items_encoding)
hashed_value = keccak(concatenated_array_encodings)
yield "bytes32", hashed_value
else:
# First checking to see if type is valid as per abi
if not is_valid_abi_type(field["type"]):
raise TypeError(
"Received Invalid type `{0}` in the struct `{1}`".format(
field["type"],
primary_type,
)
)
# Next see if the data fits the specified encoding type
if is_encodable(field["type"], value):
# field["type"] is a valid type and this value corresponds to that type.
yield field["type"], value
else:
raise TypeError(
"Value of `{0}` ({2}) in the struct `{1}` is of the type `{3}`, but expected "
"{4} value".format(
field["name"],
primary_type,
value,
type(value),
field["type"],
)
)
def encode_data(primaryType, types, data):
data_types_and_hashes = _encode_data(primaryType, types, data)
data_types, data_hashes = zip(*data_types_and_hashes)
return encode_abi(data_types, data_hashes)
def load_and_validate_structured_message(structured_json_string_data):
structured_data = json.loads(structured_json_string_data)
validate_structured_data(structured_data)
return structured_data
def hash_domain(structured_data):
return keccak(
encode_data(
"EIP712Domain",
structured_data["types"],
structured_data["domain"]
)
)
def hash_message(structured_data):
return keccak(
encode_data(
structured_data["primaryType"],
structured_data["types"],
structured_data["message"]
)
)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.