repository_name stringclasses 316 values | func_path_in_repository stringlengths 6 223 | func_name stringlengths 1 134 | language stringclasses 1 value | func_code_string stringlengths 57 65.5k | func_documentation_string stringlengths 1 46.3k | split_name stringclasses 1 value | func_code_url stringlengths 91 315 | called_functions listlengths 1 156 ⌀ | enclosing_scope stringlengths 2 1.48M |
|---|---|---|---|---|---|---|---|---|---|
xflr6/concepts | concepts/definitions.py | Definition.set_object | python | def set_object(self, obj, properties):
self._objects.add(obj)
properties = set(properties)
self._properties |= properties
pairs = self._pairs
for p in self._properties:
if p in properties:
pairs.add((obj, p))
else:
pairs.discard((obj, p)) | Add an object to the definition and set its ``properties``. | train | https://github.com/xflr6/concepts/blob/2801b27b05fa02cccee7d549451810ffcbf5c942/concepts/definitions.py#L372-L382 | null | class Definition(Triple):
"""Mutable triple of ``(objects, properties, bools)`` for creating a context.
Example:
>>> Definition(['man', 'woman'], ['male', 'female'], [(True, False), (False, True)])
<Definition(['man', 'woman'], ['male', 'female'], [(True, False), (False, True)])>
Usage:
>>> d = Definition()
>>> d
<Definition([], [], [])>
>>> d.add_object('King Arthur')
>>> print(d)
|
King Arthur|
>>> d.add_object('Sir Robin', ['human', 'knight'])
>>> d.add_object('holy grail')
>>> print(d)
|human|knight|
King Arthur| | |
Sir Robin |X |X |
holy grail | | |
>>> d.add_object('King Arthur', ['human', 'knight', 'king'])
>>> d.add_property('mysterious', ['holy grail', 'Sir Robin'])
>>> print(d)
|human|knight|king|mysterious|
King Arthur|X |X |X | |
Sir Robin |X |X | |X |
holy grail | | | |X |
>>> d['Sir Robin', 'mysterious'] = False
>>> print(d)
|human|knight|king|mysterious|
King Arthur|X |X |X | |
Sir Robin |X |X | | |
holy grail | | | |X |
>>> e = d.copy()
>>> e.move_object('holy grail', 0)
>>> e.move_property('mysterious', 0)
>>> e.move_property('king', 1)
>>> print(e)
|mysterious|king|human|knight|
holy grail |X | | | |
King Arthur| |X |X |X |
Sir Robin | | |X |X |
>>> e = d.copy()
>>> e.rename_object('Sir Robin', 'Launcelot')
>>> e.add_property('brave', ['Launcelot'])
>>> e.rename_object('holy grail', 'grail')
>>> e.rename_property('mysterious', 'holy')
>>> print(e)
|human|knight|king|holy|brave|
King Arthur|X |X |X | | |
Launcelot |X |X | | |X |
grail | | | |X | |
>>> print(e | d)
|human|knight|king|holy|brave|mysterious|
King Arthur|X |X |X | | | |
Launcelot |X |X | | |X | |
grail | | | |X | | |
Sir Robin |X |X | | | | |
holy grail | | | | | |X |
>>> print(e & d)
|human|knight|king|
King Arthur|X |X |X |
>>> e.remove_object('grail')
>>> e.remove_property('holy')
>>> e.rename_object('King Arthur', 'Arthur')
>>> e.set_property('king', [])
>>> e.set_object('Launcelot', ['human'])
>>> print(e)
|human|knight|king|brave|
Arthur |X |X | | |
Launcelot|X | | | |
>>> e.set_property('knight', ['Launcelot'])
>>> print(e)
|human|knight|king|brave|
Arthur |X | | | |
Launcelot|X |X | | |
"""
def rename_object(self, old, new):
"""Replace the name of an object by a new one."""
self._objects.replace(old, new)
pairs = self._pairs
pairs |= {(new, p) for p in self._properties
if (old, p) in pairs and not pairs.remove((old, p))}
def rename_property(self, old, new):
"""Replace the name of a property by a new one."""
self._properties.replace(old, new)
pairs = self._pairs
pairs |= {(o, new) for o in self._objects
if (o, old) in pairs and not pairs.remove((o, old))}
def move_object(self, obj, index):
"""Reorder the definition such that object is at ``index``."""
self._objects.move(obj, index)
def move_property(self, prop, index):
"""Reorder the definition such that property is at ``index``."""
self._properties.move(prop, index)
def __setitem__(self, pair, value):
if isinstance(pair, int):
raise ValueError("can't set item")
o, p = pair
self._objects.add(o)
self._properties.add(p)
if value:
self._pairs.add(pair)
else:
self._pairs.discard(pair)
def add_object(self, obj, properties=()):
"""Add an object to the definition and add ``properties`` as related."""
self._objects.add(obj)
self._properties |= properties
self._pairs.update((obj, p) for p in properties)
def add_property(self, prop, objects=()):
"""Add a property to the definition and add ``objects`` as related."""
self._properties.add(prop)
self._objects |= objects
self._pairs.update((o, prop) for o in objects)
def remove_object(self, obj):
"""Remove an object from the definition."""
self._objects.remove(obj)
self._pairs.difference_update((obj, p) for p in self._properties)
def remove_property(self, prop):
"""Remove a property from the definition."""
self._properties.remove(prop)
self._pairs.difference_update((o, prop) for o in self._objects)
def set_property(self, prop, objects):
"""Add a property to the definition and set its ``objects``."""
self._properties.add(prop)
objects = set(objects)
self._objects |= objects
pairs = self._pairs
for o in self._objects:
if o in objects:
pairs.add((o, prop))
else:
pairs.discard((o, prop))
def union_update(self, other, ignore_conflicts=False):
"""Update the definition with the union of the ``other``."""
if not ignore_conflicts:
ensure_compatible(self, other)
self._objects |= other._objects
self._properties |= other._properties
self._pairs |= other._pairs
def intersection_update(self, other, ignore_conflicts=False):
"""Update the definition with the intersection of the ``other``."""
if not ignore_conflicts:
ensure_compatible(self, other)
self._objects &= other._objects
self._properties &= other._properties
self._pairs &= other._pairs
def __ior__(self, other):
self.union_update(other)
return self
def __iand__(self, other):
self.intersection_update(other)
return self
def union(self, other, ignore_conflicts=False):
"""Return a new definition from the union of the definitions."""
result = self.copy()
result.union_update(other, ignore_conflicts)
return result
def intersection(self, other, ignore_conflicts=False):
"""Return a new definition from the intersection of the definitions."""
result = self.copy()
result.intersection_update(other, ignore_conflicts)
return result
__or__ = union
__and__ = intersection
|
xflr6/concepts | concepts/definitions.py | Definition.set_property | python | def set_property(self, prop, objects):
self._properties.add(prop)
objects = set(objects)
self._objects |= objects
pairs = self._pairs
for o in self._objects:
if o in objects:
pairs.add((o, prop))
else:
pairs.discard((o, prop)) | Add a property to the definition and set its ``objects``. | train | https://github.com/xflr6/concepts/blob/2801b27b05fa02cccee7d549451810ffcbf5c942/concepts/definitions.py#L384-L394 | null | class Definition(Triple):
"""Mutable triple of ``(objects, properties, bools)`` for creating a context.
Example:
>>> Definition(['man', 'woman'], ['male', 'female'], [(True, False), (False, True)])
<Definition(['man', 'woman'], ['male', 'female'], [(True, False), (False, True)])>
Usage:
>>> d = Definition()
>>> d
<Definition([], [], [])>
>>> d.add_object('King Arthur')
>>> print(d)
|
King Arthur|
>>> d.add_object('Sir Robin', ['human', 'knight'])
>>> d.add_object('holy grail')
>>> print(d)
|human|knight|
King Arthur| | |
Sir Robin |X |X |
holy grail | | |
>>> d.add_object('King Arthur', ['human', 'knight', 'king'])
>>> d.add_property('mysterious', ['holy grail', 'Sir Robin'])
>>> print(d)
|human|knight|king|mysterious|
King Arthur|X |X |X | |
Sir Robin |X |X | |X |
holy grail | | | |X |
>>> d['Sir Robin', 'mysterious'] = False
>>> print(d)
|human|knight|king|mysterious|
King Arthur|X |X |X | |
Sir Robin |X |X | | |
holy grail | | | |X |
>>> e = d.copy()
>>> e.move_object('holy grail', 0)
>>> e.move_property('mysterious', 0)
>>> e.move_property('king', 1)
>>> print(e)
|mysterious|king|human|knight|
holy grail |X | | | |
King Arthur| |X |X |X |
Sir Robin | | |X |X |
>>> e = d.copy()
>>> e.rename_object('Sir Robin', 'Launcelot')
>>> e.add_property('brave', ['Launcelot'])
>>> e.rename_object('holy grail', 'grail')
>>> e.rename_property('mysterious', 'holy')
>>> print(e)
|human|knight|king|holy|brave|
King Arthur|X |X |X | | |
Launcelot |X |X | | |X |
grail | | | |X | |
>>> print(e | d)
|human|knight|king|holy|brave|mysterious|
King Arthur|X |X |X | | | |
Launcelot |X |X | | |X | |
grail | | | |X | | |
Sir Robin |X |X | | | | |
holy grail | | | | | |X |
>>> print(e & d)
|human|knight|king|
King Arthur|X |X |X |
>>> e.remove_object('grail')
>>> e.remove_property('holy')
>>> e.rename_object('King Arthur', 'Arthur')
>>> e.set_property('king', [])
>>> e.set_object('Launcelot', ['human'])
>>> print(e)
|human|knight|king|brave|
Arthur |X |X | | |
Launcelot|X | | | |
>>> e.set_property('knight', ['Launcelot'])
>>> print(e)
|human|knight|king|brave|
Arthur |X | | | |
Launcelot|X |X | | |
"""
def rename_object(self, old, new):
"""Replace the name of an object by a new one."""
self._objects.replace(old, new)
pairs = self._pairs
pairs |= {(new, p) for p in self._properties
if (old, p) in pairs and not pairs.remove((old, p))}
def rename_property(self, old, new):
"""Replace the name of a property by a new one."""
self._properties.replace(old, new)
pairs = self._pairs
pairs |= {(o, new) for o in self._objects
if (o, old) in pairs and not pairs.remove((o, old))}
def move_object(self, obj, index):
"""Reorder the definition such that object is at ``index``."""
self._objects.move(obj, index)
def move_property(self, prop, index):
"""Reorder the definition such that property is at ``index``."""
self._properties.move(prop, index)
def __setitem__(self, pair, value):
if isinstance(pair, int):
raise ValueError("can't set item")
o, p = pair
self._objects.add(o)
self._properties.add(p)
if value:
self._pairs.add(pair)
else:
self._pairs.discard(pair)
def add_object(self, obj, properties=()):
"""Add an object to the definition and add ``properties`` as related."""
self._objects.add(obj)
self._properties |= properties
self._pairs.update((obj, p) for p in properties)
def add_property(self, prop, objects=()):
"""Add a property to the definition and add ``objects`` as related."""
self._properties.add(prop)
self._objects |= objects
self._pairs.update((o, prop) for o in objects)
def remove_object(self, obj):
"""Remove an object from the definition."""
self._objects.remove(obj)
self._pairs.difference_update((obj, p) for p in self._properties)
def remove_property(self, prop):
"""Remove a property from the definition."""
self._properties.remove(prop)
self._pairs.difference_update((o, prop) for o in self._objects)
def set_object(self, obj, properties):
"""Add an object to the definition and set its ``properties``."""
self._objects.add(obj)
properties = set(properties)
self._properties |= properties
pairs = self._pairs
for p in self._properties:
if p in properties:
pairs.add((obj, p))
else:
pairs.discard((obj, p))
def union_update(self, other, ignore_conflicts=False):
"""Update the definition with the union of the ``other``."""
if not ignore_conflicts:
ensure_compatible(self, other)
self._objects |= other._objects
self._properties |= other._properties
self._pairs |= other._pairs
def intersection_update(self, other, ignore_conflicts=False):
"""Update the definition with the intersection of the ``other``."""
if not ignore_conflicts:
ensure_compatible(self, other)
self._objects &= other._objects
self._properties &= other._properties
self._pairs &= other._pairs
def __ior__(self, other):
self.union_update(other)
return self
def __iand__(self, other):
self.intersection_update(other)
return self
def union(self, other, ignore_conflicts=False):
"""Return a new definition from the union of the definitions."""
result = self.copy()
result.union_update(other, ignore_conflicts)
return result
def intersection(self, other, ignore_conflicts=False):
"""Return a new definition from the intersection of the definitions."""
result = self.copy()
result.intersection_update(other, ignore_conflicts)
return result
__or__ = union
__and__ = intersection
|
xflr6/concepts | concepts/definitions.py | Definition.union_update | python | def union_update(self, other, ignore_conflicts=False):
if not ignore_conflicts:
ensure_compatible(self, other)
self._objects |= other._objects
self._properties |= other._properties
self._pairs |= other._pairs | Update the definition with the union of the ``other``. | train | https://github.com/xflr6/concepts/blob/2801b27b05fa02cccee7d549451810ffcbf5c942/concepts/definitions.py#L396-L402 | [
"def ensure_compatible(left, right):\n \"\"\"Raise an informative ``ValueError`` if the two definitions disagree.\"\"\"\n conflicts = list(conflicting_pairs(left, right))\n if conflicts:\n raise ValueError('conflicting values for object/property pairs: %r' % conflicts)\n"
] | class Definition(Triple):
"""Mutable triple of ``(objects, properties, bools)`` for creating a context.
Example:
>>> Definition(['man', 'woman'], ['male', 'female'], [(True, False), (False, True)])
<Definition(['man', 'woman'], ['male', 'female'], [(True, False), (False, True)])>
Usage:
>>> d = Definition()
>>> d
<Definition([], [], [])>
>>> d.add_object('King Arthur')
>>> print(d)
|
King Arthur|
>>> d.add_object('Sir Robin', ['human', 'knight'])
>>> d.add_object('holy grail')
>>> print(d)
|human|knight|
King Arthur| | |
Sir Robin |X |X |
holy grail | | |
>>> d.add_object('King Arthur', ['human', 'knight', 'king'])
>>> d.add_property('mysterious', ['holy grail', 'Sir Robin'])
>>> print(d)
|human|knight|king|mysterious|
King Arthur|X |X |X | |
Sir Robin |X |X | |X |
holy grail | | | |X |
>>> d['Sir Robin', 'mysterious'] = False
>>> print(d)
|human|knight|king|mysterious|
King Arthur|X |X |X | |
Sir Robin |X |X | | |
holy grail | | | |X |
>>> e = d.copy()
>>> e.move_object('holy grail', 0)
>>> e.move_property('mysterious', 0)
>>> e.move_property('king', 1)
>>> print(e)
|mysterious|king|human|knight|
holy grail |X | | | |
King Arthur| |X |X |X |
Sir Robin | | |X |X |
>>> e = d.copy()
>>> e.rename_object('Sir Robin', 'Launcelot')
>>> e.add_property('brave', ['Launcelot'])
>>> e.rename_object('holy grail', 'grail')
>>> e.rename_property('mysterious', 'holy')
>>> print(e)
|human|knight|king|holy|brave|
King Arthur|X |X |X | | |
Launcelot |X |X | | |X |
grail | | | |X | |
>>> print(e | d)
|human|knight|king|holy|brave|mysterious|
King Arthur|X |X |X | | | |
Launcelot |X |X | | |X | |
grail | | | |X | | |
Sir Robin |X |X | | | | |
holy grail | | | | | |X |
>>> print(e & d)
|human|knight|king|
King Arthur|X |X |X |
>>> e.remove_object('grail')
>>> e.remove_property('holy')
>>> e.rename_object('King Arthur', 'Arthur')
>>> e.set_property('king', [])
>>> e.set_object('Launcelot', ['human'])
>>> print(e)
|human|knight|king|brave|
Arthur |X |X | | |
Launcelot|X | | | |
>>> e.set_property('knight', ['Launcelot'])
>>> print(e)
|human|knight|king|brave|
Arthur |X | | | |
Launcelot|X |X | | |
"""
def rename_object(self, old, new):
"""Replace the name of an object by a new one."""
self._objects.replace(old, new)
pairs = self._pairs
pairs |= {(new, p) for p in self._properties
if (old, p) in pairs and not pairs.remove((old, p))}
def rename_property(self, old, new):
"""Replace the name of a property by a new one."""
self._properties.replace(old, new)
pairs = self._pairs
pairs |= {(o, new) for o in self._objects
if (o, old) in pairs and not pairs.remove((o, old))}
def move_object(self, obj, index):
"""Reorder the definition such that object is at ``index``."""
self._objects.move(obj, index)
def move_property(self, prop, index):
"""Reorder the definition such that property is at ``index``."""
self._properties.move(prop, index)
def __setitem__(self, pair, value):
if isinstance(pair, int):
raise ValueError("can't set item")
o, p = pair
self._objects.add(o)
self._properties.add(p)
if value:
self._pairs.add(pair)
else:
self._pairs.discard(pair)
def add_object(self, obj, properties=()):
"""Add an object to the definition and add ``properties`` as related."""
self._objects.add(obj)
self._properties |= properties
self._pairs.update((obj, p) for p in properties)
def add_property(self, prop, objects=()):
"""Add a property to the definition and add ``objects`` as related."""
self._properties.add(prop)
self._objects |= objects
self._pairs.update((o, prop) for o in objects)
def remove_object(self, obj):
"""Remove an object from the definition."""
self._objects.remove(obj)
self._pairs.difference_update((obj, p) for p in self._properties)
def remove_property(self, prop):
"""Remove a property from the definition."""
self._properties.remove(prop)
self._pairs.difference_update((o, prop) for o in self._objects)
def set_object(self, obj, properties):
"""Add an object to the definition and set its ``properties``."""
self._objects.add(obj)
properties = set(properties)
self._properties |= properties
pairs = self._pairs
for p in self._properties:
if p in properties:
pairs.add((obj, p))
else:
pairs.discard((obj, p))
def set_property(self, prop, objects):
"""Add a property to the definition and set its ``objects``."""
self._properties.add(prop)
objects = set(objects)
self._objects |= objects
pairs = self._pairs
for o in self._objects:
if o in objects:
pairs.add((o, prop))
else:
pairs.discard((o, prop))
def intersection_update(self, other, ignore_conflicts=False):
"""Update the definition with the intersection of the ``other``."""
if not ignore_conflicts:
ensure_compatible(self, other)
self._objects &= other._objects
self._properties &= other._properties
self._pairs &= other._pairs
def __ior__(self, other):
self.union_update(other)
return self
def __iand__(self, other):
self.intersection_update(other)
return self
def union(self, other, ignore_conflicts=False):
"""Return a new definition from the union of the definitions."""
result = self.copy()
result.union_update(other, ignore_conflicts)
return result
def intersection(self, other, ignore_conflicts=False):
"""Return a new definition from the intersection of the definitions."""
result = self.copy()
result.intersection_update(other, ignore_conflicts)
return result
__or__ = union
__and__ = intersection
|
xflr6/concepts | concepts/definitions.py | Definition.union | python | def union(self, other, ignore_conflicts=False):
result = self.copy()
result.union_update(other, ignore_conflicts)
return result | Return a new definition from the union of the definitions. | train | https://github.com/xflr6/concepts/blob/2801b27b05fa02cccee7d549451810ffcbf5c942/concepts/definitions.py#L420-L424 | null | class Definition(Triple):
"""Mutable triple of ``(objects, properties, bools)`` for creating a context.
Example:
>>> Definition(['man', 'woman'], ['male', 'female'], [(True, False), (False, True)])
<Definition(['man', 'woman'], ['male', 'female'], [(True, False), (False, True)])>
Usage:
>>> d = Definition()
>>> d
<Definition([], [], [])>
>>> d.add_object('King Arthur')
>>> print(d)
|
King Arthur|
>>> d.add_object('Sir Robin', ['human', 'knight'])
>>> d.add_object('holy grail')
>>> print(d)
|human|knight|
King Arthur| | |
Sir Robin |X |X |
holy grail | | |
>>> d.add_object('King Arthur', ['human', 'knight', 'king'])
>>> d.add_property('mysterious', ['holy grail', 'Sir Robin'])
>>> print(d)
|human|knight|king|mysterious|
King Arthur|X |X |X | |
Sir Robin |X |X | |X |
holy grail | | | |X |
>>> d['Sir Robin', 'mysterious'] = False
>>> print(d)
|human|knight|king|mysterious|
King Arthur|X |X |X | |
Sir Robin |X |X | | |
holy grail | | | |X |
>>> e = d.copy()
>>> e.move_object('holy grail', 0)
>>> e.move_property('mysterious', 0)
>>> e.move_property('king', 1)
>>> print(e)
|mysterious|king|human|knight|
holy grail |X | | | |
King Arthur| |X |X |X |
Sir Robin | | |X |X |
>>> e = d.copy()
>>> e.rename_object('Sir Robin', 'Launcelot')
>>> e.add_property('brave', ['Launcelot'])
>>> e.rename_object('holy grail', 'grail')
>>> e.rename_property('mysterious', 'holy')
>>> print(e)
|human|knight|king|holy|brave|
King Arthur|X |X |X | | |
Launcelot |X |X | | |X |
grail | | | |X | |
>>> print(e | d)
|human|knight|king|holy|brave|mysterious|
King Arthur|X |X |X | | | |
Launcelot |X |X | | |X | |
grail | | | |X | | |
Sir Robin |X |X | | | | |
holy grail | | | | | |X |
>>> print(e & d)
|human|knight|king|
King Arthur|X |X |X |
>>> e.remove_object('grail')
>>> e.remove_property('holy')
>>> e.rename_object('King Arthur', 'Arthur')
>>> e.set_property('king', [])
>>> e.set_object('Launcelot', ['human'])
>>> print(e)
|human|knight|king|brave|
Arthur |X |X | | |
Launcelot|X | | | |
>>> e.set_property('knight', ['Launcelot'])
>>> print(e)
|human|knight|king|brave|
Arthur |X | | | |
Launcelot|X |X | | |
"""
def rename_object(self, old, new):
"""Replace the name of an object by a new one."""
self._objects.replace(old, new)
pairs = self._pairs
pairs |= {(new, p) for p in self._properties
if (old, p) in pairs and not pairs.remove((old, p))}
def rename_property(self, old, new):
"""Replace the name of a property by a new one."""
self._properties.replace(old, new)
pairs = self._pairs
pairs |= {(o, new) for o in self._objects
if (o, old) in pairs and not pairs.remove((o, old))}
def move_object(self, obj, index):
"""Reorder the definition such that object is at ``index``."""
self._objects.move(obj, index)
def move_property(self, prop, index):
"""Reorder the definition such that property is at ``index``."""
self._properties.move(prop, index)
def __setitem__(self, pair, value):
if isinstance(pair, int):
raise ValueError("can't set item")
o, p = pair
self._objects.add(o)
self._properties.add(p)
if value:
self._pairs.add(pair)
else:
self._pairs.discard(pair)
def add_object(self, obj, properties=()):
"""Add an object to the definition and add ``properties`` as related."""
self._objects.add(obj)
self._properties |= properties
self._pairs.update((obj, p) for p in properties)
def add_property(self, prop, objects=()):
"""Add a property to the definition and add ``objects`` as related."""
self._properties.add(prop)
self._objects |= objects
self._pairs.update((o, prop) for o in objects)
def remove_object(self, obj):
"""Remove an object from the definition."""
self._objects.remove(obj)
self._pairs.difference_update((obj, p) for p in self._properties)
def remove_property(self, prop):
"""Remove a property from the definition."""
self._properties.remove(prop)
self._pairs.difference_update((o, prop) for o in self._objects)
def set_object(self, obj, properties):
"""Add an object to the definition and set its ``properties``."""
self._objects.add(obj)
properties = set(properties)
self._properties |= properties
pairs = self._pairs
for p in self._properties:
if p in properties:
pairs.add((obj, p))
else:
pairs.discard((obj, p))
def set_property(self, prop, objects):
"""Add a property to the definition and set its ``objects``."""
self._properties.add(prop)
objects = set(objects)
self._objects |= objects
pairs = self._pairs
for o in self._objects:
if o in objects:
pairs.add((o, prop))
else:
pairs.discard((o, prop))
def union_update(self, other, ignore_conflicts=False):
"""Update the definition with the union of the ``other``."""
if not ignore_conflicts:
ensure_compatible(self, other)
self._objects |= other._objects
self._properties |= other._properties
self._pairs |= other._pairs
def intersection_update(self, other, ignore_conflicts=False):
"""Update the definition with the intersection of the ``other``."""
if not ignore_conflicts:
ensure_compatible(self, other)
self._objects &= other._objects
self._properties &= other._properties
self._pairs &= other._pairs
def __ior__(self, other):
self.union_update(other)
return self
def __iand__(self, other):
self.intersection_update(other)
return self
def intersection(self, other, ignore_conflicts=False):
"""Return a new definition from the intersection of the definitions."""
result = self.copy()
result.intersection_update(other, ignore_conflicts)
return result
__or__ = union
__and__ = intersection
|
xflr6/concepts | concepts/definitions.py | Definition.intersection | python | def intersection(self, other, ignore_conflicts=False):
result = self.copy()
result.intersection_update(other, ignore_conflicts)
return result | Return a new definition from the intersection of the definitions. | train | https://github.com/xflr6/concepts/blob/2801b27b05fa02cccee7d549451810ffcbf5c942/concepts/definitions.py#L426-L430 | null | class Definition(Triple):
"""Mutable triple of ``(objects, properties, bools)`` for creating a context.
Example:
>>> Definition(['man', 'woman'], ['male', 'female'], [(True, False), (False, True)])
<Definition(['man', 'woman'], ['male', 'female'], [(True, False), (False, True)])>
Usage:
>>> d = Definition()
>>> d
<Definition([], [], [])>
>>> d.add_object('King Arthur')
>>> print(d)
|
King Arthur|
>>> d.add_object('Sir Robin', ['human', 'knight'])
>>> d.add_object('holy grail')
>>> print(d)
|human|knight|
King Arthur| | |
Sir Robin |X |X |
holy grail | | |
>>> d.add_object('King Arthur', ['human', 'knight', 'king'])
>>> d.add_property('mysterious', ['holy grail', 'Sir Robin'])
>>> print(d)
|human|knight|king|mysterious|
King Arthur|X |X |X | |
Sir Robin |X |X | |X |
holy grail | | | |X |
>>> d['Sir Robin', 'mysterious'] = False
>>> print(d)
|human|knight|king|mysterious|
King Arthur|X |X |X | |
Sir Robin |X |X | | |
holy grail | | | |X |
>>> e = d.copy()
>>> e.move_object('holy grail', 0)
>>> e.move_property('mysterious', 0)
>>> e.move_property('king', 1)
>>> print(e)
|mysterious|king|human|knight|
holy grail |X | | | |
King Arthur| |X |X |X |
Sir Robin | | |X |X |
>>> e = d.copy()
>>> e.rename_object('Sir Robin', 'Launcelot')
>>> e.add_property('brave', ['Launcelot'])
>>> e.rename_object('holy grail', 'grail')
>>> e.rename_property('mysterious', 'holy')
>>> print(e)
|human|knight|king|holy|brave|
King Arthur|X |X |X | | |
Launcelot |X |X | | |X |
grail | | | |X | |
>>> print(e | d)
|human|knight|king|holy|brave|mysterious|
King Arthur|X |X |X | | | |
Launcelot |X |X | | |X | |
grail | | | |X | | |
Sir Robin |X |X | | | | |
holy grail | | | | | |X |
>>> print(e & d)
|human|knight|king|
King Arthur|X |X |X |
>>> e.remove_object('grail')
>>> e.remove_property('holy')
>>> e.rename_object('King Arthur', 'Arthur')
>>> e.set_property('king', [])
>>> e.set_object('Launcelot', ['human'])
>>> print(e)
|human|knight|king|brave|
Arthur |X |X | | |
Launcelot|X | | | |
>>> e.set_property('knight', ['Launcelot'])
>>> print(e)
|human|knight|king|brave|
Arthur |X | | | |
Launcelot|X |X | | |
"""
def rename_object(self, old, new):
"""Replace the name of an object by a new one."""
self._objects.replace(old, new)
pairs = self._pairs
pairs |= {(new, p) for p in self._properties
if (old, p) in pairs and not pairs.remove((old, p))}
def rename_property(self, old, new):
"""Replace the name of a property by a new one."""
self._properties.replace(old, new)
pairs = self._pairs
pairs |= {(o, new) for o in self._objects
if (o, old) in pairs and not pairs.remove((o, old))}
def move_object(self, obj, index):
"""Reorder the definition such that object is at ``index``."""
self._objects.move(obj, index)
def move_property(self, prop, index):
"""Reorder the definition such that property is at ``index``."""
self._properties.move(prop, index)
def __setitem__(self, pair, value):
if isinstance(pair, int):
raise ValueError("can't set item")
o, p = pair
self._objects.add(o)
self._properties.add(p)
if value:
self._pairs.add(pair)
else:
self._pairs.discard(pair)
def add_object(self, obj, properties=()):
"""Add an object to the definition and add ``properties`` as related."""
self._objects.add(obj)
self._properties |= properties
self._pairs.update((obj, p) for p in properties)
def add_property(self, prop, objects=()):
"""Add a property to the definition and add ``objects`` as related."""
self._properties.add(prop)
self._objects |= objects
self._pairs.update((o, prop) for o in objects)
def remove_object(self, obj):
"""Remove an object from the definition."""
self._objects.remove(obj)
self._pairs.difference_update((obj, p) for p in self._properties)
def remove_property(self, prop):
"""Remove a property from the definition."""
self._properties.remove(prop)
self._pairs.difference_update((o, prop) for o in self._objects)
def set_object(self, obj, properties):
"""Add an object to the definition and set its ``properties``."""
self._objects.add(obj)
properties = set(properties)
self._properties |= properties
pairs = self._pairs
for p in self._properties:
if p in properties:
pairs.add((obj, p))
else:
pairs.discard((obj, p))
def set_property(self, prop, objects):
"""Add a property to the definition and set its ``objects``."""
self._properties.add(prop)
objects = set(objects)
self._objects |= objects
pairs = self._pairs
for o in self._objects:
if o in objects:
pairs.add((o, prop))
else:
pairs.discard((o, prop))
def union_update(self, other, ignore_conflicts=False):
"""Update the definition with the union of the ``other``."""
if not ignore_conflicts:
ensure_compatible(self, other)
self._objects |= other._objects
self._properties |= other._properties
self._pairs |= other._pairs
def intersection_update(self, other, ignore_conflicts=False):
"""Update the definition with the intersection of the ``other``."""
if not ignore_conflicts:
ensure_compatible(self, other)
self._objects &= other._objects
self._properties &= other._properties
self._pairs &= other._pairs
def __ior__(self, other):
self.union_update(other)
return self
def __iand__(self, other):
self.intersection_update(other)
return self
def union(self, other, ignore_conflicts=False):
"""Return a new definition from the union of the definitions."""
result = self.copy()
result.union_update(other, ignore_conflicts)
return result
__or__ = union
__and__ = intersection
|
xflr6/concepts | concepts/tools.py | max_len | python | def max_len(iterable, minimum=0):
try:
result = max(map(len, iterable))
except ValueError:
result = minimum
return minimum if result < minimum else result | Return the len() of the longest item in ``iterable`` or ``minimum``.
>>> max_len(['spam', 'ham'])
4
>>> max_len([])
0
>>> max_len(['ham'], 4)
4 | train | https://github.com/xflr6/concepts/blob/2801b27b05fa02cccee7d549451810ffcbf5c942/concepts/tools.py#L123-L139 | null | # tools.py
import operator
import zlib
from itertools import permutations, groupby, starmap
from . import _compat
__all__ = ['Unique', 'max_len', 'maximal', 'lazyproperty', 'crc32_hex']
class Unique(_compat.MutableSet):
"""Unique items preserving order.
>>> Unique([3, 2, 1, 3, 2, 1, 0])
Unique([3, 2, 1, 0])
"""
@classmethod
def _fromargs(cls, _seen, _items):
inst = super(Unique, cls).__new__(cls)
inst._seen = _seen
inst._items = _items
return inst
def __init__(self, iterable=()):
self._seen = seen = set()
add = seen.add
self._items = [item for item in iterable
if item not in seen and not add(item)]
def copy(self):
return self._fromargs(self._seen.copy(), self._items[:])
def __iter__(self):
return iter(self._items)
def __len__(self):
return len(self._items)
def __contains__(self, item):
return item in self._seen
def __repr__(self):
items = repr(self._items) if self._items else ''
return '%s(%s)' % (self.__class__.__name__, items)
def add(self, item):
if item not in self._seen:
self._seen.add(item)
self._items.append(item)
def discard(self, item):
if item in self._seen:
self._seen.remove(item)
self._items.remove(item)
def replace(self, item, new_item):
"""Replace an item preserving order.
>>> u = Unique([0, 1, 2])
>>> u.replace(1, 'spam')
>>> u
Unique([0, 'spam', 2])
>>> u.replace('eggs', 1)
Traceback (most recent call last):
...
ValueError: 'eggs' is not in list
>>> u.replace('spam', 0)
Traceback (most recent call last):
...
ValueError: 0 already in list
"""
if new_item in self._seen:
raise ValueError('%r already in list' % new_item)
idx = self._items.index(item)
self._seen.remove(item)
self._seen.add(new_item)
self._items[idx] = new_item
def move(self, item, new_index):
"""Move an item to the given position.
>>> u = Unique(['spam', 'eggs'])
>>> u.move('spam', 1)
>>> u
Unique(['eggs', 'spam'])
>>> u.move('ham', 0)
Traceback (most recent call last):
...
ValueError: 'ham' is not in list
"""
idx = self._items.index(item)
if idx != new_index:
item = self._items.pop(idx)
self._items.insert(new_index, item)
def issuperset(self, items):
"""Return whether this collection contains all items.
>>> Unique(['spam', 'eggs']).issuperset(['spam', 'spam', 'spam'])
True
"""
return all(_compat.map(self._seen.__contains__, items))
def rsub(self, items):
"""Return order preserving unique items not in this collection.
>>> Unique(['spam']).rsub(['ham', 'spam', 'eggs'])
Unique(['ham', 'eggs'])
"""
ignore = self._seen
seen = set()
add = seen.add
items = [i for i in items
if i not in ignore and i not in seen and not add(i)]
return self._fromargs(seen, items)
def maximal(iterable, comparison=operator.lt, _groupkey=operator.itemgetter(0)):
"""Yield the unique maximal elements from ``iterable`` using ``comparison``.
>>> list(maximal([1, 2, 3, 3]))
[3]
>>> list(maximal([1]))
[1]
"""
iterable = set(iterable)
if len(iterable) < 2:
return iterable
return (item for item, pairs
in groupby(permutations(iterable, 2), key=_groupkey)
if not any(starmap(comparison, pairs)))
class lazyproperty(object):
"""Non-data descriptor caching the computed result as instance attribute.
>>> class Spam(object):
... @lazyproperty
... def eggs(self):
... return 'spamspamspam'
>>> spam=Spam(); spam.eggs
'spamspamspam'
>>> spam.eggs='eggseggseggs'; spam.eggs
'eggseggseggs'
>>> Spam().eggs
'spamspamspam'
>>> Spam.eggs # doctest: +ELLIPSIS
<...lazyproperty object at 0x...>
"""
def __init__(self, fget):
self.fget = fget
for attr in ('__module__', '__name__', '__doc__'):
setattr(self, attr, getattr(fget, attr))
def __get__(self, instance, owner):
if instance is None:
return self
result = instance.__dict__[self.__name__] = self.fget(instance)
return result
def crc32_hex(data):
"""Return unsigned CRC32 of binary data as hex-encoded string.
>>> crc32_hex(b'spam')
'43daff3d'
"""
return '%x' % (zlib.crc32(data) & 0xffffffff)
|
xflr6/concepts | concepts/tools.py | maximal | python | def maximal(iterable, comparison=operator.lt, _groupkey=operator.itemgetter(0)):
iterable = set(iterable)
if len(iterable) < 2:
return iterable
return (item for item, pairs
in groupby(permutations(iterable, 2), key=_groupkey)
if not any(starmap(comparison, pairs))) | Yield the unique maximal elements from ``iterable`` using ``comparison``.
>>> list(maximal([1, 2, 3, 3]))
[3]
>>> list(maximal([1]))
[1] | train | https://github.com/xflr6/concepts/blob/2801b27b05fa02cccee7d549451810ffcbf5c942/concepts/tools.py#L142-L156 | null | # tools.py
import operator
import zlib
from itertools import permutations, groupby, starmap
from . import _compat
__all__ = ['Unique', 'max_len', 'maximal', 'lazyproperty', 'crc32_hex']
class Unique(_compat.MutableSet):
"""Unique items preserving order.
>>> Unique([3, 2, 1, 3, 2, 1, 0])
Unique([3, 2, 1, 0])
"""
@classmethod
def _fromargs(cls, _seen, _items):
inst = super(Unique, cls).__new__(cls)
inst._seen = _seen
inst._items = _items
return inst
def __init__(self, iterable=()):
self._seen = seen = set()
add = seen.add
self._items = [item for item in iterable
if item not in seen and not add(item)]
def copy(self):
return self._fromargs(self._seen.copy(), self._items[:])
def __iter__(self):
return iter(self._items)
def __len__(self):
return len(self._items)
def __contains__(self, item):
return item in self._seen
def __repr__(self):
items = repr(self._items) if self._items else ''
return '%s(%s)' % (self.__class__.__name__, items)
def add(self, item):
if item not in self._seen:
self._seen.add(item)
self._items.append(item)
def discard(self, item):
if item in self._seen:
self._seen.remove(item)
self._items.remove(item)
def replace(self, item, new_item):
"""Replace an item preserving order.
>>> u = Unique([0, 1, 2])
>>> u.replace(1, 'spam')
>>> u
Unique([0, 'spam', 2])
>>> u.replace('eggs', 1)
Traceback (most recent call last):
...
ValueError: 'eggs' is not in list
>>> u.replace('spam', 0)
Traceback (most recent call last):
...
ValueError: 0 already in list
"""
if new_item in self._seen:
raise ValueError('%r already in list' % new_item)
idx = self._items.index(item)
self._seen.remove(item)
self._seen.add(new_item)
self._items[idx] = new_item
def move(self, item, new_index):
"""Move an item to the given position.
>>> u = Unique(['spam', 'eggs'])
>>> u.move('spam', 1)
>>> u
Unique(['eggs', 'spam'])
>>> u.move('ham', 0)
Traceback (most recent call last):
...
ValueError: 'ham' is not in list
"""
idx = self._items.index(item)
if idx != new_index:
item = self._items.pop(idx)
self._items.insert(new_index, item)
def issuperset(self, items):
"""Return whether this collection contains all items.
>>> Unique(['spam', 'eggs']).issuperset(['spam', 'spam', 'spam'])
True
"""
return all(_compat.map(self._seen.__contains__, items))
def rsub(self, items):
"""Return order preserving unique items not in this collection.
>>> Unique(['spam']).rsub(['ham', 'spam', 'eggs'])
Unique(['ham', 'eggs'])
"""
ignore = self._seen
seen = set()
add = seen.add
items = [i for i in items
if i not in ignore and i not in seen and not add(i)]
return self._fromargs(seen, items)
def max_len(iterable, minimum=0):
"""Return the len() of the longest item in ``iterable`` or ``minimum``.
>>> max_len(['spam', 'ham'])
4
>>> max_len([])
0
>>> max_len(['ham'], 4)
4
"""
try:
result = max(map(len, iterable))
except ValueError:
result = minimum
return minimum if result < minimum else result
class lazyproperty(object):
"""Non-data descriptor caching the computed result as instance attribute.
>>> class Spam(object):
... @lazyproperty
... def eggs(self):
... return 'spamspamspam'
>>> spam=Spam(); spam.eggs
'spamspamspam'
>>> spam.eggs='eggseggseggs'; spam.eggs
'eggseggseggs'
>>> Spam().eggs
'spamspamspam'
>>> Spam.eggs # doctest: +ELLIPSIS
<...lazyproperty object at 0x...>
"""
def __init__(self, fget):
self.fget = fget
for attr in ('__module__', '__name__', '__doc__'):
setattr(self, attr, getattr(fget, attr))
def __get__(self, instance, owner):
if instance is None:
return self
result = instance.__dict__[self.__name__] = self.fget(instance)
return result
def crc32_hex(data):
"""Return unsigned CRC32 of binary data as hex-encoded string.
>>> crc32_hex(b'spam')
'43daff3d'
"""
return '%x' % (zlib.crc32(data) & 0xffffffff)
|
xflr6/concepts | concepts/tools.py | Unique.replace | python | def replace(self, item, new_item):
if new_item in self._seen:
raise ValueError('%r already in list' % new_item)
idx = self._items.index(item)
self._seen.remove(item)
self._seen.add(new_item)
self._items[idx] = new_item | Replace an item preserving order.
>>> u = Unique([0, 1, 2])
>>> u.replace(1, 'spam')
>>> u
Unique([0, 'spam', 2])
>>> u.replace('eggs', 1)
Traceback (most recent call last):
...
ValueError: 'eggs' is not in list
>>> u.replace('spam', 0)
Traceback (most recent call last):
...
ValueError: 0 already in list | train | https://github.com/xflr6/concepts/blob/2801b27b05fa02cccee7d549451810ffcbf5c942/concepts/tools.py#L58-L81 | null | class Unique(_compat.MutableSet):
"""Unique items preserving order.
>>> Unique([3, 2, 1, 3, 2, 1, 0])
Unique([3, 2, 1, 0])
"""
@classmethod
def _fromargs(cls, _seen, _items):
inst = super(Unique, cls).__new__(cls)
inst._seen = _seen
inst._items = _items
return inst
def __init__(self, iterable=()):
self._seen = seen = set()
add = seen.add
self._items = [item for item in iterable
if item not in seen and not add(item)]
def copy(self):
return self._fromargs(self._seen.copy(), self._items[:])
def __iter__(self):
return iter(self._items)
def __len__(self):
return len(self._items)
def __contains__(self, item):
return item in self._seen
def __repr__(self):
items = repr(self._items) if self._items else ''
return '%s(%s)' % (self.__class__.__name__, items)
def add(self, item):
if item not in self._seen:
self._seen.add(item)
self._items.append(item)
def discard(self, item):
if item in self._seen:
self._seen.remove(item)
self._items.remove(item)
def move(self, item, new_index):
"""Move an item to the given position.
>>> u = Unique(['spam', 'eggs'])
>>> u.move('spam', 1)
>>> u
Unique(['eggs', 'spam'])
>>> u.move('ham', 0)
Traceback (most recent call last):
...
ValueError: 'ham' is not in list
"""
idx = self._items.index(item)
if idx != new_index:
item = self._items.pop(idx)
self._items.insert(new_index, item)
def issuperset(self, items):
"""Return whether this collection contains all items.
>>> Unique(['spam', 'eggs']).issuperset(['spam', 'spam', 'spam'])
True
"""
return all(_compat.map(self._seen.__contains__, items))
def rsub(self, items):
"""Return order preserving unique items not in this collection.
>>> Unique(['spam']).rsub(['ham', 'spam', 'eggs'])
Unique(['ham', 'eggs'])
"""
ignore = self._seen
seen = set()
add = seen.add
items = [i for i in items
if i not in ignore and i not in seen and not add(i)]
return self._fromargs(seen, items)
|
xflr6/concepts | concepts/tools.py | Unique.move | python | def move(self, item, new_index):
idx = self._items.index(item)
if idx != new_index:
item = self._items.pop(idx)
self._items.insert(new_index, item) | Move an item to the given position.
>>> u = Unique(['spam', 'eggs'])
>>> u.move('spam', 1)
>>> u
Unique(['eggs', 'spam'])
>>> u.move('ham', 0)
Traceback (most recent call last):
...
ValueError: 'ham' is not in list | train | https://github.com/xflr6/concepts/blob/2801b27b05fa02cccee7d549451810ffcbf5c942/concepts/tools.py#L83-L99 | null | class Unique(_compat.MutableSet):
"""Unique items preserving order.
>>> Unique([3, 2, 1, 3, 2, 1, 0])
Unique([3, 2, 1, 0])
"""
@classmethod
def _fromargs(cls, _seen, _items):
inst = super(Unique, cls).__new__(cls)
inst._seen = _seen
inst._items = _items
return inst
def __init__(self, iterable=()):
self._seen = seen = set()
add = seen.add
self._items = [item for item in iterable
if item not in seen and not add(item)]
def copy(self):
return self._fromargs(self._seen.copy(), self._items[:])
def __iter__(self):
return iter(self._items)
def __len__(self):
return len(self._items)
def __contains__(self, item):
return item in self._seen
def __repr__(self):
items = repr(self._items) if self._items else ''
return '%s(%s)' % (self.__class__.__name__, items)
def add(self, item):
if item not in self._seen:
self._seen.add(item)
self._items.append(item)
def discard(self, item):
if item in self._seen:
self._seen.remove(item)
self._items.remove(item)
def replace(self, item, new_item):
"""Replace an item preserving order.
>>> u = Unique([0, 1, 2])
>>> u.replace(1, 'spam')
>>> u
Unique([0, 'spam', 2])
>>> u.replace('eggs', 1)
Traceback (most recent call last):
...
ValueError: 'eggs' is not in list
>>> u.replace('spam', 0)
Traceback (most recent call last):
...
ValueError: 0 already in list
"""
if new_item in self._seen:
raise ValueError('%r already in list' % new_item)
idx = self._items.index(item)
self._seen.remove(item)
self._seen.add(new_item)
self._items[idx] = new_item
def issuperset(self, items):
"""Return whether this collection contains all items.
>>> Unique(['spam', 'eggs']).issuperset(['spam', 'spam', 'spam'])
True
"""
return all(_compat.map(self._seen.__contains__, items))
def rsub(self, items):
"""Return order preserving unique items not in this collection.
>>> Unique(['spam']).rsub(['ham', 'spam', 'eggs'])
Unique(['ham', 'eggs'])
"""
ignore = self._seen
seen = set()
add = seen.add
items = [i for i in items
if i not in ignore and i not in seen and not add(i)]
return self._fromargs(seen, items)
|
xflr6/concepts | concepts/tools.py | Unique.issuperset | python | def issuperset(self, items):
return all(_compat.map(self._seen.__contains__, items)) | Return whether this collection contains all items.
>>> Unique(['spam', 'eggs']).issuperset(['spam', 'spam', 'spam'])
True | train | https://github.com/xflr6/concepts/blob/2801b27b05fa02cccee7d549451810ffcbf5c942/concepts/tools.py#L101-L107 | null | class Unique(_compat.MutableSet):
"""Unique items preserving order.
>>> Unique([3, 2, 1, 3, 2, 1, 0])
Unique([3, 2, 1, 0])
"""
@classmethod
def _fromargs(cls, _seen, _items):
inst = super(Unique, cls).__new__(cls)
inst._seen = _seen
inst._items = _items
return inst
def __init__(self, iterable=()):
self._seen = seen = set()
add = seen.add
self._items = [item for item in iterable
if item not in seen and not add(item)]
def copy(self):
return self._fromargs(self._seen.copy(), self._items[:])
def __iter__(self):
return iter(self._items)
def __len__(self):
return len(self._items)
def __contains__(self, item):
return item in self._seen
def __repr__(self):
items = repr(self._items) if self._items else ''
return '%s(%s)' % (self.__class__.__name__, items)
def add(self, item):
if item not in self._seen:
self._seen.add(item)
self._items.append(item)
def discard(self, item):
if item in self._seen:
self._seen.remove(item)
self._items.remove(item)
def replace(self, item, new_item):
"""Replace an item preserving order.
>>> u = Unique([0, 1, 2])
>>> u.replace(1, 'spam')
>>> u
Unique([0, 'spam', 2])
>>> u.replace('eggs', 1)
Traceback (most recent call last):
...
ValueError: 'eggs' is not in list
>>> u.replace('spam', 0)
Traceback (most recent call last):
...
ValueError: 0 already in list
"""
if new_item in self._seen:
raise ValueError('%r already in list' % new_item)
idx = self._items.index(item)
self._seen.remove(item)
self._seen.add(new_item)
self._items[idx] = new_item
def move(self, item, new_index):
"""Move an item to the given position.
>>> u = Unique(['spam', 'eggs'])
>>> u.move('spam', 1)
>>> u
Unique(['eggs', 'spam'])
>>> u.move('ham', 0)
Traceback (most recent call last):
...
ValueError: 'ham' is not in list
"""
idx = self._items.index(item)
if idx != new_index:
item = self._items.pop(idx)
self._items.insert(new_index, item)
def rsub(self, items):
"""Return order preserving unique items not in this collection.
>>> Unique(['spam']).rsub(['ham', 'spam', 'eggs'])
Unique(['ham', 'eggs'])
"""
ignore = self._seen
seen = set()
add = seen.add
items = [i for i in items
if i not in ignore and i not in seen and not add(i)]
return self._fromargs(seen, items)
|
xflr6/concepts | concepts/tools.py | Unique.rsub | python | def rsub(self, items):
ignore = self._seen
seen = set()
add = seen.add
items = [i for i in items
if i not in ignore and i not in seen and not add(i)]
return self._fromargs(seen, items) | Return order preserving unique items not in this collection.
>>> Unique(['spam']).rsub(['ham', 'spam', 'eggs'])
Unique(['ham', 'eggs']) | train | https://github.com/xflr6/concepts/blob/2801b27b05fa02cccee7d549451810ffcbf5c942/concepts/tools.py#L109-L120 | null | class Unique(_compat.MutableSet):
"""Unique items preserving order.
>>> Unique([3, 2, 1, 3, 2, 1, 0])
Unique([3, 2, 1, 0])
"""
@classmethod
def _fromargs(cls, _seen, _items):
inst = super(Unique, cls).__new__(cls)
inst._seen = _seen
inst._items = _items
return inst
def __init__(self, iterable=()):
self._seen = seen = set()
add = seen.add
self._items = [item for item in iterable
if item not in seen and not add(item)]
def copy(self):
return self._fromargs(self._seen.copy(), self._items[:])
def __iter__(self):
return iter(self._items)
def __len__(self):
return len(self._items)
def __contains__(self, item):
return item in self._seen
def __repr__(self):
items = repr(self._items) if self._items else ''
return '%s(%s)' % (self.__class__.__name__, items)
def add(self, item):
if item not in self._seen:
self._seen.add(item)
self._items.append(item)
def discard(self, item):
if item in self._seen:
self._seen.remove(item)
self._items.remove(item)
def replace(self, item, new_item):
"""Replace an item preserving order.
>>> u = Unique([0, 1, 2])
>>> u.replace(1, 'spam')
>>> u
Unique([0, 'spam', 2])
>>> u.replace('eggs', 1)
Traceback (most recent call last):
...
ValueError: 'eggs' is not in list
>>> u.replace('spam', 0)
Traceback (most recent call last):
...
ValueError: 0 already in list
"""
if new_item in self._seen:
raise ValueError('%r already in list' % new_item)
idx = self._items.index(item)
self._seen.remove(item)
self._seen.add(new_item)
self._items[idx] = new_item
def move(self, item, new_index):
"""Move an item to the given position.
>>> u = Unique(['spam', 'eggs'])
>>> u.move('spam', 1)
>>> u
Unique(['eggs', 'spam'])
>>> u.move('ham', 0)
Traceback (most recent call last):
...
ValueError: 'ham' is not in list
"""
idx = self._items.index(item)
if idx != new_index:
item = self._items.pop(idx)
self._items.insert(new_index, item)
def issuperset(self, items):
"""Return whether this collection contains all items.
>>> Unique(['spam', 'eggs']).issuperset(['spam', 'spam', 'spam'])
True
"""
return all(_compat.map(self._seen.__contains__, items))
|
crossbario/txaio | txaio/_iotype.py | guess_stream_needs_encoding | python | def guess_stream_needs_encoding(fileobj, default=True):
# XXX: Unicode
# On Python 2, stdout is bytes. However, we can't wrap it in a
# TextIOWrapper, as it's not from IOBase, so it doesn't have .seekable.
# It does, however, have a mode, and we can cheese it base on that.
# On Python 3, stdout is a TextIOWrapper, and so we can safely write
# str to it, and it will encode it correctly for the target terminal or
# whatever.
# If it's a io.BytesIO or StringIO, then it won't have a mode, but it
# is a read/write stream, so we can get its type by reading 0 bytes and
# checking the type.
try:
# If it's a r/w stream, this will give us the type of it
t = type(fileobj.read(0))
if t is bytes:
return True
elif t is unicode:
return False
except Exception:
pass
try:
mode = fileobj.mode
if PY2 and mode == "w":
mode = "wb"
if "b" in mode:
return True
else:
return False
except Exception:
pass
return default | Guess the type (bytes/unicode) of this stream, and return whether or not it
requires text to be encoded before written into it. | train | https://github.com/crossbario/txaio/blob/29c77ff1210cabd4cc03f16f34672612e7eef704/txaio/_iotype.py#L37-L77 | null | ###############################################################################
#
# The MIT License (MIT)
#
# Copyright (c) Crossbar.io Technologies GmbH
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
###############################################################################
from __future__ import absolute_import, division
from six import PY3, PY2
if PY3:
unicode = str
else:
unicode = unicode
|
crossbario/txaio | txaio/aio.py | with_config | python | def with_config(loop=None):
cfg = _Config()
if loop is not None:
cfg.loop = loop
return _AsyncioApi(cfg) | :return: an instance of the txaio API with the given
configuration. This won't affect anything using the 'gloabl'
config nor other instances created using this function.
If you need to customize txaio configuration separately (e.g. to
use multiple event-loops in asyncio), you can take code like this:
import txaio
class FunTimes(object):
def something_async(self):
return txaio.call_later(1, lambda: 'some result')
and instead do this:
import txaio
class FunTimes(object):
txaio = txaio
def something_async(self):
# this will run in the local/new event loop created in the constructor
return self.txaio.call_later(1, lambda: 'some result')
fun0 = FunTimes()
fun1 = FunTimes()
fun1.txaio = txaio.with_config(loop=asyncio.new_event_loop())
So `fun1` will run its futures on the newly-created event loop,
while `fun0` will work just as it did before this `with_config`
method was introduced (after 2.6.2). | train | https://github.com/crossbario/txaio/blob/29c77ff1210cabd4cc03f16f34672612e7eef704/txaio/aio.py#L96-L136 | null | ###############################################################################
#
# The MIT License (MIT)
#
# Copyright (c) Crossbar.io Technologies GmbH
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
###############################################################################
from __future__ import absolute_import, print_function
import os
import sys
import time
import weakref
import functools
import traceback
import logging
import inspect
from datetime import datetime
from txaio.interfaces import IFailedFuture, ILogger, log_levels
from txaio._iotype import guess_stream_needs_encoding
from txaio._common import _BatchedTimer
from txaio import _Config
import six
try:
import asyncio
from asyncio import iscoroutine
from asyncio import Future
except ImportError:
# Trollius >= 0.3 was renamed
# noinspection PyUnresolvedReferences
import trollius as asyncio
from trollius import iscoroutine
from trollius import Future
try:
from types import AsyncGeneratorType # python 3.5+
except ImportError:
class AsyncGeneratorType(object):
pass
def _create_future_of_loop(loop):
return loop.create_future()
def _create_future_directly(loop=None):
return Future(loop=loop)
def _create_task_of_loop(res, loop):
return loop.create_task(res)
def _create_task_directly(res, loop=None):
return asyncio.Task(res, loop=loop)
if sys.version_info >= (3, 4, 2):
_create_task = _create_task_of_loop
if sys.version_info >= (3, 5, 2):
_create_future = _create_future_of_loop
else:
_create_future = _create_future_directly
else:
_create_task = _create_task_directly
_create_future = _create_future_directly
config = _Config()
# logging should probably all be folded into _AsyncioApi as well
_stderr, _stdout = sys.stderr, sys.stdout
_loggers = weakref.WeakSet() # weak-ref's of each logger we've created before start_logging()
_log_level = 'info' # re-set by start_logging
_started_logging = False
_categories = {}
def add_log_categories(categories):
_categories.update(categories)
class FailedFuture(IFailedFuture):
"""
This provides an object with any features from Twisted's Failure
that we might need in Autobahn classes that use FutureMixin.
We need to encapsulate information from exceptions so that
errbacks still have access to the traceback (in case they want to
print it out) outside of "except" blocks.
"""
def __init__(self, type_, value, traceback):
"""
These are the same parameters as returned from ``sys.exc_info()``
:param type_: exception type
:param value: the Exception instance
:param traceback: a traceback object
"""
self._type = type_
self._value = value
self._traceback = traceback
@property
def value(self):
return self._value
def __str__(self):
return str(self.value)
# logging API methods
def _log(logger, level, format=u'', **kwargs):
# Look for a log_category, switch it in if we have it
if "log_category" in kwargs and kwargs["log_category"] in _categories:
format = _categories.get(kwargs["log_category"])
kwargs['log_time'] = time.time()
kwargs['log_level'] = level
kwargs['log_format'] = format
# NOTE: turning kwargs into a single "argument which
# is a dict" on purpose, since a LogRecord only keeps
# args, not kwargs.
if level == 'trace':
level = 'debug'
kwargs['txaio_trace'] = True
msg = format.format(**kwargs)
getattr(logger._logger, level)(msg)
def _no_op(*args, **kw):
pass
class _TxaioLogWrapper(ILogger):
def __init__(self, logger):
self._logger = logger
self._set_log_level(_log_level)
def emit(self, level, *args, **kwargs):
func = getattr(self, level)
return func(*args, **kwargs)
def _set_log_level(self, level):
target_level = log_levels.index(level)
# this binds either _log or _no_op above to this instance,
# depending on the desired level.
for (idx, name) in enumerate(log_levels):
if idx <= target_level:
log_method = functools.partial(_log, self, name)
else:
log_method = _no_op
setattr(self, name, log_method)
self._log_level = level
class _TxaioFileHandler(logging.Handler, object):
def __init__(self, fileobj, **kw):
super(_TxaioFileHandler, self).__init__(**kw)
self._file = fileobj
self._encode = guess_stream_needs_encoding(fileobj)
def emit(self, record):
if isinstance(record.args, dict):
fmt = record.args.get(
'log_format',
record.args.get('log_message', u'')
)
message = fmt.format(**record.args)
dt = datetime.fromtimestamp(record.args.get('log_time', 0))
else:
message = record.getMessage()
if record.levelno == logging.ERROR and record.exc_info:
message += '\n'
for line in traceback.format_exception(*record.exc_info):
message = message + line
dt = datetime.fromtimestamp(record.created)
msg = u'{0} {1}{2}'.format(
dt.strftime("%Y-%m-%dT%H:%M:%S%z"),
message,
os.linesep
)
if self._encode:
msg = msg.encode('utf8')
self._file.write(msg)
def make_logger():
# we want the namespace to be the calling context of "make_logger"
# otherwise the root logger will be returned
cf = inspect.currentframe().f_back
if "self" in cf.f_locals:
# We're probably in a class init or method
cls = cf.f_locals["self"].__class__
namespace = '{0}.{1}'.format(cls.__module__, cls.__name__)
else:
namespace = cf.f_globals["__name__"]
if cf.f_code.co_name != "<module>":
# If it's not the module, and not in a class instance, add the code
# object's name.
namespace = namespace + "." + cf.f_code.co_name
logger = _TxaioLogWrapper(logging.getLogger(name=namespace))
# remember this so we can set their levels properly once
# start_logging is actually called
_loggers.add(logger)
return logger
def start_logging(out=_stdout, level='info'):
"""
Begin logging.
:param out: if provided, a file-like object to log to. By default, this is
stdout.
:param level: the maximum log-level to emit (a string)
"""
global _log_level, _loggers, _started_logging
if level not in log_levels:
raise RuntimeError(
"Invalid log level '{0}'; valid are: {1}".format(
level, ', '.join(log_levels)
)
)
if _started_logging:
return
_started_logging = True
_log_level = level
handler = _TxaioFileHandler(out)
logging.getLogger().addHandler(handler)
# note: Don't need to call basicConfig() or similar, because we've
# now added at least one handler to the root logger
logging.raiseExceptions = True # FIXME
level_to_stdlib = {
'critical': logging.CRITICAL,
'error': logging.ERROR,
'warn': logging.WARNING,
'info': logging.INFO,
'debug': logging.DEBUG,
'trace': logging.DEBUG,
}
logging.getLogger().setLevel(level_to_stdlib[level])
# make sure any loggers we created before now have their log-level
# set (any created after now will get it from _log_level
for logger in _loggers:
logger._set_log_level(level)
def set_global_log_level(level):
"""
Set the global log level on all loggers instantiated by txaio.
"""
for logger in _loggers:
logger._set_log_level(level)
global _log_level
_log_level = level
def get_global_log_level():
return _log_level
# asyncio API methods; the module-level functions are (now, for
# backwards-compat) exported from a default instance of this class
_unspecified = object()
class _AsyncioApi(object):
using_twisted = False
using_asyncio = True
def __init__(self, config):
if config.loop is None:
config.loop = asyncio.get_event_loop()
self._config = config
def failure_message(self, fail):
"""
:param fail: must be an IFailedFuture
returns a unicode error-message
"""
try:
return u'{0}: {1}'.format(
fail._value.__class__.__name__,
str(fail._value),
)
except Exception:
return u'Failed to produce failure message for "{0}"'.format(fail)
def failure_traceback(self, fail):
"""
:param fail: must be an IFailedFuture
returns a traceback instance
"""
return fail._traceback
def failure_format_traceback(self, fail):
"""
:param fail: must be an IFailedFuture
returns a string
"""
try:
f = six.StringIO()
traceback.print_exception(
fail._type,
fail.value,
fail._traceback,
file=f,
)
return f.getvalue()
except Exception:
return u"Failed to format failure traceback for '{0}'".format(fail)
def create_future(self, result=_unspecified, error=_unspecified, canceller=_unspecified):
if result is not _unspecified and error is not _unspecified:
raise ValueError("Cannot have both result and error.")
f = _create_future(loop=self._config.loop)
if result is not _unspecified:
resolve(f, result)
elif error is not _unspecified:
reject(f, error)
# Twisted's only API for cancelling is to pass a
# single-argument callable to the Deferred constructor, so
# txaio apes that here for asyncio. The argument is the Future
# that has been cancelled.
if canceller is not _unspecified:
def done(f):
try:
f.exception()
except asyncio.CancelledError:
canceller(f)
f.add_done_callback(done)
return f
def create_future_success(self, result):
return self.create_future(result=result)
def create_future_error(self, error=None):
f = self.create_future()
reject(f, error)
return f
def as_future(self, fun, *args, **kwargs):
try:
res = fun(*args, **kwargs)
except Exception:
return create_future_error(create_failure())
else:
if isinstance(res, Future):
return res
elif iscoroutine(res):
return _create_task(res, loop=self._config.loop)
elif isinstance(res, AsyncGeneratorType):
raise RuntimeError(
"as_future() received an async generator function; does "
"'{}' use 'yield' when you meant 'await'?".format(
str(fun)
)
)
else:
return create_future_success(res)
def is_future(self, obj):
return iscoroutine(obj) or isinstance(obj, Future)
def call_later(self, delay, fun, *args, **kwargs):
# loop.call_later doesn't support kwargs
real_call = functools.partial(fun, *args, **kwargs)
return self._config.loop.call_later(delay, real_call)
def make_batched_timer(self, bucket_seconds, chunk_size=100):
"""
Creates and returns an object implementing
:class:`txaio.IBatchedTimer`.
:param bucket_seconds: the number of seconds in each bucket. That
is, a value of 5 means that any timeout within a 5 second
window will be in the same bucket, and get notified at the
same time. This is only accurate to "milliseconds".
:param chunk_size: when "doing" the callbacks in a particular
bucket, this controls how many we do at once before yielding to
the reactor.
"""
def get_seconds():
return self._config.loop.time()
return _BatchedTimer(
bucket_seconds * 1000.0, chunk_size,
seconds_provider=get_seconds,
delayed_call_creator=self.call_later,
)
def is_called(self, future):
return future.done()
def resolve(self, future, result=None):
future.set_result(result)
def reject(self, future, error=None):
if error is None:
error = create_failure() # will be error if we're not in an "except"
elif isinstance(error, Exception):
error = FailedFuture(type(error), error, None)
else:
if not isinstance(error, IFailedFuture):
raise RuntimeError("reject requires an IFailedFuture or Exception")
future.set_exception(error.value)
def cancel(self, future):
future.cancel()
def create_failure(self, exception=None):
"""
This returns an object implementing IFailedFuture.
If exception is None (the default) we MUST be called within an
"except" block (such that sys.exc_info() returns useful
information).
"""
if exception:
return FailedFuture(type(exception), exception, None)
return FailedFuture(*sys.exc_info())
def add_callbacks(self, future, callback, errback):
"""
callback or errback may be None, but at least one must be
non-None.
"""
def done(f):
try:
res = f.result()
if callback:
callback(res)
except Exception:
if errback:
errback(create_failure())
return future.add_done_callback(done)
def gather(self, futures, consume_exceptions=True):
"""
This returns a Future that waits for all the Futures in the list
``futures``
:param futures: a list of Futures (or coroutines?)
:param consume_exceptions: if True, any errors are eaten and
returned in the result list.
"""
# from the asyncio docs: "If return_exceptions is True, exceptions
# in the tasks are treated the same as successful results, and
# gathered in the result list; otherwise, the first raised
# exception will be immediately propagated to the returned
# future."
return asyncio.gather(*futures, return_exceptions=consume_exceptions)
def sleep(self, delay):
"""
Inline sleep for use in co-routines.
:param delay: Time to sleep in seconds.
:type delay: float
"""
return asyncio.ensure_future(asyncio.sleep(delay))
_default_api = _AsyncioApi(config)
using_twisted = _default_api.using_twisted
using_asyncio = _default_api.using_asyncio
sleep = _default_api.sleep
failure_message = _default_api.failure_message
failure_traceback = _default_api.failure_traceback
failure_format_traceback = _default_api.failure_format_traceback
create_future = _default_api.create_future
create_future_success = _default_api.create_future_success
create_future_error = _default_api.create_future_error
as_future = _default_api.as_future
is_future = _default_api.is_future
call_later = _default_api.call_later
make_batched_timer = _default_api.make_batched_timer
is_called = _default_api.is_called
resolve = _default_api.resolve
reject = _default_api.reject
cancel = _default_api.cancel
create_failure = _default_api.create_failure
add_callbacks = _default_api.add_callbacks
gather = _default_api.gather
sleep = _default_api.sleep
|
crossbario/txaio | txaio/aio.py | start_logging | python | def start_logging(out=_stdout, level='info'):
global _log_level, _loggers, _started_logging
if level not in log_levels:
raise RuntimeError(
"Invalid log level '{0}'; valid are: {1}".format(
level, ', '.join(log_levels)
)
)
if _started_logging:
return
_started_logging = True
_log_level = level
handler = _TxaioFileHandler(out)
logging.getLogger().addHandler(handler)
# note: Don't need to call basicConfig() or similar, because we've
# now added at least one handler to the root logger
logging.raiseExceptions = True # FIXME
level_to_stdlib = {
'critical': logging.CRITICAL,
'error': logging.ERROR,
'warn': logging.WARNING,
'info': logging.INFO,
'debug': logging.DEBUG,
'trace': logging.DEBUG,
}
logging.getLogger().setLevel(level_to_stdlib[level])
# make sure any loggers we created before now have their log-level
# set (any created after now will get it from _log_level
for logger in _loggers:
logger._set_log_level(level) | Begin logging.
:param out: if provided, a file-like object to log to. By default, this is
stdout.
:param level: the maximum log-level to emit (a string) | train | https://github.com/crossbario/txaio/blob/29c77ff1210cabd4cc03f16f34672612e7eef704/txaio/aio.py#L283-L322 | null | ###############################################################################
#
# The MIT License (MIT)
#
# Copyright (c) Crossbar.io Technologies GmbH
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
###############################################################################
from __future__ import absolute_import, print_function
import os
import sys
import time
import weakref
import functools
import traceback
import logging
import inspect
from datetime import datetime
from txaio.interfaces import IFailedFuture, ILogger, log_levels
from txaio._iotype import guess_stream_needs_encoding
from txaio._common import _BatchedTimer
from txaio import _Config
import six
try:
import asyncio
from asyncio import iscoroutine
from asyncio import Future
except ImportError:
# Trollius >= 0.3 was renamed
# noinspection PyUnresolvedReferences
import trollius as asyncio
from trollius import iscoroutine
from trollius import Future
try:
from types import AsyncGeneratorType # python 3.5+
except ImportError:
class AsyncGeneratorType(object):
pass
def _create_future_of_loop(loop):
return loop.create_future()
def _create_future_directly(loop=None):
return Future(loop=loop)
def _create_task_of_loop(res, loop):
return loop.create_task(res)
def _create_task_directly(res, loop=None):
return asyncio.Task(res, loop=loop)
if sys.version_info >= (3, 4, 2):
_create_task = _create_task_of_loop
if sys.version_info >= (3, 5, 2):
_create_future = _create_future_of_loop
else:
_create_future = _create_future_directly
else:
_create_task = _create_task_directly
_create_future = _create_future_directly
config = _Config()
def with_config(loop=None):
"""
:return: an instance of the txaio API with the given
configuration. This won't affect anything using the 'gloabl'
config nor other instances created using this function.
If you need to customize txaio configuration separately (e.g. to
use multiple event-loops in asyncio), you can take code like this:
import txaio
class FunTimes(object):
def something_async(self):
return txaio.call_later(1, lambda: 'some result')
and instead do this:
import txaio
class FunTimes(object):
txaio = txaio
def something_async(self):
# this will run in the local/new event loop created in the constructor
return self.txaio.call_later(1, lambda: 'some result')
fun0 = FunTimes()
fun1 = FunTimes()
fun1.txaio = txaio.with_config(loop=asyncio.new_event_loop())
So `fun1` will run its futures on the newly-created event loop,
while `fun0` will work just as it did before this `with_config`
method was introduced (after 2.6.2).
"""
cfg = _Config()
if loop is not None:
cfg.loop = loop
return _AsyncioApi(cfg)
# logging should probably all be folded into _AsyncioApi as well
_stderr, _stdout = sys.stderr, sys.stdout
_loggers = weakref.WeakSet() # weak-ref's of each logger we've created before start_logging()
_log_level = 'info' # re-set by start_logging
_started_logging = False
_categories = {}
def add_log_categories(categories):
_categories.update(categories)
class FailedFuture(IFailedFuture):
"""
This provides an object with any features from Twisted's Failure
that we might need in Autobahn classes that use FutureMixin.
We need to encapsulate information from exceptions so that
errbacks still have access to the traceback (in case they want to
print it out) outside of "except" blocks.
"""
def __init__(self, type_, value, traceback):
"""
These are the same parameters as returned from ``sys.exc_info()``
:param type_: exception type
:param value: the Exception instance
:param traceback: a traceback object
"""
self._type = type_
self._value = value
self._traceback = traceback
@property
def value(self):
return self._value
def __str__(self):
return str(self.value)
# logging API methods
def _log(logger, level, format=u'', **kwargs):
# Look for a log_category, switch it in if we have it
if "log_category" in kwargs and kwargs["log_category"] in _categories:
format = _categories.get(kwargs["log_category"])
kwargs['log_time'] = time.time()
kwargs['log_level'] = level
kwargs['log_format'] = format
# NOTE: turning kwargs into a single "argument which
# is a dict" on purpose, since a LogRecord only keeps
# args, not kwargs.
if level == 'trace':
level = 'debug'
kwargs['txaio_trace'] = True
msg = format.format(**kwargs)
getattr(logger._logger, level)(msg)
def _no_op(*args, **kw):
pass
class _TxaioLogWrapper(ILogger):
def __init__(self, logger):
self._logger = logger
self._set_log_level(_log_level)
def emit(self, level, *args, **kwargs):
func = getattr(self, level)
return func(*args, **kwargs)
def _set_log_level(self, level):
target_level = log_levels.index(level)
# this binds either _log or _no_op above to this instance,
# depending on the desired level.
for (idx, name) in enumerate(log_levels):
if idx <= target_level:
log_method = functools.partial(_log, self, name)
else:
log_method = _no_op
setattr(self, name, log_method)
self._log_level = level
class _TxaioFileHandler(logging.Handler, object):
def __init__(self, fileobj, **kw):
super(_TxaioFileHandler, self).__init__(**kw)
self._file = fileobj
self._encode = guess_stream_needs_encoding(fileobj)
def emit(self, record):
if isinstance(record.args, dict):
fmt = record.args.get(
'log_format',
record.args.get('log_message', u'')
)
message = fmt.format(**record.args)
dt = datetime.fromtimestamp(record.args.get('log_time', 0))
else:
message = record.getMessage()
if record.levelno == logging.ERROR and record.exc_info:
message += '\n'
for line in traceback.format_exception(*record.exc_info):
message = message + line
dt = datetime.fromtimestamp(record.created)
msg = u'{0} {1}{2}'.format(
dt.strftime("%Y-%m-%dT%H:%M:%S%z"),
message,
os.linesep
)
if self._encode:
msg = msg.encode('utf8')
self._file.write(msg)
def make_logger():
# we want the namespace to be the calling context of "make_logger"
# otherwise the root logger will be returned
cf = inspect.currentframe().f_back
if "self" in cf.f_locals:
# We're probably in a class init or method
cls = cf.f_locals["self"].__class__
namespace = '{0}.{1}'.format(cls.__module__, cls.__name__)
else:
namespace = cf.f_globals["__name__"]
if cf.f_code.co_name != "<module>":
# If it's not the module, and not in a class instance, add the code
# object's name.
namespace = namespace + "." + cf.f_code.co_name
logger = _TxaioLogWrapper(logging.getLogger(name=namespace))
# remember this so we can set their levels properly once
# start_logging is actually called
_loggers.add(logger)
return logger
def set_global_log_level(level):
"""
Set the global log level on all loggers instantiated by txaio.
"""
for logger in _loggers:
logger._set_log_level(level)
global _log_level
_log_level = level
def get_global_log_level():
return _log_level
# asyncio API methods; the module-level functions are (now, for
# backwards-compat) exported from a default instance of this class
_unspecified = object()
class _AsyncioApi(object):
using_twisted = False
using_asyncio = True
def __init__(self, config):
if config.loop is None:
config.loop = asyncio.get_event_loop()
self._config = config
def failure_message(self, fail):
"""
:param fail: must be an IFailedFuture
returns a unicode error-message
"""
try:
return u'{0}: {1}'.format(
fail._value.__class__.__name__,
str(fail._value),
)
except Exception:
return u'Failed to produce failure message for "{0}"'.format(fail)
def failure_traceback(self, fail):
"""
:param fail: must be an IFailedFuture
returns a traceback instance
"""
return fail._traceback
def failure_format_traceback(self, fail):
"""
:param fail: must be an IFailedFuture
returns a string
"""
try:
f = six.StringIO()
traceback.print_exception(
fail._type,
fail.value,
fail._traceback,
file=f,
)
return f.getvalue()
except Exception:
return u"Failed to format failure traceback for '{0}'".format(fail)
def create_future(self, result=_unspecified, error=_unspecified, canceller=_unspecified):
if result is not _unspecified and error is not _unspecified:
raise ValueError("Cannot have both result and error.")
f = _create_future(loop=self._config.loop)
if result is not _unspecified:
resolve(f, result)
elif error is not _unspecified:
reject(f, error)
# Twisted's only API for cancelling is to pass a
# single-argument callable to the Deferred constructor, so
# txaio apes that here for asyncio. The argument is the Future
# that has been cancelled.
if canceller is not _unspecified:
def done(f):
try:
f.exception()
except asyncio.CancelledError:
canceller(f)
f.add_done_callback(done)
return f
def create_future_success(self, result):
return self.create_future(result=result)
def create_future_error(self, error=None):
f = self.create_future()
reject(f, error)
return f
def as_future(self, fun, *args, **kwargs):
try:
res = fun(*args, **kwargs)
except Exception:
return create_future_error(create_failure())
else:
if isinstance(res, Future):
return res
elif iscoroutine(res):
return _create_task(res, loop=self._config.loop)
elif isinstance(res, AsyncGeneratorType):
raise RuntimeError(
"as_future() received an async generator function; does "
"'{}' use 'yield' when you meant 'await'?".format(
str(fun)
)
)
else:
return create_future_success(res)
def is_future(self, obj):
return iscoroutine(obj) or isinstance(obj, Future)
def call_later(self, delay, fun, *args, **kwargs):
# loop.call_later doesn't support kwargs
real_call = functools.partial(fun, *args, **kwargs)
return self._config.loop.call_later(delay, real_call)
def make_batched_timer(self, bucket_seconds, chunk_size=100):
"""
Creates and returns an object implementing
:class:`txaio.IBatchedTimer`.
:param bucket_seconds: the number of seconds in each bucket. That
is, a value of 5 means that any timeout within a 5 second
window will be in the same bucket, and get notified at the
same time. This is only accurate to "milliseconds".
:param chunk_size: when "doing" the callbacks in a particular
bucket, this controls how many we do at once before yielding to
the reactor.
"""
def get_seconds():
return self._config.loop.time()
return _BatchedTimer(
bucket_seconds * 1000.0, chunk_size,
seconds_provider=get_seconds,
delayed_call_creator=self.call_later,
)
def is_called(self, future):
return future.done()
def resolve(self, future, result=None):
future.set_result(result)
def reject(self, future, error=None):
if error is None:
error = create_failure() # will be error if we're not in an "except"
elif isinstance(error, Exception):
error = FailedFuture(type(error), error, None)
else:
if not isinstance(error, IFailedFuture):
raise RuntimeError("reject requires an IFailedFuture or Exception")
future.set_exception(error.value)
def cancel(self, future):
future.cancel()
def create_failure(self, exception=None):
"""
This returns an object implementing IFailedFuture.
If exception is None (the default) we MUST be called within an
"except" block (such that sys.exc_info() returns useful
information).
"""
if exception:
return FailedFuture(type(exception), exception, None)
return FailedFuture(*sys.exc_info())
def add_callbacks(self, future, callback, errback):
"""
callback or errback may be None, but at least one must be
non-None.
"""
def done(f):
try:
res = f.result()
if callback:
callback(res)
except Exception:
if errback:
errback(create_failure())
return future.add_done_callback(done)
def gather(self, futures, consume_exceptions=True):
"""
This returns a Future that waits for all the Futures in the list
``futures``
:param futures: a list of Futures (or coroutines?)
:param consume_exceptions: if True, any errors are eaten and
returned in the result list.
"""
# from the asyncio docs: "If return_exceptions is True, exceptions
# in the tasks are treated the same as successful results, and
# gathered in the result list; otherwise, the first raised
# exception will be immediately propagated to the returned
# future."
return asyncio.gather(*futures, return_exceptions=consume_exceptions)
def sleep(self, delay):
"""
Inline sleep for use in co-routines.
:param delay: Time to sleep in seconds.
:type delay: float
"""
return asyncio.ensure_future(asyncio.sleep(delay))
_default_api = _AsyncioApi(config)
using_twisted = _default_api.using_twisted
using_asyncio = _default_api.using_asyncio
sleep = _default_api.sleep
failure_message = _default_api.failure_message
failure_traceback = _default_api.failure_traceback
failure_format_traceback = _default_api.failure_format_traceback
create_future = _default_api.create_future
create_future_success = _default_api.create_future_success
create_future_error = _default_api.create_future_error
as_future = _default_api.as_future
is_future = _default_api.is_future
call_later = _default_api.call_later
make_batched_timer = _default_api.make_batched_timer
is_called = _default_api.is_called
resolve = _default_api.resolve
reject = _default_api.reject
cancel = _default_api.cancel
create_failure = _default_api.create_failure
add_callbacks = _default_api.add_callbacks
gather = _default_api.gather
sleep = _default_api.sleep
|
crossbario/txaio | txaio/aio.py | set_global_log_level | python | def set_global_log_level(level):
for logger in _loggers:
logger._set_log_level(level)
global _log_level
_log_level = level | Set the global log level on all loggers instantiated by txaio. | train | https://github.com/crossbario/txaio/blob/29c77ff1210cabd4cc03f16f34672612e7eef704/txaio/aio.py#L325-L332 | null | ###############################################################################
#
# The MIT License (MIT)
#
# Copyright (c) Crossbar.io Technologies GmbH
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
###############################################################################
from __future__ import absolute_import, print_function
import os
import sys
import time
import weakref
import functools
import traceback
import logging
import inspect
from datetime import datetime
from txaio.interfaces import IFailedFuture, ILogger, log_levels
from txaio._iotype import guess_stream_needs_encoding
from txaio._common import _BatchedTimer
from txaio import _Config
import six
try:
import asyncio
from asyncio import iscoroutine
from asyncio import Future
except ImportError:
# Trollius >= 0.3 was renamed
# noinspection PyUnresolvedReferences
import trollius as asyncio
from trollius import iscoroutine
from trollius import Future
try:
from types import AsyncGeneratorType # python 3.5+
except ImportError:
class AsyncGeneratorType(object):
pass
def _create_future_of_loop(loop):
return loop.create_future()
def _create_future_directly(loop=None):
return Future(loop=loop)
def _create_task_of_loop(res, loop):
return loop.create_task(res)
def _create_task_directly(res, loop=None):
return asyncio.Task(res, loop=loop)
if sys.version_info >= (3, 4, 2):
_create_task = _create_task_of_loop
if sys.version_info >= (3, 5, 2):
_create_future = _create_future_of_loop
else:
_create_future = _create_future_directly
else:
_create_task = _create_task_directly
_create_future = _create_future_directly
config = _Config()
def with_config(loop=None):
"""
:return: an instance of the txaio API with the given
configuration. This won't affect anything using the 'gloabl'
config nor other instances created using this function.
If you need to customize txaio configuration separately (e.g. to
use multiple event-loops in asyncio), you can take code like this:
import txaio
class FunTimes(object):
def something_async(self):
return txaio.call_later(1, lambda: 'some result')
and instead do this:
import txaio
class FunTimes(object):
txaio = txaio
def something_async(self):
# this will run in the local/new event loop created in the constructor
return self.txaio.call_later(1, lambda: 'some result')
fun0 = FunTimes()
fun1 = FunTimes()
fun1.txaio = txaio.with_config(loop=asyncio.new_event_loop())
So `fun1` will run its futures on the newly-created event loop,
while `fun0` will work just as it did before this `with_config`
method was introduced (after 2.6.2).
"""
cfg = _Config()
if loop is not None:
cfg.loop = loop
return _AsyncioApi(cfg)
# logging should probably all be folded into _AsyncioApi as well
_stderr, _stdout = sys.stderr, sys.stdout
_loggers = weakref.WeakSet() # weak-ref's of each logger we've created before start_logging()
_log_level = 'info' # re-set by start_logging
_started_logging = False
_categories = {}
def add_log_categories(categories):
_categories.update(categories)
class FailedFuture(IFailedFuture):
"""
This provides an object with any features from Twisted's Failure
that we might need in Autobahn classes that use FutureMixin.
We need to encapsulate information from exceptions so that
errbacks still have access to the traceback (in case they want to
print it out) outside of "except" blocks.
"""
def __init__(self, type_, value, traceback):
"""
These are the same parameters as returned from ``sys.exc_info()``
:param type_: exception type
:param value: the Exception instance
:param traceback: a traceback object
"""
self._type = type_
self._value = value
self._traceback = traceback
@property
def value(self):
return self._value
def __str__(self):
return str(self.value)
# logging API methods
def _log(logger, level, format=u'', **kwargs):
# Look for a log_category, switch it in if we have it
if "log_category" in kwargs and kwargs["log_category"] in _categories:
format = _categories.get(kwargs["log_category"])
kwargs['log_time'] = time.time()
kwargs['log_level'] = level
kwargs['log_format'] = format
# NOTE: turning kwargs into a single "argument which
# is a dict" on purpose, since a LogRecord only keeps
# args, not kwargs.
if level == 'trace':
level = 'debug'
kwargs['txaio_trace'] = True
msg = format.format(**kwargs)
getattr(logger._logger, level)(msg)
def _no_op(*args, **kw):
pass
class _TxaioLogWrapper(ILogger):
def __init__(self, logger):
self._logger = logger
self._set_log_level(_log_level)
def emit(self, level, *args, **kwargs):
func = getattr(self, level)
return func(*args, **kwargs)
def _set_log_level(self, level):
target_level = log_levels.index(level)
# this binds either _log or _no_op above to this instance,
# depending on the desired level.
for (idx, name) in enumerate(log_levels):
if idx <= target_level:
log_method = functools.partial(_log, self, name)
else:
log_method = _no_op
setattr(self, name, log_method)
self._log_level = level
class _TxaioFileHandler(logging.Handler, object):
def __init__(self, fileobj, **kw):
super(_TxaioFileHandler, self).__init__(**kw)
self._file = fileobj
self._encode = guess_stream_needs_encoding(fileobj)
def emit(self, record):
if isinstance(record.args, dict):
fmt = record.args.get(
'log_format',
record.args.get('log_message', u'')
)
message = fmt.format(**record.args)
dt = datetime.fromtimestamp(record.args.get('log_time', 0))
else:
message = record.getMessage()
if record.levelno == logging.ERROR and record.exc_info:
message += '\n'
for line in traceback.format_exception(*record.exc_info):
message = message + line
dt = datetime.fromtimestamp(record.created)
msg = u'{0} {1}{2}'.format(
dt.strftime("%Y-%m-%dT%H:%M:%S%z"),
message,
os.linesep
)
if self._encode:
msg = msg.encode('utf8')
self._file.write(msg)
def make_logger():
# we want the namespace to be the calling context of "make_logger"
# otherwise the root logger will be returned
cf = inspect.currentframe().f_back
if "self" in cf.f_locals:
# We're probably in a class init or method
cls = cf.f_locals["self"].__class__
namespace = '{0}.{1}'.format(cls.__module__, cls.__name__)
else:
namespace = cf.f_globals["__name__"]
if cf.f_code.co_name != "<module>":
# If it's not the module, and not in a class instance, add the code
# object's name.
namespace = namespace + "." + cf.f_code.co_name
logger = _TxaioLogWrapper(logging.getLogger(name=namespace))
# remember this so we can set their levels properly once
# start_logging is actually called
_loggers.add(logger)
return logger
def start_logging(out=_stdout, level='info'):
"""
Begin logging.
:param out: if provided, a file-like object to log to. By default, this is
stdout.
:param level: the maximum log-level to emit (a string)
"""
global _log_level, _loggers, _started_logging
if level not in log_levels:
raise RuntimeError(
"Invalid log level '{0}'; valid are: {1}".format(
level, ', '.join(log_levels)
)
)
if _started_logging:
return
_started_logging = True
_log_level = level
handler = _TxaioFileHandler(out)
logging.getLogger().addHandler(handler)
# note: Don't need to call basicConfig() or similar, because we've
# now added at least one handler to the root logger
logging.raiseExceptions = True # FIXME
level_to_stdlib = {
'critical': logging.CRITICAL,
'error': logging.ERROR,
'warn': logging.WARNING,
'info': logging.INFO,
'debug': logging.DEBUG,
'trace': logging.DEBUG,
}
logging.getLogger().setLevel(level_to_stdlib[level])
# make sure any loggers we created before now have their log-level
# set (any created after now will get it from _log_level
for logger in _loggers:
logger._set_log_level(level)
def get_global_log_level():
return _log_level
# asyncio API methods; the module-level functions are (now, for
# backwards-compat) exported from a default instance of this class
_unspecified = object()
class _AsyncioApi(object):
using_twisted = False
using_asyncio = True
def __init__(self, config):
if config.loop is None:
config.loop = asyncio.get_event_loop()
self._config = config
def failure_message(self, fail):
"""
:param fail: must be an IFailedFuture
returns a unicode error-message
"""
try:
return u'{0}: {1}'.format(
fail._value.__class__.__name__,
str(fail._value),
)
except Exception:
return u'Failed to produce failure message for "{0}"'.format(fail)
def failure_traceback(self, fail):
"""
:param fail: must be an IFailedFuture
returns a traceback instance
"""
return fail._traceback
def failure_format_traceback(self, fail):
"""
:param fail: must be an IFailedFuture
returns a string
"""
try:
f = six.StringIO()
traceback.print_exception(
fail._type,
fail.value,
fail._traceback,
file=f,
)
return f.getvalue()
except Exception:
return u"Failed to format failure traceback for '{0}'".format(fail)
def create_future(self, result=_unspecified, error=_unspecified, canceller=_unspecified):
if result is not _unspecified and error is not _unspecified:
raise ValueError("Cannot have both result and error.")
f = _create_future(loop=self._config.loop)
if result is not _unspecified:
resolve(f, result)
elif error is not _unspecified:
reject(f, error)
# Twisted's only API for cancelling is to pass a
# single-argument callable to the Deferred constructor, so
# txaio apes that here for asyncio. The argument is the Future
# that has been cancelled.
if canceller is not _unspecified:
def done(f):
try:
f.exception()
except asyncio.CancelledError:
canceller(f)
f.add_done_callback(done)
return f
def create_future_success(self, result):
return self.create_future(result=result)
def create_future_error(self, error=None):
f = self.create_future()
reject(f, error)
return f
def as_future(self, fun, *args, **kwargs):
try:
res = fun(*args, **kwargs)
except Exception:
return create_future_error(create_failure())
else:
if isinstance(res, Future):
return res
elif iscoroutine(res):
return _create_task(res, loop=self._config.loop)
elif isinstance(res, AsyncGeneratorType):
raise RuntimeError(
"as_future() received an async generator function; does "
"'{}' use 'yield' when you meant 'await'?".format(
str(fun)
)
)
else:
return create_future_success(res)
def is_future(self, obj):
return iscoroutine(obj) or isinstance(obj, Future)
def call_later(self, delay, fun, *args, **kwargs):
# loop.call_later doesn't support kwargs
real_call = functools.partial(fun, *args, **kwargs)
return self._config.loop.call_later(delay, real_call)
def make_batched_timer(self, bucket_seconds, chunk_size=100):
"""
Creates and returns an object implementing
:class:`txaio.IBatchedTimer`.
:param bucket_seconds: the number of seconds in each bucket. That
is, a value of 5 means that any timeout within a 5 second
window will be in the same bucket, and get notified at the
same time. This is only accurate to "milliseconds".
:param chunk_size: when "doing" the callbacks in a particular
bucket, this controls how many we do at once before yielding to
the reactor.
"""
def get_seconds():
return self._config.loop.time()
return _BatchedTimer(
bucket_seconds * 1000.0, chunk_size,
seconds_provider=get_seconds,
delayed_call_creator=self.call_later,
)
def is_called(self, future):
return future.done()
def resolve(self, future, result=None):
future.set_result(result)
def reject(self, future, error=None):
if error is None:
error = create_failure() # will be error if we're not in an "except"
elif isinstance(error, Exception):
error = FailedFuture(type(error), error, None)
else:
if not isinstance(error, IFailedFuture):
raise RuntimeError("reject requires an IFailedFuture or Exception")
future.set_exception(error.value)
def cancel(self, future):
future.cancel()
def create_failure(self, exception=None):
"""
This returns an object implementing IFailedFuture.
If exception is None (the default) we MUST be called within an
"except" block (such that sys.exc_info() returns useful
information).
"""
if exception:
return FailedFuture(type(exception), exception, None)
return FailedFuture(*sys.exc_info())
def add_callbacks(self, future, callback, errback):
"""
callback or errback may be None, but at least one must be
non-None.
"""
def done(f):
try:
res = f.result()
if callback:
callback(res)
except Exception:
if errback:
errback(create_failure())
return future.add_done_callback(done)
def gather(self, futures, consume_exceptions=True):
"""
This returns a Future that waits for all the Futures in the list
``futures``
:param futures: a list of Futures (or coroutines?)
:param consume_exceptions: if True, any errors are eaten and
returned in the result list.
"""
# from the asyncio docs: "If return_exceptions is True, exceptions
# in the tasks are treated the same as successful results, and
# gathered in the result list; otherwise, the first raised
# exception will be immediately propagated to the returned
# future."
return asyncio.gather(*futures, return_exceptions=consume_exceptions)
def sleep(self, delay):
"""
Inline sleep for use in co-routines.
:param delay: Time to sleep in seconds.
:type delay: float
"""
return asyncio.ensure_future(asyncio.sleep(delay))
_default_api = _AsyncioApi(config)
using_twisted = _default_api.using_twisted
using_asyncio = _default_api.using_asyncio
sleep = _default_api.sleep
failure_message = _default_api.failure_message
failure_traceback = _default_api.failure_traceback
failure_format_traceback = _default_api.failure_format_traceback
create_future = _default_api.create_future
create_future_success = _default_api.create_future_success
create_future_error = _default_api.create_future_error
as_future = _default_api.as_future
is_future = _default_api.is_future
call_later = _default_api.call_later
make_batched_timer = _default_api.make_batched_timer
is_called = _default_api.is_called
resolve = _default_api.resolve
reject = _default_api.reject
cancel = _default_api.cancel
create_failure = _default_api.create_failure
add_callbacks = _default_api.add_callbacks
gather = _default_api.gather
sleep = _default_api.sleep
|
crossbario/txaio | txaio/aio.py | _AsyncioApi.failure_message | python | def failure_message(self, fail):
try:
return u'{0}: {1}'.format(
fail._value.__class__.__name__,
str(fail._value),
)
except Exception:
return u'Failed to produce failure message for "{0}"'.format(fail) | :param fail: must be an IFailedFuture
returns a unicode error-message | train | https://github.com/crossbario/txaio/blob/29c77ff1210cabd4cc03f16f34672612e7eef704/txaio/aio.py#L355-L366 | null | class _AsyncioApi(object):
using_twisted = False
using_asyncio = True
def __init__(self, config):
if config.loop is None:
config.loop = asyncio.get_event_loop()
self._config = config
def failure_traceback(self, fail):
"""
:param fail: must be an IFailedFuture
returns a traceback instance
"""
return fail._traceback
def failure_format_traceback(self, fail):
"""
:param fail: must be an IFailedFuture
returns a string
"""
try:
f = six.StringIO()
traceback.print_exception(
fail._type,
fail.value,
fail._traceback,
file=f,
)
return f.getvalue()
except Exception:
return u"Failed to format failure traceback for '{0}'".format(fail)
def create_future(self, result=_unspecified, error=_unspecified, canceller=_unspecified):
if result is not _unspecified and error is not _unspecified:
raise ValueError("Cannot have both result and error.")
f = _create_future(loop=self._config.loop)
if result is not _unspecified:
resolve(f, result)
elif error is not _unspecified:
reject(f, error)
# Twisted's only API for cancelling is to pass a
# single-argument callable to the Deferred constructor, so
# txaio apes that here for asyncio. The argument is the Future
# that has been cancelled.
if canceller is not _unspecified:
def done(f):
try:
f.exception()
except asyncio.CancelledError:
canceller(f)
f.add_done_callback(done)
return f
def create_future_success(self, result):
return self.create_future(result=result)
def create_future_error(self, error=None):
f = self.create_future()
reject(f, error)
return f
def as_future(self, fun, *args, **kwargs):
try:
res = fun(*args, **kwargs)
except Exception:
return create_future_error(create_failure())
else:
if isinstance(res, Future):
return res
elif iscoroutine(res):
return _create_task(res, loop=self._config.loop)
elif isinstance(res, AsyncGeneratorType):
raise RuntimeError(
"as_future() received an async generator function; does "
"'{}' use 'yield' when you meant 'await'?".format(
str(fun)
)
)
else:
return create_future_success(res)
def is_future(self, obj):
return iscoroutine(obj) or isinstance(obj, Future)
def call_later(self, delay, fun, *args, **kwargs):
# loop.call_later doesn't support kwargs
real_call = functools.partial(fun, *args, **kwargs)
return self._config.loop.call_later(delay, real_call)
def make_batched_timer(self, bucket_seconds, chunk_size=100):
"""
Creates and returns an object implementing
:class:`txaio.IBatchedTimer`.
:param bucket_seconds: the number of seconds in each bucket. That
is, a value of 5 means that any timeout within a 5 second
window will be in the same bucket, and get notified at the
same time. This is only accurate to "milliseconds".
:param chunk_size: when "doing" the callbacks in a particular
bucket, this controls how many we do at once before yielding to
the reactor.
"""
def get_seconds():
return self._config.loop.time()
return _BatchedTimer(
bucket_seconds * 1000.0, chunk_size,
seconds_provider=get_seconds,
delayed_call_creator=self.call_later,
)
def is_called(self, future):
return future.done()
def resolve(self, future, result=None):
future.set_result(result)
def reject(self, future, error=None):
if error is None:
error = create_failure() # will be error if we're not in an "except"
elif isinstance(error, Exception):
error = FailedFuture(type(error), error, None)
else:
if not isinstance(error, IFailedFuture):
raise RuntimeError("reject requires an IFailedFuture or Exception")
future.set_exception(error.value)
def cancel(self, future):
future.cancel()
def create_failure(self, exception=None):
"""
This returns an object implementing IFailedFuture.
If exception is None (the default) we MUST be called within an
"except" block (such that sys.exc_info() returns useful
information).
"""
if exception:
return FailedFuture(type(exception), exception, None)
return FailedFuture(*sys.exc_info())
def add_callbacks(self, future, callback, errback):
"""
callback or errback may be None, but at least one must be
non-None.
"""
def done(f):
try:
res = f.result()
if callback:
callback(res)
except Exception:
if errback:
errback(create_failure())
return future.add_done_callback(done)
def gather(self, futures, consume_exceptions=True):
"""
This returns a Future that waits for all the Futures in the list
``futures``
:param futures: a list of Futures (or coroutines?)
:param consume_exceptions: if True, any errors are eaten and
returned in the result list.
"""
# from the asyncio docs: "If return_exceptions is True, exceptions
# in the tasks are treated the same as successful results, and
# gathered in the result list; otherwise, the first raised
# exception will be immediately propagated to the returned
# future."
return asyncio.gather(*futures, return_exceptions=consume_exceptions)
def sleep(self, delay):
"""
Inline sleep for use in co-routines.
:param delay: Time to sleep in seconds.
:type delay: float
"""
return asyncio.ensure_future(asyncio.sleep(delay))
|
crossbario/txaio | txaio/aio.py | _AsyncioApi.failure_format_traceback | python | def failure_format_traceback(self, fail):
try:
f = six.StringIO()
traceback.print_exception(
fail._type,
fail.value,
fail._traceback,
file=f,
)
return f.getvalue()
except Exception:
return u"Failed to format failure traceback for '{0}'".format(fail) | :param fail: must be an IFailedFuture
returns a string | train | https://github.com/crossbario/txaio/blob/29c77ff1210cabd4cc03f16f34672612e7eef704/txaio/aio.py#L375-L390 | null | class _AsyncioApi(object):
using_twisted = False
using_asyncio = True
def __init__(self, config):
if config.loop is None:
config.loop = asyncio.get_event_loop()
self._config = config
def failure_message(self, fail):
"""
:param fail: must be an IFailedFuture
returns a unicode error-message
"""
try:
return u'{0}: {1}'.format(
fail._value.__class__.__name__,
str(fail._value),
)
except Exception:
return u'Failed to produce failure message for "{0}"'.format(fail)
def failure_traceback(self, fail):
"""
:param fail: must be an IFailedFuture
returns a traceback instance
"""
return fail._traceback
def create_future(self, result=_unspecified, error=_unspecified, canceller=_unspecified):
if result is not _unspecified and error is not _unspecified:
raise ValueError("Cannot have both result and error.")
f = _create_future(loop=self._config.loop)
if result is not _unspecified:
resolve(f, result)
elif error is not _unspecified:
reject(f, error)
# Twisted's only API for cancelling is to pass a
# single-argument callable to the Deferred constructor, so
# txaio apes that here for asyncio. The argument is the Future
# that has been cancelled.
if canceller is not _unspecified:
def done(f):
try:
f.exception()
except asyncio.CancelledError:
canceller(f)
f.add_done_callback(done)
return f
def create_future_success(self, result):
return self.create_future(result=result)
def create_future_error(self, error=None):
f = self.create_future()
reject(f, error)
return f
def as_future(self, fun, *args, **kwargs):
try:
res = fun(*args, **kwargs)
except Exception:
return create_future_error(create_failure())
else:
if isinstance(res, Future):
return res
elif iscoroutine(res):
return _create_task(res, loop=self._config.loop)
elif isinstance(res, AsyncGeneratorType):
raise RuntimeError(
"as_future() received an async generator function; does "
"'{}' use 'yield' when you meant 'await'?".format(
str(fun)
)
)
else:
return create_future_success(res)
def is_future(self, obj):
return iscoroutine(obj) or isinstance(obj, Future)
def call_later(self, delay, fun, *args, **kwargs):
# loop.call_later doesn't support kwargs
real_call = functools.partial(fun, *args, **kwargs)
return self._config.loop.call_later(delay, real_call)
def make_batched_timer(self, bucket_seconds, chunk_size=100):
"""
Creates and returns an object implementing
:class:`txaio.IBatchedTimer`.
:param bucket_seconds: the number of seconds in each bucket. That
is, a value of 5 means that any timeout within a 5 second
window will be in the same bucket, and get notified at the
same time. This is only accurate to "milliseconds".
:param chunk_size: when "doing" the callbacks in a particular
bucket, this controls how many we do at once before yielding to
the reactor.
"""
def get_seconds():
return self._config.loop.time()
return _BatchedTimer(
bucket_seconds * 1000.0, chunk_size,
seconds_provider=get_seconds,
delayed_call_creator=self.call_later,
)
def is_called(self, future):
return future.done()
def resolve(self, future, result=None):
future.set_result(result)
def reject(self, future, error=None):
if error is None:
error = create_failure() # will be error if we're not in an "except"
elif isinstance(error, Exception):
error = FailedFuture(type(error), error, None)
else:
if not isinstance(error, IFailedFuture):
raise RuntimeError("reject requires an IFailedFuture or Exception")
future.set_exception(error.value)
def cancel(self, future):
future.cancel()
def create_failure(self, exception=None):
"""
This returns an object implementing IFailedFuture.
If exception is None (the default) we MUST be called within an
"except" block (such that sys.exc_info() returns useful
information).
"""
if exception:
return FailedFuture(type(exception), exception, None)
return FailedFuture(*sys.exc_info())
def add_callbacks(self, future, callback, errback):
"""
callback or errback may be None, but at least one must be
non-None.
"""
def done(f):
try:
res = f.result()
if callback:
callback(res)
except Exception:
if errback:
errback(create_failure())
return future.add_done_callback(done)
def gather(self, futures, consume_exceptions=True):
"""
This returns a Future that waits for all the Futures in the list
``futures``
:param futures: a list of Futures (or coroutines?)
:param consume_exceptions: if True, any errors are eaten and
returned in the result list.
"""
# from the asyncio docs: "If return_exceptions is True, exceptions
# in the tasks are treated the same as successful results, and
# gathered in the result list; otherwise, the first raised
# exception will be immediately propagated to the returned
# future."
return asyncio.gather(*futures, return_exceptions=consume_exceptions)
def sleep(self, delay):
"""
Inline sleep for use in co-routines.
:param delay: Time to sleep in seconds.
:type delay: float
"""
return asyncio.ensure_future(asyncio.sleep(delay))
|
crossbario/txaio | txaio/aio.py | _AsyncioApi.make_batched_timer | python | def make_batched_timer(self, bucket_seconds, chunk_size=100):
def get_seconds():
return self._config.loop.time()
return _BatchedTimer(
bucket_seconds * 1000.0, chunk_size,
seconds_provider=get_seconds,
delayed_call_creator=self.call_later,
) | Creates and returns an object implementing
:class:`txaio.IBatchedTimer`.
:param bucket_seconds: the number of seconds in each bucket. That
is, a value of 5 means that any timeout within a 5 second
window will be in the same bucket, and get notified at the
same time. This is only accurate to "milliseconds".
:param chunk_size: when "doing" the callbacks in a particular
bucket, this controls how many we do at once before yielding to
the reactor. | train | https://github.com/crossbario/txaio/blob/29c77ff1210cabd4cc03f16f34672612e7eef704/txaio/aio.py#L452-L474 | null | class _AsyncioApi(object):
using_twisted = False
using_asyncio = True
def __init__(self, config):
if config.loop is None:
config.loop = asyncio.get_event_loop()
self._config = config
def failure_message(self, fail):
"""
:param fail: must be an IFailedFuture
returns a unicode error-message
"""
try:
return u'{0}: {1}'.format(
fail._value.__class__.__name__,
str(fail._value),
)
except Exception:
return u'Failed to produce failure message for "{0}"'.format(fail)
def failure_traceback(self, fail):
"""
:param fail: must be an IFailedFuture
returns a traceback instance
"""
return fail._traceback
def failure_format_traceback(self, fail):
"""
:param fail: must be an IFailedFuture
returns a string
"""
try:
f = six.StringIO()
traceback.print_exception(
fail._type,
fail.value,
fail._traceback,
file=f,
)
return f.getvalue()
except Exception:
return u"Failed to format failure traceback for '{0}'".format(fail)
def create_future(self, result=_unspecified, error=_unspecified, canceller=_unspecified):
if result is not _unspecified and error is not _unspecified:
raise ValueError("Cannot have both result and error.")
f = _create_future(loop=self._config.loop)
if result is not _unspecified:
resolve(f, result)
elif error is not _unspecified:
reject(f, error)
# Twisted's only API for cancelling is to pass a
# single-argument callable to the Deferred constructor, so
# txaio apes that here for asyncio. The argument is the Future
# that has been cancelled.
if canceller is not _unspecified:
def done(f):
try:
f.exception()
except asyncio.CancelledError:
canceller(f)
f.add_done_callback(done)
return f
def create_future_success(self, result):
return self.create_future(result=result)
def create_future_error(self, error=None):
f = self.create_future()
reject(f, error)
return f
def as_future(self, fun, *args, **kwargs):
try:
res = fun(*args, **kwargs)
except Exception:
return create_future_error(create_failure())
else:
if isinstance(res, Future):
return res
elif iscoroutine(res):
return _create_task(res, loop=self._config.loop)
elif isinstance(res, AsyncGeneratorType):
raise RuntimeError(
"as_future() received an async generator function; does "
"'{}' use 'yield' when you meant 'await'?".format(
str(fun)
)
)
else:
return create_future_success(res)
def is_future(self, obj):
return iscoroutine(obj) or isinstance(obj, Future)
def call_later(self, delay, fun, *args, **kwargs):
# loop.call_later doesn't support kwargs
real_call = functools.partial(fun, *args, **kwargs)
return self._config.loop.call_later(delay, real_call)
def is_called(self, future):
return future.done()
def resolve(self, future, result=None):
future.set_result(result)
def reject(self, future, error=None):
if error is None:
error = create_failure() # will be error if we're not in an "except"
elif isinstance(error, Exception):
error = FailedFuture(type(error), error, None)
else:
if not isinstance(error, IFailedFuture):
raise RuntimeError("reject requires an IFailedFuture or Exception")
future.set_exception(error.value)
def cancel(self, future):
future.cancel()
def create_failure(self, exception=None):
"""
This returns an object implementing IFailedFuture.
If exception is None (the default) we MUST be called within an
"except" block (such that sys.exc_info() returns useful
information).
"""
if exception:
return FailedFuture(type(exception), exception, None)
return FailedFuture(*sys.exc_info())
def add_callbacks(self, future, callback, errback):
"""
callback or errback may be None, but at least one must be
non-None.
"""
def done(f):
try:
res = f.result()
if callback:
callback(res)
except Exception:
if errback:
errback(create_failure())
return future.add_done_callback(done)
def gather(self, futures, consume_exceptions=True):
"""
This returns a Future that waits for all the Futures in the list
``futures``
:param futures: a list of Futures (or coroutines?)
:param consume_exceptions: if True, any errors are eaten and
returned in the result list.
"""
# from the asyncio docs: "If return_exceptions is True, exceptions
# in the tasks are treated the same as successful results, and
# gathered in the result list; otherwise, the first raised
# exception will be immediately propagated to the returned
# future."
return asyncio.gather(*futures, return_exceptions=consume_exceptions)
def sleep(self, delay):
"""
Inline sleep for use in co-routines.
:param delay: Time to sleep in seconds.
:type delay: float
"""
return asyncio.ensure_future(asyncio.sleep(delay))
|
crossbario/txaio | txaio/aio.py | _AsyncioApi.create_failure | python | def create_failure(self, exception=None):
if exception:
return FailedFuture(type(exception), exception, None)
return FailedFuture(*sys.exc_info()) | This returns an object implementing IFailedFuture.
If exception is None (the default) we MUST be called within an
"except" block (such that sys.exc_info() returns useful
information). | train | https://github.com/crossbario/txaio/blob/29c77ff1210cabd4cc03f16f34672612e7eef704/txaio/aio.py#L495-L505 | null | class _AsyncioApi(object):
using_twisted = False
using_asyncio = True
def __init__(self, config):
if config.loop is None:
config.loop = asyncio.get_event_loop()
self._config = config
def failure_message(self, fail):
"""
:param fail: must be an IFailedFuture
returns a unicode error-message
"""
try:
return u'{0}: {1}'.format(
fail._value.__class__.__name__,
str(fail._value),
)
except Exception:
return u'Failed to produce failure message for "{0}"'.format(fail)
def failure_traceback(self, fail):
"""
:param fail: must be an IFailedFuture
returns a traceback instance
"""
return fail._traceback
def failure_format_traceback(self, fail):
"""
:param fail: must be an IFailedFuture
returns a string
"""
try:
f = six.StringIO()
traceback.print_exception(
fail._type,
fail.value,
fail._traceback,
file=f,
)
return f.getvalue()
except Exception:
return u"Failed to format failure traceback for '{0}'".format(fail)
def create_future(self, result=_unspecified, error=_unspecified, canceller=_unspecified):
if result is not _unspecified and error is not _unspecified:
raise ValueError("Cannot have both result and error.")
f = _create_future(loop=self._config.loop)
if result is not _unspecified:
resolve(f, result)
elif error is not _unspecified:
reject(f, error)
# Twisted's only API for cancelling is to pass a
# single-argument callable to the Deferred constructor, so
# txaio apes that here for asyncio. The argument is the Future
# that has been cancelled.
if canceller is not _unspecified:
def done(f):
try:
f.exception()
except asyncio.CancelledError:
canceller(f)
f.add_done_callback(done)
return f
def create_future_success(self, result):
return self.create_future(result=result)
def create_future_error(self, error=None):
f = self.create_future()
reject(f, error)
return f
def as_future(self, fun, *args, **kwargs):
try:
res = fun(*args, **kwargs)
except Exception:
return create_future_error(create_failure())
else:
if isinstance(res, Future):
return res
elif iscoroutine(res):
return _create_task(res, loop=self._config.loop)
elif isinstance(res, AsyncGeneratorType):
raise RuntimeError(
"as_future() received an async generator function; does "
"'{}' use 'yield' when you meant 'await'?".format(
str(fun)
)
)
else:
return create_future_success(res)
def is_future(self, obj):
return iscoroutine(obj) or isinstance(obj, Future)
def call_later(self, delay, fun, *args, **kwargs):
# loop.call_later doesn't support kwargs
real_call = functools.partial(fun, *args, **kwargs)
return self._config.loop.call_later(delay, real_call)
def make_batched_timer(self, bucket_seconds, chunk_size=100):
"""
Creates and returns an object implementing
:class:`txaio.IBatchedTimer`.
:param bucket_seconds: the number of seconds in each bucket. That
is, a value of 5 means that any timeout within a 5 second
window will be in the same bucket, and get notified at the
same time. This is only accurate to "milliseconds".
:param chunk_size: when "doing" the callbacks in a particular
bucket, this controls how many we do at once before yielding to
the reactor.
"""
def get_seconds():
return self._config.loop.time()
return _BatchedTimer(
bucket_seconds * 1000.0, chunk_size,
seconds_provider=get_seconds,
delayed_call_creator=self.call_later,
)
def is_called(self, future):
return future.done()
def resolve(self, future, result=None):
future.set_result(result)
def reject(self, future, error=None):
if error is None:
error = create_failure() # will be error if we're not in an "except"
elif isinstance(error, Exception):
error = FailedFuture(type(error), error, None)
else:
if not isinstance(error, IFailedFuture):
raise RuntimeError("reject requires an IFailedFuture or Exception")
future.set_exception(error.value)
def cancel(self, future):
future.cancel()
def add_callbacks(self, future, callback, errback):
"""
callback or errback may be None, but at least one must be
non-None.
"""
def done(f):
try:
res = f.result()
if callback:
callback(res)
except Exception:
if errback:
errback(create_failure())
return future.add_done_callback(done)
def gather(self, futures, consume_exceptions=True):
"""
This returns a Future that waits for all the Futures in the list
``futures``
:param futures: a list of Futures (or coroutines?)
:param consume_exceptions: if True, any errors are eaten and
returned in the result list.
"""
# from the asyncio docs: "If return_exceptions is True, exceptions
# in the tasks are treated the same as successful results, and
# gathered in the result list; otherwise, the first raised
# exception will be immediately propagated to the returned
# future."
return asyncio.gather(*futures, return_exceptions=consume_exceptions)
def sleep(self, delay):
"""
Inline sleep for use in co-routines.
:param delay: Time to sleep in seconds.
:type delay: float
"""
return asyncio.ensure_future(asyncio.sleep(delay))
|
crossbario/txaio | txaio/aio.py | _AsyncioApi.add_callbacks | python | def add_callbacks(self, future, callback, errback):
def done(f):
try:
res = f.result()
if callback:
callback(res)
except Exception:
if errback:
errback(create_failure())
return future.add_done_callback(done) | callback or errback may be None, but at least one must be
non-None. | train | https://github.com/crossbario/txaio/blob/29c77ff1210cabd4cc03f16f34672612e7eef704/txaio/aio.py#L507-L520 | null | class _AsyncioApi(object):
using_twisted = False
using_asyncio = True
def __init__(self, config):
if config.loop is None:
config.loop = asyncio.get_event_loop()
self._config = config
def failure_message(self, fail):
"""
:param fail: must be an IFailedFuture
returns a unicode error-message
"""
try:
return u'{0}: {1}'.format(
fail._value.__class__.__name__,
str(fail._value),
)
except Exception:
return u'Failed to produce failure message for "{0}"'.format(fail)
def failure_traceback(self, fail):
"""
:param fail: must be an IFailedFuture
returns a traceback instance
"""
return fail._traceback
def failure_format_traceback(self, fail):
"""
:param fail: must be an IFailedFuture
returns a string
"""
try:
f = six.StringIO()
traceback.print_exception(
fail._type,
fail.value,
fail._traceback,
file=f,
)
return f.getvalue()
except Exception:
return u"Failed to format failure traceback for '{0}'".format(fail)
def create_future(self, result=_unspecified, error=_unspecified, canceller=_unspecified):
if result is not _unspecified and error is not _unspecified:
raise ValueError("Cannot have both result and error.")
f = _create_future(loop=self._config.loop)
if result is not _unspecified:
resolve(f, result)
elif error is not _unspecified:
reject(f, error)
# Twisted's only API for cancelling is to pass a
# single-argument callable to the Deferred constructor, so
# txaio apes that here for asyncio. The argument is the Future
# that has been cancelled.
if canceller is not _unspecified:
def done(f):
try:
f.exception()
except asyncio.CancelledError:
canceller(f)
f.add_done_callback(done)
return f
def create_future_success(self, result):
return self.create_future(result=result)
def create_future_error(self, error=None):
f = self.create_future()
reject(f, error)
return f
def as_future(self, fun, *args, **kwargs):
try:
res = fun(*args, **kwargs)
except Exception:
return create_future_error(create_failure())
else:
if isinstance(res, Future):
return res
elif iscoroutine(res):
return _create_task(res, loop=self._config.loop)
elif isinstance(res, AsyncGeneratorType):
raise RuntimeError(
"as_future() received an async generator function; does "
"'{}' use 'yield' when you meant 'await'?".format(
str(fun)
)
)
else:
return create_future_success(res)
def is_future(self, obj):
return iscoroutine(obj) or isinstance(obj, Future)
def call_later(self, delay, fun, *args, **kwargs):
# loop.call_later doesn't support kwargs
real_call = functools.partial(fun, *args, **kwargs)
return self._config.loop.call_later(delay, real_call)
def make_batched_timer(self, bucket_seconds, chunk_size=100):
"""
Creates and returns an object implementing
:class:`txaio.IBatchedTimer`.
:param bucket_seconds: the number of seconds in each bucket. That
is, a value of 5 means that any timeout within a 5 second
window will be in the same bucket, and get notified at the
same time. This is only accurate to "milliseconds".
:param chunk_size: when "doing" the callbacks in a particular
bucket, this controls how many we do at once before yielding to
the reactor.
"""
def get_seconds():
return self._config.loop.time()
return _BatchedTimer(
bucket_seconds * 1000.0, chunk_size,
seconds_provider=get_seconds,
delayed_call_creator=self.call_later,
)
def is_called(self, future):
return future.done()
def resolve(self, future, result=None):
future.set_result(result)
def reject(self, future, error=None):
if error is None:
error = create_failure() # will be error if we're not in an "except"
elif isinstance(error, Exception):
error = FailedFuture(type(error), error, None)
else:
if not isinstance(error, IFailedFuture):
raise RuntimeError("reject requires an IFailedFuture or Exception")
future.set_exception(error.value)
def cancel(self, future):
future.cancel()
def create_failure(self, exception=None):
"""
This returns an object implementing IFailedFuture.
If exception is None (the default) we MUST be called within an
"except" block (such that sys.exc_info() returns useful
information).
"""
if exception:
return FailedFuture(type(exception), exception, None)
return FailedFuture(*sys.exc_info())
def gather(self, futures, consume_exceptions=True):
"""
This returns a Future that waits for all the Futures in the list
``futures``
:param futures: a list of Futures (or coroutines?)
:param consume_exceptions: if True, any errors are eaten and
returned in the result list.
"""
# from the asyncio docs: "If return_exceptions is True, exceptions
# in the tasks are treated the same as successful results, and
# gathered in the result list; otherwise, the first raised
# exception will be immediately propagated to the returned
# future."
return asyncio.gather(*futures, return_exceptions=consume_exceptions)
def sleep(self, delay):
"""
Inline sleep for use in co-routines.
:param delay: Time to sleep in seconds.
:type delay: float
"""
return asyncio.ensure_future(asyncio.sleep(delay))
|
crossbario/txaio | txaio/aio.py | _AsyncioApi.gather | python | def gather(self, futures, consume_exceptions=True):
# from the asyncio docs: "If return_exceptions is True, exceptions
# in the tasks are treated the same as successful results, and
# gathered in the result list; otherwise, the first raised
# exception will be immediately propagated to the returned
# future."
return asyncio.gather(*futures, return_exceptions=consume_exceptions) | This returns a Future that waits for all the Futures in the list
``futures``
:param futures: a list of Futures (or coroutines?)
:param consume_exceptions: if True, any errors are eaten and
returned in the result list. | train | https://github.com/crossbario/txaio/blob/29c77ff1210cabd4cc03f16f34672612e7eef704/txaio/aio.py#L522-L538 | null | class _AsyncioApi(object):
using_twisted = False
using_asyncio = True
def __init__(self, config):
if config.loop is None:
config.loop = asyncio.get_event_loop()
self._config = config
def failure_message(self, fail):
"""
:param fail: must be an IFailedFuture
returns a unicode error-message
"""
try:
return u'{0}: {1}'.format(
fail._value.__class__.__name__,
str(fail._value),
)
except Exception:
return u'Failed to produce failure message for "{0}"'.format(fail)
def failure_traceback(self, fail):
"""
:param fail: must be an IFailedFuture
returns a traceback instance
"""
return fail._traceback
def failure_format_traceback(self, fail):
"""
:param fail: must be an IFailedFuture
returns a string
"""
try:
f = six.StringIO()
traceback.print_exception(
fail._type,
fail.value,
fail._traceback,
file=f,
)
return f.getvalue()
except Exception:
return u"Failed to format failure traceback for '{0}'".format(fail)
def create_future(self, result=_unspecified, error=_unspecified, canceller=_unspecified):
if result is not _unspecified and error is not _unspecified:
raise ValueError("Cannot have both result and error.")
f = _create_future(loop=self._config.loop)
if result is not _unspecified:
resolve(f, result)
elif error is not _unspecified:
reject(f, error)
# Twisted's only API for cancelling is to pass a
# single-argument callable to the Deferred constructor, so
# txaio apes that here for asyncio. The argument is the Future
# that has been cancelled.
if canceller is not _unspecified:
def done(f):
try:
f.exception()
except asyncio.CancelledError:
canceller(f)
f.add_done_callback(done)
return f
def create_future_success(self, result):
return self.create_future(result=result)
def create_future_error(self, error=None):
f = self.create_future()
reject(f, error)
return f
def as_future(self, fun, *args, **kwargs):
try:
res = fun(*args, **kwargs)
except Exception:
return create_future_error(create_failure())
else:
if isinstance(res, Future):
return res
elif iscoroutine(res):
return _create_task(res, loop=self._config.loop)
elif isinstance(res, AsyncGeneratorType):
raise RuntimeError(
"as_future() received an async generator function; does "
"'{}' use 'yield' when you meant 'await'?".format(
str(fun)
)
)
else:
return create_future_success(res)
def is_future(self, obj):
return iscoroutine(obj) or isinstance(obj, Future)
def call_later(self, delay, fun, *args, **kwargs):
# loop.call_later doesn't support kwargs
real_call = functools.partial(fun, *args, **kwargs)
return self._config.loop.call_later(delay, real_call)
def make_batched_timer(self, bucket_seconds, chunk_size=100):
"""
Creates and returns an object implementing
:class:`txaio.IBatchedTimer`.
:param bucket_seconds: the number of seconds in each bucket. That
is, a value of 5 means that any timeout within a 5 second
window will be in the same bucket, and get notified at the
same time. This is only accurate to "milliseconds".
:param chunk_size: when "doing" the callbacks in a particular
bucket, this controls how many we do at once before yielding to
the reactor.
"""
def get_seconds():
return self._config.loop.time()
return _BatchedTimer(
bucket_seconds * 1000.0, chunk_size,
seconds_provider=get_seconds,
delayed_call_creator=self.call_later,
)
def is_called(self, future):
return future.done()
def resolve(self, future, result=None):
future.set_result(result)
def reject(self, future, error=None):
    """Reject ``future`` with ``error``.

    ``error`` may be None (a failure is captured from the current
    exception context), an Exception instance (wrapped in a
    FailedFuture), or an object implementing IFailedFuture.
    """
    if error is None:
        error = create_failure()  # will be error if we're not in an "except"
    elif isinstance(error, Exception):
        error = FailedFuture(type(error), error, None)
    elif not isinstance(error, IFailedFuture):
        raise RuntimeError("reject requires an IFailedFuture or Exception")
    future.set_exception(error.value)
def cancel(self, future):
    """Cancel ``future``."""
    future.cancel()
def create_failure(self, exception=None):
    """
    This returns an object implementing IFailedFuture.

    If exception is None (the default) we MUST be called within an
    "except" block (such that sys.exc_info() returns useful
    information).
    """
    if not exception:
        # capture whatever is currently being handled
        return FailedFuture(*sys.exc_info())
    return FailedFuture(type(exception), exception, None)
def add_callbacks(self, future, callback, errback):
    """
    callback or errback may be None, but at least one must be
    non-None.
    """
    def _notify(completed):
        # NOTE: the callback itself is inside the try on purpose -- an
        # exception raised *by* the callback is routed to the errback,
        # matching the original behavior.
        try:
            value = completed.result()
            if callback:
                callback(value)
        except Exception:
            if errback:
                errback(create_failure())
    return future.add_done_callback(_notify)
def sleep(self, delay):
    """
    Inline sleep for use in co-routines.

    :param delay: Time to sleep in seconds.
    :type delay: float
    """
    nap = asyncio.sleep(delay)
    return asyncio.ensure_future(nap)
|
def _use_framework(module):
    """
    Internal helper, to set this modules methods to a specified
    framework helper-methods.
    """
    import txaio
    for method_name in __all__:
        # the two selector functions themselves must never be overwritten
        if method_name in ['use_twisted', 'use_asyncio']:
            continue
        setattr(txaio, method_name,
                getattr(module, method_name))
framework helper-methods. | train | https://github.com/crossbario/txaio/blob/29c77ff1210cabd4cc03f16f34672612e7eef704/txaio/__init__.py#L130-L140 | null | ###############################################################################
#
# The MIT License (MIT)
#
# Copyright (c) Crossbar.io Technologies GmbH
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
###############################################################################
from __future__ import absolute_import
from txaio._version import __version__
from txaio.interfaces import IFailedFuture, ILogger
version = __version__
# This is the API
# see tx.py for Twisted implementation
# see aio.py for asyncio/trollius implementation
class _Config(object):
"""
This holds all valid configuration options, accessed as
class-level variables. For example, if you were using asyncio:
.. sourcecode:: python
txaio.config.loop = asyncio.get_event_loop()
``loop`` is populated automatically (while importing one of the
framework-specific libraries) but can be changed before any call
into this library. Currently, it's only used by :meth:`call_later`
If using asyncio, you must set this to an event-loop (by default,
we use asyncio.get_event_loop). If using Twisted, set this to a
reactor instance (by default we "from twisted.internet import
reactor" on the first call to call_later)
"""
#: the event-loop object to use
loop = None
__all__ = (
'with_config', # allow mutliple custom configurations at once
'using_twisted', # True if we're using Twisted
'using_asyncio', # True if we're using asyncio
'use_twisted', # sets the library to use Twisted, or exception
'use_asyncio', # sets the library to use asyncio, or exception
'config', # the config instance, access via attributes
'create_future', # create a Future (can be already resolved/errored)
'create_future_success',
'create_future_error',
'create_failure', # return an object implementing IFailedFuture
'as_future', # call a method, and always return a Future
'is_future', # True for Deferreds in tx and Futures, @coroutines in asyncio
'reject', # errback a Future
'resolve', # callback a Future
'cancel', # cancel a Future
'add_callbacks', # add callback and/or errback
'gather', # return a Future waiting for several other Futures
'is_called', # True if the Future has a result
'call_later', # call the callback after the given delay seconds
'failure_message', # a printable error-message from a IFailedFuture
'failure_traceback', # returns a traceback instance from an IFailedFuture
'failure_format_traceback', # a string, the formatted traceback
'make_batched_timer', # create BatchedTimer/IBatchedTimer instances
'make_logger', # creates an object implementing ILogger
'start_logging', # initializes logging (may grab stdin at this point)
'set_global_log_level', # Set the global log level
'get_global_log_level', # Get the global log level
'add_log_categories',
'IFailedFuture', # describes API for arg to errback()s
'ILogger', # API for logging
'sleep', # little helper for inline sleeping
)
# which backend was selected explicitly, if any ('twisted' or 'asyncio')
_explicit_framework = None


def use_twisted():
    """Select Twisted as the backing framework; raises if asyncio was already chosen."""
    global _explicit_framework
    if _explicit_framework is not None and _explicit_framework != 'twisted':
        raise RuntimeError("Explicitly using '{}' already".format(_explicit_framework))
    _explicit_framework = 'twisted'
    from txaio import tx
    _use_framework(tx)
    import txaio
    txaio.using_twisted = True
    txaio.using_asyncio = False
def use_asyncio():
    """Select asyncio as the backing framework; raises if Twisted was already chosen."""
    global _explicit_framework
    if _explicit_framework is not None and _explicit_framework != 'asyncio':
        raise RuntimeError("Explicitly using '{}' already".format(_explicit_framework))
    _explicit_framework = 'asyncio'
    from txaio import aio
    _use_framework(aio)
    import txaio
    txaio.using_twisted = False
    txaio.using_asyncio = True
# use the "un-framework", which is neither asyncio nor twisted and
# just throws an exception -- this forces you to call .use_twisted()
# or .use_asyncio() to use the library.
from txaio import _unframework # noqa
_use_framework(_unframework)
|
def start_logging(out=_stdout, level='info'):
    """
    Start logging to the file-like object in ``out``. By default, this
    is stdout.
    """
    global _loggers, _observer, _log_level, _started_logging

    if level not in log_levels:
        raise RuntimeError(
            "Invalid log level '{0}'; valid are: {1}".format(
                level, ', '.join(log_levels)
            )
        )

    # idempotent: the first call wins, later calls are ignored
    if _started_logging:
        return
    _started_logging = True
    _log_level = level
    set_global_log_level(_log_level)

    if out:
        _observer = _LogObserver(out)

    if _NEW_LOGGER:
        _observers = []
        if _observer:
            _observers.append(_observer)
        globalLogBeginner.beginLoggingTo(_observers)
    else:
        assert out, "out needs to be given a value if using Twisteds before 15.2"
        from twisted.python import log
        log.startLogging(out)
is stdout. | train | https://github.com/crossbario/txaio/blob/29c77ff1210cabd4cc03f16f34672612e7eef704/txaio/tx.py#L332-L365 | null | ###############################################################################
#
# The MIT License (MIT)
#
# Copyright (c) Crossbar.io Technologies GmbH
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
###############################################################################
from __future__ import absolute_import, division, print_function
import os
import sys
import weakref
import inspect
from functools import partial
from twisted.python.failure import Failure
from twisted.internet.defer import maybeDeferred, Deferred, DeferredList
from twisted.internet.defer import succeed, fail
from twisted.internet.interfaces import IReactorTime
from zope.interface import provider
from txaio.interfaces import IFailedFuture, ILogger, log_levels
from txaio._iotype import guess_stream_needs_encoding
from txaio import _Config
from txaio._common import _BatchedTimer
import six
PY3_CORO = False
if six.PY3:
try:
from twisted.internet.defer import ensureDeferred
from asyncio import iscoroutinefunction
PY3_CORO = True
except ImportError:
pass
using_twisted = True
using_asyncio = False
config = _Config()
_stderr, _stdout = sys.stderr, sys.stdout
# some book-keeping variables here. _observer is used as a global by
# the "backwards compatible" (Twisted < 15) loggers. The _loggers object
# is a weak-ref set; we add Logger instances to this *until* such
# time as start_logging is called (with the desired log-level) and
# then we call _set_log_level on each instance. After that,
# Logger's ctor uses _log_level directly.
_observer = None # for Twisted legacy logging support; see below
_loggers = weakref.WeakSet() # weak-references of each logger we've created
_log_level = 'info' # global log level; possibly changed in start_logging()
_started_logging = False
_categories = {}
IFailedFuture.register(Failure)
_NEW_LOGGER = False
try:
# Twisted 15+
from twisted.logger import Logger as _Logger, formatEvent, ILogObserver
from twisted.logger import globalLogBeginner, formatTime, LogLevel
ILogger.register(_Logger)
_NEW_LOGGER = True
except ImportError:
# we still support Twisted 12 and 13, which doesn't have new-logger
from zope.interface import Interface
from datetime import datetime
import time
# provide our own simple versions of what Twisted new-logger does
class ILogObserver(Interface):
pass
def formatTime(t): # noqa
dt = datetime.fromtimestamp(t)
return six.u(dt.strftime("%Y-%m-%dT%H:%M:%S%z"))
def formatEvent(event): # noqa
msg = event['log_format']
return msg.format(**event)
class LogLevel:
critical = 'critical'
error = 'error'
warn = 'warn'
info = 'info'
debug = 'debug'
trace = 'trace'
@classmethod
def lookupByName(cls, name): # noqa
return getattr(cls, name)
class _Logger(ILogger):
def __init__(self, **kwargs):
self.namespace = kwargs.get('namespace', None)
def emit(self, level, format='', **kwargs):
kwargs['log_time'] = time.time()
kwargs['log_level'] = level
kwargs['log_format'] = format
kwargs['log_namespace'] = self.namespace
# NOTE: the other loggers are ignoring any log messages
# before start_logging() as well
if _observer:
_observer(kwargs)
def _no_op(*args, **kwargs):
pass
def add_log_categories(categories):
    """Merge ``categories`` (a dict mapping category id to format string) into the registry."""
    _categories.update(categories)
def with_config(loop=None):
    """Return a ``_TxApi`` bound to the global config.

    Twisted has exactly one global reactor, so passing a ``loop`` that
    differs from the already-configured one is an error.
    """
    global config
    if loop is not None:
        if config.loop is not None and config.loop is not loop:
            raise RuntimeError(
                "Twisted has only a single, global reactor. You passed in "
                "a reactor different from the one already configured "
                "in txaio.config.loop"
            )
    return _TxApi(config)
# NOTE: beware that twisted.logger._logger.Logger copies itself via an
# overriden __get__ method when used as recommended as a class
# descriptor. So, we override __get__ to just return ``self`` which
# means ``log_source`` will be wrong, but we don't document that as a
# key that you can depend on anyway :/
class Logger(object):
def __init__(self, level=None, logger=None, namespace=None, observer=None):
assert logger, "Should not be instantiated directly."
self._logger = logger(observer=observer, namespace=namespace)
self._log_level_set_explicitly = False
if level:
self.set_log_level(level)
else:
self._set_log_level(_log_level)
_loggers.add(self)
def __get__(self, oself, type=None):
# this causes the Logger to lie about the "source=", but
# otherwise we create a new Logger instance every time we do
# "self.log.info()" if we use it like:
# class Foo:
# log = make_logger
return self
def _log(self, level, *args, **kwargs):
# Look for a log_category, switch it in if we have it
if "log_category" in kwargs and kwargs["log_category"] in _categories:
args = tuple()
kwargs["format"] = _categories.get(kwargs["log_category"])
self._logger.emit(level, *args, **kwargs)
def emit(self, level, *args, **kwargs):
if log_levels.index(self._log_level) < log_levels.index(level):
return
if level == "trace":
return self._trace(*args, **kwargs)
level = LogLevel.lookupByName(level)
return self._log(level, *args, **kwargs)
def set_log_level(self, level, keep=True):
"""
Set the log level. If keep is True, then it will not change along with
global log changes.
"""
self._set_log_level(level)
self._log_level_set_explicitly = keep
def _set_log_level(self, level):
# up to the desired level, we don't do anything, as we're a
# "real" Twisted new-logger; for methods *after* the desired
# level, we bind to the no_op method
desired_index = log_levels.index(level)
for (idx, name) in enumerate(log_levels):
if name == 'none':
continue
if idx > desired_index:
current = getattr(self, name, None)
if not current == _no_op or current is None:
setattr(self, name, _no_op)
if name == 'error':
setattr(self, 'failure', _no_op)
else:
if getattr(self, name, None) in (_no_op, None):
if name == 'trace':
setattr(self, "trace", self._trace)
else:
setattr(self, name,
partial(self._log, LogLevel.lookupByName(name)))
if name == 'error':
setattr(self, "failure", self._failure)
self._log_level = level
def _failure(self, format=None, *args, **kw):
return self._logger.failure(format, *args, **kw)
def _trace(self, *args, **kw):
# there is no "trace" level in Twisted -- but this whole
# method will be no-op'd unless we are at the 'trace' level.
self.debug(*args, txaio_trace=True, **kw)
def make_logger(level=None, logger=_Logger, observer=None):
    """Create a Logger whose namespace is the *caller's* context."""
    # we want the namespace to be the calling context of "make_logger"
    # -- so we *have* to pass namespace kwarg to Logger (or else it
    # will always say the context is "make_logger")
    caller = inspect.currentframe().f_back
    if "self" in caller.f_locals:
        # We're probably in a class init or method
        klass = caller.f_locals["self"].__class__
        namespace = '{0}.{1}'.format(klass.__module__, klass.__name__)
    else:
        namespace = caller.f_globals["__name__"]
        if caller.f_code.co_name != "<module>":
            # If it's not the module, and not in a class instance, add the code
            # object's name.
            namespace = namespace + "." + caller.f_code.co_name
    return Logger(level=level, namespace=namespace, logger=logger,
                  observer=observer)
@provider(ILogObserver)
class _LogObserver(object):
"""
Internal helper.
An observer which formats events to a given file.
"""
to_tx = {
'critical': LogLevel.critical,
'error': LogLevel.error,
'warn': LogLevel.warn,
'info': LogLevel.info,
'debug': LogLevel.debug,
'trace': LogLevel.debug,
}
def __init__(self, out):
self._file = out
self._encode = guess_stream_needs_encoding(out)
self._levels = None
def _acceptable_level(self, level):
if self._levels is None:
target_level = log_levels.index(_log_level)
self._levels = [
self.to_tx[lvl]
for lvl in log_levels
if log_levels.index(lvl) <= target_level and lvl != "none"
]
return level in self._levels
def __call__(self, event):
# it seems if a twisted.logger.Logger() has .failure() called
# on it, the log_format will be None for the traceback after
# "Unhandled error in Deferred" -- perhaps this is a Twisted
# bug?
if event['log_format'] is None:
msg = u'{0} {1}{2}'.format(
formatTime(event["log_time"]),
failure_format_traceback(event['log_failure']),
os.linesep,
)
if self._encode:
msg = msg.encode('utf8')
self._file.write(msg)
else:
# although Logger will already have filtered out unwanted
# levels, bare Logger instances from Twisted code won't have.
if 'log_level' in event and self._acceptable_level(event['log_level']):
msg = u'{0} {1}{2}'.format(
formatTime(event["log_time"]),
formatEvent(event),
os.linesep,
)
if self._encode:
msg = msg.encode('utf8')
self._file.write(msg)
_unspecified = object()
class _TxApi(object):
def __init__(self, config):
self._config = config
def failure_message(self, fail):
"""
:param fail: must be an IFailedFuture
returns a unicode error-message
"""
try:
return u'{0}: {1}'.format(
fail.value.__class__.__name__,
fail.getErrorMessage(),
)
except Exception:
return 'Failed to produce failure message for "{0}"'.format(fail)
def failure_traceback(self, fail):
"""
:param fail: must be an IFailedFuture
returns a traceback instance
"""
return fail.tb
def failure_format_traceback(self, fail):
"""
:param fail: must be an IFailedFuture
returns a string
"""
try:
f = six.StringIO()
fail.printTraceback(file=f)
return f.getvalue()
except Exception:
return u"Failed to format failure traceback for '{0}'".format(fail)
def create_future(self, result=_unspecified, error=_unspecified, canceller=None):
if result is not _unspecified and error is not _unspecified:
raise ValueError("Cannot have both result and error.")
f = Deferred(canceller=canceller)
if result is not _unspecified:
resolve(f, result)
elif error is not _unspecified:
reject(f, error)
return f
def create_future_success(self, result):
return succeed(result)
def create_future_error(self, error=None):
return fail(create_failure(error))
def as_future(self, fun, *args, **kwargs):
# Twisted doesn't automagically deal with coroutines on Py3
if PY3_CORO and iscoroutinefunction(fun):
return ensureDeferred(fun(*args, **kwargs))
return maybeDeferred(fun, *args, **kwargs)
def is_future(self, obj):
return isinstance(obj, Deferred)
def call_later(self, delay, fun, *args, **kwargs):
return IReactorTime(self._get_loop()).callLater(delay, fun, *args, **kwargs)
def make_batched_timer(self, bucket_seconds, chunk_size=100):
"""
Creates and returns an object implementing
:class:`txaio.IBatchedTimer`.
:param bucket_seconds: the number of seconds in each bucket. That
is, a value of 5 means that any timeout within a 5 second
window will be in the same bucket, and get notified at the
same time. This is only accurate to "milliseconds".
:param chunk_size: when "doing" the callbacks in a particular
bucket, this controls how many we do at once before yielding to
the reactor.
"""
def get_seconds():
return self._get_loop().seconds()
def create_delayed_call(delay, fun, *args, **kwargs):
return self._get_loop().callLater(delay, fun, *args, **kwargs)
return _BatchedTimer(
bucket_seconds * 1000.0, chunk_size,
seconds_provider=get_seconds,
delayed_call_creator=create_delayed_call,
)
def is_called(self, future):
return future.called
def resolve(self, future, result=None):
future.callback(result)
def reject(self, future, error=None):
if error is None:
error = create_failure()
elif isinstance(error, Exception):
error = Failure(error)
else:
if not isinstance(error, Failure):
raise RuntimeError("reject requires a Failure or Exception")
future.errback(error)
def cancel(self, future):
future.cancel()
def create_failure(self, exception=None):
"""
Create a Failure instance.
if ``exception`` is None (the default), we MUST be inside an
"except" block. This encapsulates the exception into an object
that implements IFailedFuture
"""
if exception:
return Failure(exception)
return Failure()
def add_callbacks(self, future, callback, errback):
"""
callback or errback may be None, but at least one must be
non-None.
"""
assert future is not None
if callback is None:
assert errback is not None
future.addErrback(errback)
else:
# Twisted allows errback to be None here
future.addCallbacks(callback, errback)
return future
def gather(self, futures, consume_exceptions=True):
def completed(res):
rtn = []
for (ok, value) in res:
rtn.append(value)
if not ok and not consume_exceptions:
value.raiseException()
return rtn
# XXX if consume_exceptions is False in asyncio.gather(), it will
# abort on the first raised exception -- should we set
# fireOnOneErrback=True (if consume_exceptions=False?) -- but then
# we'll have to wrap the errback() to extract the "real" failure
# from the FirstError that gets thrown if you set that ...
dl = DeferredList(list(futures), consumeErrors=consume_exceptions)
# we unpack the (ok, value) tuples into just a list of values, so
# that the callback() gets the same value in asyncio and Twisted.
add_callbacks(dl, completed, None)
return dl
def sleep(self, delay):
"""
Inline sleep for use in co-routines.
:param delay: Time to sleep in seconds.
:type delay: float
"""
d = Deferred()
self._get_loop().callLater(delay, d.callback, None)
return d
def _get_loop(self):
"""
internal helper
"""
# we import and assign the default here (and not, e.g., when
# making Config) so as to delay importing reactor as long as
# possible in case someone is installing a custom one.
if self._config.loop is None:
from twisted.internet import reactor
self._config.loop = reactor
return self._config.loop
def set_global_log_level(level):
    """
    Set the global log level on all loggers instantiated by txaio.
    """
    global _log_level
    # loggers that had their level set explicitly keep it
    for logger in _loggers:
        if not logger._log_level_set_explicitly:
            logger._set_log_level(level)
    _log_level = level
def get_global_log_level():
    """Return the current module-global log level name (e.g. 'info')."""
    return _log_level
# Module-level convenience API: every helper below is bound to a single
# _TxApi instance built on the global config object.
_default_api = _TxApi(config)
failure_message = _default_api.failure_message
failure_traceback = _default_api.failure_traceback
failure_format_traceback = _default_api.failure_format_traceback
create_future = _default_api.create_future
create_future_success = _default_api.create_future_success
create_future_error = _default_api.create_future_error
as_future = _default_api.as_future
is_future = _default_api.is_future
call_later = _default_api.call_later
make_batched_timer = _default_api.make_batched_timer
is_called = _default_api.is_called
resolve = _default_api.resolve
reject = _default_api.reject
cancel = _default_api.cancel
create_failure = _default_api.create_failure
add_callbacks = _default_api.add_callbacks
gather = _default_api.gather
sleep = _default_api.sleep
|
def set_global_log_level(level):
    """
    Set the global log level on all loggers instantiated by txaio.
    """
    # loggers that had their level set explicitly keep it
    for item in _loggers:
        if not item._log_level_set_explicitly:
            item._set_log_level(level)
    global _log_level
    _log_level = level
#
# The MIT License (MIT)
#
# Copyright (c) Crossbar.io Technologies GmbH
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
###############################################################################
from __future__ import absolute_import, division, print_function
import os
import sys
import weakref
import inspect
from functools import partial
from twisted.python.failure import Failure
from twisted.internet.defer import maybeDeferred, Deferred, DeferredList
from twisted.internet.defer import succeed, fail
from twisted.internet.interfaces import IReactorTime
from zope.interface import provider
from txaio.interfaces import IFailedFuture, ILogger, log_levels
from txaio._iotype import guess_stream_needs_encoding
from txaio import _Config
from txaio._common import _BatchedTimer
import six
PY3_CORO = False
if six.PY3:
try:
from twisted.internet.defer import ensureDeferred
from asyncio import iscoroutinefunction
PY3_CORO = True
except ImportError:
pass
using_twisted = True
using_asyncio = False
config = _Config()
_stderr, _stdout = sys.stderr, sys.stdout
# some book-keeping variables here. _observer is used as a global by
# the "backwards compatible" (Twisted < 15) loggers. The _loggers object
# is a weak-ref set; we add Logger instances to this *until* such
# time as start_logging is called (with the desired log-level) and
# then we call _set_log_level on each instance. After that,
# Logger's ctor uses _log_level directly.
_observer = None # for Twisted legacy logging support; see below
_loggers = weakref.WeakSet() # weak-references of each logger we've created
_log_level = 'info' # global log level; possibly changed in start_logging()
_started_logging = False
_categories = {}
IFailedFuture.register(Failure)
_NEW_LOGGER = False
try:
# Twisted 15+
from twisted.logger import Logger as _Logger, formatEvent, ILogObserver
from twisted.logger import globalLogBeginner, formatTime, LogLevel
ILogger.register(_Logger)
_NEW_LOGGER = True
except ImportError:
# we still support Twisted 12 and 13, which doesn't have new-logger
from zope.interface import Interface
from datetime import datetime
import time
# provide our own simple versions of what Twisted new-logger does
class ILogObserver(Interface):
pass
def formatTime(t): # noqa
dt = datetime.fromtimestamp(t)
return six.u(dt.strftime("%Y-%m-%dT%H:%M:%S%z"))
def formatEvent(event): # noqa
msg = event['log_format']
return msg.format(**event)
class LogLevel:
critical = 'critical'
error = 'error'
warn = 'warn'
info = 'info'
debug = 'debug'
trace = 'trace'
@classmethod
def lookupByName(cls, name): # noqa
return getattr(cls, name)
class _Logger(ILogger):
def __init__(self, **kwargs):
self.namespace = kwargs.get('namespace', None)
def emit(self, level, format='', **kwargs):
kwargs['log_time'] = time.time()
kwargs['log_level'] = level
kwargs['log_format'] = format
kwargs['log_namespace'] = self.namespace
# NOTE: the other loggers are ignoring any log messages
# before start_logging() as well
if _observer:
_observer(kwargs)
def _no_op(*args, **kwargs):
pass
def add_log_categories(categories):
_categories.update(categories)
def with_config(loop=None):
global config
if loop is not None:
if config.loop is not None and config.loop is not loop:
raise RuntimeError(
"Twisted has only a single, global reactor. You passed in "
"a reactor different from the one already configured "
"in txaio.config.loop"
)
return _TxApi(config)
# NOTE: beware that twisted.logger._logger.Logger copies itself via an
# overriden __get__ method when used as recommended as a class
# descriptor. So, we override __get__ to just return ``self`` which
# means ``log_source`` will be wrong, but we don't document that as a
# key that you can depend on anyway :/
class Logger(object):
def __init__(self, level=None, logger=None, namespace=None, observer=None):
assert logger, "Should not be instantiated directly."
self._logger = logger(observer=observer, namespace=namespace)
self._log_level_set_explicitly = False
if level:
self.set_log_level(level)
else:
self._set_log_level(_log_level)
_loggers.add(self)
def __get__(self, oself, type=None):
# this causes the Logger to lie about the "source=", but
# otherwise we create a new Logger instance every time we do
# "self.log.info()" if we use it like:
# class Foo:
# log = make_logger
return self
def _log(self, level, *args, **kwargs):
# Look for a log_category, switch it in if we have it
if "log_category" in kwargs and kwargs["log_category"] in _categories:
args = tuple()
kwargs["format"] = _categories.get(kwargs["log_category"])
self._logger.emit(level, *args, **kwargs)
def emit(self, level, *args, **kwargs):
if log_levels.index(self._log_level) < log_levels.index(level):
return
if level == "trace":
return self._trace(*args, **kwargs)
level = LogLevel.lookupByName(level)
return self._log(level, *args, **kwargs)
def set_log_level(self, level, keep=True):
"""
Set the log level. If keep is True, then it will not change along with
global log changes.
"""
self._set_log_level(level)
self._log_level_set_explicitly = keep
def _set_log_level(self, level):
# up to the desired level, we don't do anything, as we're a
# "real" Twisted new-logger; for methods *after* the desired
# level, we bind to the no_op method
desired_index = log_levels.index(level)
for (idx, name) in enumerate(log_levels):
if name == 'none':
continue
if idx > desired_index:
current = getattr(self, name, None)
if not current == _no_op or current is None:
setattr(self, name, _no_op)
if name == 'error':
setattr(self, 'failure', _no_op)
else:
if getattr(self, name, None) in (_no_op, None):
if name == 'trace':
setattr(self, "trace", self._trace)
else:
setattr(self, name,
partial(self._log, LogLevel.lookupByName(name)))
if name == 'error':
setattr(self, "failure", self._failure)
self._log_level = level
def _failure(self, format=None, *args, **kw):
return self._logger.failure(format, *args, **kw)
def _trace(self, *args, **kw):
# there is no "trace" level in Twisted -- but this whole
# method will be no-op'd unless we are at the 'trace' level.
self.debug(*args, txaio_trace=True, **kw)
def make_logger(level=None, logger=_Logger, observer=None):
# we want the namespace to be the calling context of "make_logger"
# -- so we *have* to pass namespace kwarg to Logger (or else it
# will always say the context is "make_logger")
cf = inspect.currentframe().f_back
if "self" in cf.f_locals:
# We're probably in a class init or method
cls = cf.f_locals["self"].__class__
namespace = '{0}.{1}'.format(cls.__module__, cls.__name__)
else:
namespace = cf.f_globals["__name__"]
if cf.f_code.co_name != "<module>":
# If it's not the module, and not in a class instance, add the code
# object's name.
namespace = namespace + "." + cf.f_code.co_name
logger = Logger(level=level, namespace=namespace, logger=logger,
observer=observer)
return logger
@provider(ILogObserver)
class _LogObserver(object):
"""
Internal helper.
An observer which formats events to a given file.
"""
to_tx = {
'critical': LogLevel.critical,
'error': LogLevel.error,
'warn': LogLevel.warn,
'info': LogLevel.info,
'debug': LogLevel.debug,
'trace': LogLevel.debug,
}
def __init__(self, out):
self._file = out
self._encode = guess_stream_needs_encoding(out)
self._levels = None
def _acceptable_level(self, level):
if self._levels is None:
target_level = log_levels.index(_log_level)
self._levels = [
self.to_tx[lvl]
for lvl in log_levels
if log_levels.index(lvl) <= target_level and lvl != "none"
]
return level in self._levels
def __call__(self, event):
# it seems if a twisted.logger.Logger() has .failure() called
# on it, the log_format will be None for the traceback after
# "Unhandled error in Deferred" -- perhaps this is a Twisted
# bug?
if event['log_format'] is None:
msg = u'{0} {1}{2}'.format(
formatTime(event["log_time"]),
failure_format_traceback(event['log_failure']),
os.linesep,
)
if self._encode:
msg = msg.encode('utf8')
self._file.write(msg)
else:
# although Logger will already have filtered out unwanted
# levels, bare Logger instances from Twisted code won't have.
if 'log_level' in event and self._acceptable_level(event['log_level']):
msg = u'{0} {1}{2}'.format(
formatTime(event["log_time"]),
formatEvent(event),
os.linesep,
)
if self._encode:
msg = msg.encode('utf8')
self._file.write(msg)
def start_logging(out=_stdout, level='info'):
"""
Start logging to the file-like object in ``out``. By default, this
is stdout.
"""
global _loggers, _observer, _log_level, _started_logging
if level not in log_levels:
raise RuntimeError(
"Invalid log level '{0}'; valid are: {1}".format(
level, ', '.join(log_levels)
)
)
if _started_logging:
return
_started_logging = True
_log_level = level
set_global_log_level(_log_level)
if out:
_observer = _LogObserver(out)
if _NEW_LOGGER:
_observers = []
if _observer:
_observers.append(_observer)
globalLogBeginner.beginLoggingTo(_observers)
else:
assert out, "out needs to be given a value if using Twisteds before 15.2"
from twisted.python import log
log.startLogging(out)
_unspecified = object()
class _TxApi(object):
def __init__(self, config):
self._config = config
def failure_message(self, fail):
"""
:param fail: must be an IFailedFuture
returns a unicode error-message
"""
try:
return u'{0}: {1}'.format(
fail.value.__class__.__name__,
fail.getErrorMessage(),
)
except Exception:
return 'Failed to produce failure message for "{0}"'.format(fail)
def failure_traceback(self, fail):
"""
:param fail: must be an IFailedFuture
returns a traceback instance
"""
return fail.tb
def failure_format_traceback(self, fail):
"""
:param fail: must be an IFailedFuture
returns a string
"""
try:
f = six.StringIO()
fail.printTraceback(file=f)
return f.getvalue()
except Exception:
return u"Failed to format failure traceback for '{0}'".format(fail)
def create_future(self, result=_unspecified, error=_unspecified, canceller=None):
if result is not _unspecified and error is not _unspecified:
raise ValueError("Cannot have both result and error.")
f = Deferred(canceller=canceller)
if result is not _unspecified:
resolve(f, result)
elif error is not _unspecified:
reject(f, error)
return f
def create_future_success(self, result):
return succeed(result)
def create_future_error(self, error=None):
return fail(create_failure(error))
def as_future(self, fun, *args, **kwargs):
# Twisted doesn't automagically deal with coroutines on Py3
if PY3_CORO and iscoroutinefunction(fun):
return ensureDeferred(fun(*args, **kwargs))
return maybeDeferred(fun, *args, **kwargs)
def is_future(self, obj):
return isinstance(obj, Deferred)
def call_later(self, delay, fun, *args, **kwargs):
return IReactorTime(self._get_loop()).callLater(delay, fun, *args, **kwargs)
def make_batched_timer(self, bucket_seconds, chunk_size=100):
"""
Creates and returns an object implementing
:class:`txaio.IBatchedTimer`.
:param bucket_seconds: the number of seconds in each bucket. That
is, a value of 5 means that any timeout within a 5 second
window will be in the same bucket, and get notified at the
same time. This is only accurate to "milliseconds".
:param chunk_size: when "doing" the callbacks in a particular
bucket, this controls how many we do at once before yielding to
the reactor.
"""
def get_seconds():
return self._get_loop().seconds()
def create_delayed_call(delay, fun, *args, **kwargs):
return self._get_loop().callLater(delay, fun, *args, **kwargs)
return _BatchedTimer(
bucket_seconds * 1000.0, chunk_size,
seconds_provider=get_seconds,
delayed_call_creator=create_delayed_call,
)
def is_called(self, future):
return future.called
def resolve(self, future, result=None):
future.callback(result)
def reject(self, future, error=None):
if error is None:
error = create_failure()
elif isinstance(error, Exception):
error = Failure(error)
else:
if not isinstance(error, Failure):
raise RuntimeError("reject requires a Failure or Exception")
future.errback(error)
def cancel(self, future):
future.cancel()
def create_failure(self, exception=None):
"""
Create a Failure instance.
if ``exception`` is None (the default), we MUST be inside an
"except" block. This encapsulates the exception into an object
that implements IFailedFuture
"""
if exception:
return Failure(exception)
return Failure()
def add_callbacks(self, future, callback, errback):
"""
callback or errback may be None, but at least one must be
non-None.
"""
assert future is not None
if callback is None:
assert errback is not None
future.addErrback(errback)
else:
# Twisted allows errback to be None here
future.addCallbacks(callback, errback)
return future
def gather(self, futures, consume_exceptions=True):
def completed(res):
rtn = []
for (ok, value) in res:
rtn.append(value)
if not ok and not consume_exceptions:
value.raiseException()
return rtn
# XXX if consume_exceptions is False in asyncio.gather(), it will
# abort on the first raised exception -- should we set
# fireOnOneErrback=True (if consume_exceptions=False?) -- but then
# we'll have to wrap the errback() to extract the "real" failure
# from the FirstError that gets thrown if you set that ...
dl = DeferredList(list(futures), consumeErrors=consume_exceptions)
# we unpack the (ok, value) tuples into just a list of values, so
# that the callback() gets the same value in asyncio and Twisted.
add_callbacks(dl, completed, None)
return dl
def sleep(self, delay):
"""
Inline sleep for use in co-routines.
:param delay: Time to sleep in seconds.
:type delay: float
"""
d = Deferred()
self._get_loop().callLater(delay, d.callback, None)
return d
def _get_loop(self):
"""
internal helper
"""
# we import and assign the default here (and not, e.g., when
# making Config) so as to delay importing reactor as long as
# possible in case someone is installing a custom one.
if self._config.loop is None:
from twisted.internet import reactor
self._config.loop = reactor
return self._config.loop
def get_global_log_level():
return _log_level
_default_api = _TxApi(config)
failure_message = _default_api.failure_message
failure_traceback = _default_api.failure_traceback
failure_format_traceback = _default_api.failure_format_traceback
create_future = _default_api.create_future
create_future_success = _default_api.create_future_success
create_future_error = _default_api.create_future_error
as_future = _default_api.as_future
is_future = _default_api.is_future
call_later = _default_api.call_later
make_batched_timer = _default_api.make_batched_timer
is_called = _default_api.is_called
resolve = _default_api.resolve
reject = _default_api.reject
cancel = _default_api.cancel
create_failure = _default_api.create_failure
add_callbacks = _default_api.add_callbacks
gather = _default_api.gather
sleep = _default_api.sleep
|
crossbario/txaio | txaio/tx.py | Logger.set_log_level | python | def set_log_level(self, level, keep=True):
self._set_log_level(level)
self._log_level_set_explicitly = keep | Set the log level. If keep is True, then it will not change along with
global log changes. | train | https://github.com/crossbario/txaio/blob/29c77ff1210cabd4cc03f16f34672612e7eef704/txaio/tx.py#L203-L209 | [
"def _set_log_level(self, level):\n # up to the desired level, we don't do anything, as we're a\n # \"real\" Twisted new-logger; for methods *after* the desired\n # level, we bind to the no_op method\n desired_index = log_levels.index(level)\n\n for (idx, name) in enumerate(log_levels):\n if n... | class Logger(object):
def __init__(self, level=None, logger=None, namespace=None, observer=None):
assert logger, "Should not be instantiated directly."
self._logger = logger(observer=observer, namespace=namespace)
self._log_level_set_explicitly = False
if level:
self.set_log_level(level)
else:
self._set_log_level(_log_level)
_loggers.add(self)
def __get__(self, oself, type=None):
# this causes the Logger to lie about the "source=", but
# otherwise we create a new Logger instance every time we do
# "self.log.info()" if we use it like:
# class Foo:
# log = make_logger
return self
def _log(self, level, *args, **kwargs):
# Look for a log_category, switch it in if we have it
if "log_category" in kwargs and kwargs["log_category"] in _categories:
args = tuple()
kwargs["format"] = _categories.get(kwargs["log_category"])
self._logger.emit(level, *args, **kwargs)
def emit(self, level, *args, **kwargs):
if log_levels.index(self._log_level) < log_levels.index(level):
return
if level == "trace":
return self._trace(*args, **kwargs)
level = LogLevel.lookupByName(level)
return self._log(level, *args, **kwargs)
def _set_log_level(self, level):
# up to the desired level, we don't do anything, as we're a
# "real" Twisted new-logger; for methods *after* the desired
# level, we bind to the no_op method
desired_index = log_levels.index(level)
for (idx, name) in enumerate(log_levels):
if name == 'none':
continue
if idx > desired_index:
current = getattr(self, name, None)
if not current == _no_op or current is None:
setattr(self, name, _no_op)
if name == 'error':
setattr(self, 'failure', _no_op)
else:
if getattr(self, name, None) in (_no_op, None):
if name == 'trace':
setattr(self, "trace", self._trace)
else:
setattr(self, name,
partial(self._log, LogLevel.lookupByName(name)))
if name == 'error':
setattr(self, "failure", self._failure)
self._log_level = level
def _failure(self, format=None, *args, **kw):
return self._logger.failure(format, *args, **kw)
def _trace(self, *args, **kw):
# there is no "trace" level in Twisted -- but this whole
# method will be no-op'd unless we are at the 'trace' level.
self.debug(*args, txaio_trace=True, **kw)
|
crossbario/txaio | txaio/tx.py | _TxApi.failure_message | python | def failure_message(self, fail):
try:
return u'{0}: {1}'.format(
fail.value.__class__.__name__,
fail.getErrorMessage(),
)
except Exception:
return 'Failed to produce failure message for "{0}"'.format(fail) | :param fail: must be an IFailedFuture
returns a unicode error-message | train | https://github.com/crossbario/txaio/blob/29c77ff1210cabd4cc03f16f34672612e7eef704/txaio/tx.py#L376-L387 | null | class _TxApi(object):
def __init__(self, config):
self._config = config
def failure_traceback(self, fail):
"""
:param fail: must be an IFailedFuture
returns a traceback instance
"""
return fail.tb
def failure_format_traceback(self, fail):
"""
:param fail: must be an IFailedFuture
returns a string
"""
try:
f = six.StringIO()
fail.printTraceback(file=f)
return f.getvalue()
except Exception:
return u"Failed to format failure traceback for '{0}'".format(fail)
def create_future(self, result=_unspecified, error=_unspecified, canceller=None):
if result is not _unspecified and error is not _unspecified:
raise ValueError("Cannot have both result and error.")
f = Deferred(canceller=canceller)
if result is not _unspecified:
resolve(f, result)
elif error is not _unspecified:
reject(f, error)
return f
def create_future_success(self, result):
return succeed(result)
def create_future_error(self, error=None):
return fail(create_failure(error))
def as_future(self, fun, *args, **kwargs):
# Twisted doesn't automagically deal with coroutines on Py3
if PY3_CORO and iscoroutinefunction(fun):
return ensureDeferred(fun(*args, **kwargs))
return maybeDeferred(fun, *args, **kwargs)
def is_future(self, obj):
return isinstance(obj, Deferred)
def call_later(self, delay, fun, *args, **kwargs):
return IReactorTime(self._get_loop()).callLater(delay, fun, *args, **kwargs)
def make_batched_timer(self, bucket_seconds, chunk_size=100):
"""
Creates and returns an object implementing
:class:`txaio.IBatchedTimer`.
:param bucket_seconds: the number of seconds in each bucket. That
is, a value of 5 means that any timeout within a 5 second
window will be in the same bucket, and get notified at the
same time. This is only accurate to "milliseconds".
:param chunk_size: when "doing" the callbacks in a particular
bucket, this controls how many we do at once before yielding to
the reactor.
"""
def get_seconds():
return self._get_loop().seconds()
def create_delayed_call(delay, fun, *args, **kwargs):
return self._get_loop().callLater(delay, fun, *args, **kwargs)
return _BatchedTimer(
bucket_seconds * 1000.0, chunk_size,
seconds_provider=get_seconds,
delayed_call_creator=create_delayed_call,
)
def is_called(self, future):
return future.called
def resolve(self, future, result=None):
future.callback(result)
def reject(self, future, error=None):
if error is None:
error = create_failure()
elif isinstance(error, Exception):
error = Failure(error)
else:
if not isinstance(error, Failure):
raise RuntimeError("reject requires a Failure or Exception")
future.errback(error)
def cancel(self, future):
future.cancel()
def create_failure(self, exception=None):
"""
Create a Failure instance.
if ``exception`` is None (the default), we MUST be inside an
"except" block. This encapsulates the exception into an object
that implements IFailedFuture
"""
if exception:
return Failure(exception)
return Failure()
def add_callbacks(self, future, callback, errback):
"""
callback or errback may be None, but at least one must be
non-None.
"""
assert future is not None
if callback is None:
assert errback is not None
future.addErrback(errback)
else:
# Twisted allows errback to be None here
future.addCallbacks(callback, errback)
return future
def gather(self, futures, consume_exceptions=True):
def completed(res):
rtn = []
for (ok, value) in res:
rtn.append(value)
if not ok and not consume_exceptions:
value.raiseException()
return rtn
# XXX if consume_exceptions is False in asyncio.gather(), it will
# abort on the first raised exception -- should we set
# fireOnOneErrback=True (if consume_exceptions=False?) -- but then
# we'll have to wrap the errback() to extract the "real" failure
# from the FirstError that gets thrown if you set that ...
dl = DeferredList(list(futures), consumeErrors=consume_exceptions)
# we unpack the (ok, value) tuples into just a list of values, so
# that the callback() gets the same value in asyncio and Twisted.
add_callbacks(dl, completed, None)
return dl
def sleep(self, delay):
"""
Inline sleep for use in co-routines.
:param delay: Time to sleep in seconds.
:type delay: float
"""
d = Deferred()
self._get_loop().callLater(delay, d.callback, None)
return d
def _get_loop(self):
"""
internal helper
"""
# we import and assign the default here (and not, e.g., when
# making Config) so as to delay importing reactor as long as
# possible in case someone is installing a custom one.
if self._config.loop is None:
from twisted.internet import reactor
self._config.loop = reactor
return self._config.loop
|
crossbario/txaio | txaio/tx.py | _TxApi.failure_format_traceback | python | def failure_format_traceback(self, fail):
try:
f = six.StringIO()
fail.printTraceback(file=f)
return f.getvalue()
except Exception:
return u"Failed to format failure traceback for '{0}'".format(fail) | :param fail: must be an IFailedFuture
returns a string | train | https://github.com/crossbario/txaio/blob/29c77ff1210cabd4cc03f16f34672612e7eef704/txaio/tx.py#L396-L406 | null | class _TxApi(object):
def __init__(self, config):
self._config = config
def failure_message(self, fail):
"""
:param fail: must be an IFailedFuture
returns a unicode error-message
"""
try:
return u'{0}: {1}'.format(
fail.value.__class__.__name__,
fail.getErrorMessage(),
)
except Exception:
return 'Failed to produce failure message for "{0}"'.format(fail)
def failure_traceback(self, fail):
"""
:param fail: must be an IFailedFuture
returns a traceback instance
"""
return fail.tb
def create_future(self, result=_unspecified, error=_unspecified, canceller=None):
if result is not _unspecified and error is not _unspecified:
raise ValueError("Cannot have both result and error.")
f = Deferred(canceller=canceller)
if result is not _unspecified:
resolve(f, result)
elif error is not _unspecified:
reject(f, error)
return f
def create_future_success(self, result):
return succeed(result)
def create_future_error(self, error=None):
return fail(create_failure(error))
def as_future(self, fun, *args, **kwargs):
# Twisted doesn't automagically deal with coroutines on Py3
if PY3_CORO and iscoroutinefunction(fun):
return ensureDeferred(fun(*args, **kwargs))
return maybeDeferred(fun, *args, **kwargs)
def is_future(self, obj):
return isinstance(obj, Deferred)
def call_later(self, delay, fun, *args, **kwargs):
return IReactorTime(self._get_loop()).callLater(delay, fun, *args, **kwargs)
def make_batched_timer(self, bucket_seconds, chunk_size=100):
"""
Creates and returns an object implementing
:class:`txaio.IBatchedTimer`.
:param bucket_seconds: the number of seconds in each bucket. That
is, a value of 5 means that any timeout within a 5 second
window will be in the same bucket, and get notified at the
same time. This is only accurate to "milliseconds".
:param chunk_size: when "doing" the callbacks in a particular
bucket, this controls how many we do at once before yielding to
the reactor.
"""
def get_seconds():
return self._get_loop().seconds()
def create_delayed_call(delay, fun, *args, **kwargs):
return self._get_loop().callLater(delay, fun, *args, **kwargs)
return _BatchedTimer(
bucket_seconds * 1000.0, chunk_size,
seconds_provider=get_seconds,
delayed_call_creator=create_delayed_call,
)
def is_called(self, future):
return future.called
def resolve(self, future, result=None):
future.callback(result)
def reject(self, future, error=None):
if error is None:
error = create_failure()
elif isinstance(error, Exception):
error = Failure(error)
else:
if not isinstance(error, Failure):
raise RuntimeError("reject requires a Failure or Exception")
future.errback(error)
def cancel(self, future):
future.cancel()
def create_failure(self, exception=None):
"""
Create a Failure instance.
if ``exception`` is None (the default), we MUST be inside an
"except" block. This encapsulates the exception into an object
that implements IFailedFuture
"""
if exception:
return Failure(exception)
return Failure()
def add_callbacks(self, future, callback, errback):
"""
callback or errback may be None, but at least one must be
non-None.
"""
assert future is not None
if callback is None:
assert errback is not None
future.addErrback(errback)
else:
# Twisted allows errback to be None here
future.addCallbacks(callback, errback)
return future
def gather(self, futures, consume_exceptions=True):
def completed(res):
rtn = []
for (ok, value) in res:
rtn.append(value)
if not ok and not consume_exceptions:
value.raiseException()
return rtn
# XXX if consume_exceptions is False in asyncio.gather(), it will
# abort on the first raised exception -- should we set
# fireOnOneErrback=True (if consume_exceptions=False?) -- but then
# we'll have to wrap the errback() to extract the "real" failure
# from the FirstError that gets thrown if you set that ...
dl = DeferredList(list(futures), consumeErrors=consume_exceptions)
# we unpack the (ok, value) tuples into just a list of values, so
# that the callback() gets the same value in asyncio and Twisted.
add_callbacks(dl, completed, None)
return dl
def sleep(self, delay):
"""
Inline sleep for use in co-routines.
:param delay: Time to sleep in seconds.
:type delay: float
"""
d = Deferred()
self._get_loop().callLater(delay, d.callback, None)
return d
def _get_loop(self):
"""
internal helper
"""
# we import and assign the default here (and not, e.g., when
# making Config) so as to delay importing reactor as long as
# possible in case someone is installing a custom one.
if self._config.loop is None:
from twisted.internet import reactor
self._config.loop = reactor
return self._config.loop
|
crossbario/txaio | txaio/tx.py | _TxApi.make_batched_timer | python | def make_batched_timer(self, bucket_seconds, chunk_size=100):
def get_seconds():
return self._get_loop().seconds()
def create_delayed_call(delay, fun, *args, **kwargs):
return self._get_loop().callLater(delay, fun, *args, **kwargs)
return _BatchedTimer(
bucket_seconds * 1000.0, chunk_size,
seconds_provider=get_seconds,
delayed_call_creator=create_delayed_call,
) | Creates and returns an object implementing
:class:`txaio.IBatchedTimer`.
:param bucket_seconds: the number of seconds in each bucket. That
is, a value of 5 means that any timeout within a 5 second
window will be in the same bucket, and get notified at the
same time. This is only accurate to "milliseconds".
:param chunk_size: when "doing" the callbacks in a particular
bucket, this controls how many we do at once before yielding to
the reactor. | train | https://github.com/crossbario/txaio/blob/29c77ff1210cabd4cc03f16f34672612e7eef704/txaio/tx.py#L437-L462 | null | class _TxApi(object):
def __init__(self, config):
self._config = config
def failure_message(self, fail):
"""
:param fail: must be an IFailedFuture
returns a unicode error-message
"""
try:
return u'{0}: {1}'.format(
fail.value.__class__.__name__,
fail.getErrorMessage(),
)
except Exception:
return 'Failed to produce failure message for "{0}"'.format(fail)
def failure_traceback(self, fail):
"""
:param fail: must be an IFailedFuture
returns a traceback instance
"""
return fail.tb
def failure_format_traceback(self, fail):
"""
:param fail: must be an IFailedFuture
returns a string
"""
try:
f = six.StringIO()
fail.printTraceback(file=f)
return f.getvalue()
except Exception:
return u"Failed to format failure traceback for '{0}'".format(fail)
def create_future(self, result=_unspecified, error=_unspecified, canceller=None):
if result is not _unspecified and error is not _unspecified:
raise ValueError("Cannot have both result and error.")
f = Deferred(canceller=canceller)
if result is not _unspecified:
resolve(f, result)
elif error is not _unspecified:
reject(f, error)
return f
def create_future_success(self, result):
return succeed(result)
def create_future_error(self, error=None):
return fail(create_failure(error))
def as_future(self, fun, *args, **kwargs):
# Twisted doesn't automagically deal with coroutines on Py3
if PY3_CORO and iscoroutinefunction(fun):
return ensureDeferred(fun(*args, **kwargs))
return maybeDeferred(fun, *args, **kwargs)
def is_future(self, obj):
return isinstance(obj, Deferred)
def call_later(self, delay, fun, *args, **kwargs):
return IReactorTime(self._get_loop()).callLater(delay, fun, *args, **kwargs)
def is_called(self, future):
return future.called
def resolve(self, future, result=None):
future.callback(result)
def reject(self, future, error=None):
if error is None:
error = create_failure()
elif isinstance(error, Exception):
error = Failure(error)
else:
if not isinstance(error, Failure):
raise RuntimeError("reject requires a Failure or Exception")
future.errback(error)
def cancel(self, future):
future.cancel()
def create_failure(self, exception=None):
"""
Create a Failure instance.
if ``exception`` is None (the default), we MUST be inside an
"except" block. This encapsulates the exception into an object
that implements IFailedFuture
"""
if exception:
return Failure(exception)
return Failure()
def add_callbacks(self, future, callback, errback):
"""
callback or errback may be None, but at least one must be
non-None.
"""
assert future is not None
if callback is None:
assert errback is not None
future.addErrback(errback)
else:
# Twisted allows errback to be None here
future.addCallbacks(callback, errback)
return future
def gather(self, futures, consume_exceptions=True):
def completed(res):
rtn = []
for (ok, value) in res:
rtn.append(value)
if not ok and not consume_exceptions:
value.raiseException()
return rtn
# XXX if consume_exceptions is False in asyncio.gather(), it will
# abort on the first raised exception -- should we set
# fireOnOneErrback=True (if consume_exceptions=False?) -- but then
# we'll have to wrap the errback() to extract the "real" failure
# from the FirstError that gets thrown if you set that ...
dl = DeferredList(list(futures), consumeErrors=consume_exceptions)
# we unpack the (ok, value) tuples into just a list of values, so
# that the callback() gets the same value in asyncio and Twisted.
add_callbacks(dl, completed, None)
return dl
def sleep(self, delay):
"""
Inline sleep for use in co-routines.
:param delay: Time to sleep in seconds.
:type delay: float
"""
d = Deferred()
self._get_loop().callLater(delay, d.callback, None)
return d
def _get_loop(self):
"""
internal helper
"""
# we import and assign the default here (and not, e.g., when
# making Config) so as to delay importing reactor as long as
# possible in case someone is installing a custom one.
if self._config.loop is None:
from twisted.internet import reactor
self._config.loop = reactor
return self._config.loop
|
crossbario/txaio | txaio/tx.py | _TxApi.add_callbacks | python | def add_callbacks(self, future, callback, errback):
assert future is not None
if callback is None:
assert errback is not None
future.addErrback(errback)
else:
# Twisted allows errback to be None here
future.addCallbacks(callback, errback)
return future | callback or errback may be None, but at least one must be
non-None. | train | https://github.com/crossbario/txaio/blob/29c77ff1210cabd4cc03f16f34672612e7eef704/txaio/tx.py#L495-L507 | null | class _TxApi(object):
def __init__(self, config):
self._config = config
def failure_message(self, fail):
"""
:param fail: must be an IFailedFuture
returns a unicode error-message
"""
try:
return u'{0}: {1}'.format(
fail.value.__class__.__name__,
fail.getErrorMessage(),
)
except Exception:
return 'Failed to produce failure message for "{0}"'.format(fail)
def failure_traceback(self, fail):
"""
:param fail: must be an IFailedFuture
returns a traceback instance
"""
return fail.tb
def failure_format_traceback(self, fail):
"""
:param fail: must be an IFailedFuture
returns a string
"""
try:
f = six.StringIO()
fail.printTraceback(file=f)
return f.getvalue()
except Exception:
return u"Failed to format failure traceback for '{0}'".format(fail)
def create_future(self, result=_unspecified, error=_unspecified, canceller=None):
if result is not _unspecified and error is not _unspecified:
raise ValueError("Cannot have both result and error.")
f = Deferred(canceller=canceller)
if result is not _unspecified:
resolve(f, result)
elif error is not _unspecified:
reject(f, error)
return f
def create_future_success(self, result):
return succeed(result)
def create_future_error(self, error=None):
return fail(create_failure(error))
def as_future(self, fun, *args, **kwargs):
# Twisted doesn't automagically deal with coroutines on Py3
if PY3_CORO and iscoroutinefunction(fun):
return ensureDeferred(fun(*args, **kwargs))
return maybeDeferred(fun, *args, **kwargs)
def is_future(self, obj):
return isinstance(obj, Deferred)
def call_later(self, delay, fun, *args, **kwargs):
return IReactorTime(self._get_loop()).callLater(delay, fun, *args, **kwargs)
def make_batched_timer(self, bucket_seconds, chunk_size=100):
"""
Creates and returns an object implementing
:class:`txaio.IBatchedTimer`.
:param bucket_seconds: the number of seconds in each bucket. That
is, a value of 5 means that any timeout within a 5 second
window will be in the same bucket, and get notified at the
same time. This is only accurate to "milliseconds".
:param chunk_size: when "doing" the callbacks in a particular
bucket, this controls how many we do at once before yielding to
the reactor.
"""
def get_seconds():
return self._get_loop().seconds()
def create_delayed_call(delay, fun, *args, **kwargs):
return self._get_loop().callLater(delay, fun, *args, **kwargs)
return _BatchedTimer(
bucket_seconds * 1000.0, chunk_size,
seconds_provider=get_seconds,
delayed_call_creator=create_delayed_call,
)
def is_called(self, future):
return future.called
def resolve(self, future, result=None):
future.callback(result)
def reject(self, future, error=None):
if error is None:
error = create_failure()
elif isinstance(error, Exception):
error = Failure(error)
else:
if not isinstance(error, Failure):
raise RuntimeError("reject requires a Failure or Exception")
future.errback(error)
def cancel(self, future):
future.cancel()
def create_failure(self, exception=None):
"""
Create a Failure instance.
if ``exception`` is None (the default), we MUST be inside an
"except" block. This encapsulates the exception into an object
that implements IFailedFuture
"""
if exception:
return Failure(exception)
return Failure()
def gather(self, futures, consume_exceptions=True):
def completed(res):
rtn = []
for (ok, value) in res:
rtn.append(value)
if not ok and not consume_exceptions:
value.raiseException()
return rtn
# XXX if consume_exceptions is False in asyncio.gather(), it will
# abort on the first raised exception -- should we set
# fireOnOneErrback=True (if consume_exceptions=False?) -- but then
# we'll have to wrap the errback() to extract the "real" failure
# from the FirstError that gets thrown if you set that ...
dl = DeferredList(list(futures), consumeErrors=consume_exceptions)
# we unpack the (ok, value) tuples into just a list of values, so
# that the callback() gets the same value in asyncio and Twisted.
add_callbacks(dl, completed, None)
return dl
def sleep(self, delay):
"""
Inline sleep for use in co-routines.
:param delay: Time to sleep in seconds.
:type delay: float
"""
d = Deferred()
self._get_loop().callLater(delay, d.callback, None)
return d
def _get_loop(self):
"""
internal helper
"""
# we import and assign the default here (and not, e.g., when
# making Config) so as to delay importing reactor as long as
# possible in case someone is installing a custom one.
if self._config.loop is None:
from twisted.internet import reactor
self._config.loop = reactor
return self._config.loop
|
crossbario/txaio | txaio/tx.py | _TxApi.sleep | python | def sleep(self, delay):
d = Deferred()
self._get_loop().callLater(delay, d.callback, None)
return d | Inline sleep for use in co-routines.
:param delay: Time to sleep in seconds.
:type delay: float | train | https://github.com/crossbario/txaio/blob/29c77ff1210cabd4cc03f16f34672612e7eef704/txaio/tx.py#L530-L539 | [
"def _get_loop(self):\n \"\"\"\n internal helper\n \"\"\"\n # we import and assign the default here (and not, e.g., when\n # making Config) so as to delay importing reactor as long as\n # possible in case someone is installing a custom one.\n if self._config.loop is None:\n from twisted.... | class _TxApi(object):
def __init__(self, config):
self._config = config
def failure_message(self, fail):
"""
:param fail: must be an IFailedFuture
returns a unicode error-message
"""
try:
return u'{0}: {1}'.format(
fail.value.__class__.__name__,
fail.getErrorMessage(),
)
except Exception:
return 'Failed to produce failure message for "{0}"'.format(fail)
def failure_traceback(self, fail):
"""
:param fail: must be an IFailedFuture
returns a traceback instance
"""
return fail.tb
def failure_format_traceback(self, fail):
"""
:param fail: must be an IFailedFuture
returns a string
"""
try:
f = six.StringIO()
fail.printTraceback(file=f)
return f.getvalue()
except Exception:
return u"Failed to format failure traceback for '{0}'".format(fail)
def create_future(self, result=_unspecified, error=_unspecified, canceller=None):
if result is not _unspecified and error is not _unspecified:
raise ValueError("Cannot have both result and error.")
f = Deferred(canceller=canceller)
if result is not _unspecified:
resolve(f, result)
elif error is not _unspecified:
reject(f, error)
return f
def create_future_success(self, result):
return succeed(result)
def create_future_error(self, error=None):
return fail(create_failure(error))
def as_future(self, fun, *args, **kwargs):
# Twisted doesn't automagically deal with coroutines on Py3
if PY3_CORO and iscoroutinefunction(fun):
return ensureDeferred(fun(*args, **kwargs))
return maybeDeferred(fun, *args, **kwargs)
def is_future(self, obj):
return isinstance(obj, Deferred)
def call_later(self, delay, fun, *args, **kwargs):
return IReactorTime(self._get_loop()).callLater(delay, fun, *args, **kwargs)
def make_batched_timer(self, bucket_seconds, chunk_size=100):
"""
Creates and returns an object implementing
:class:`txaio.IBatchedTimer`.
:param bucket_seconds: the number of seconds in each bucket. That
is, a value of 5 means that any timeout within a 5 second
window will be in the same bucket, and get notified at the
same time. This is only accurate to "milliseconds".
:param chunk_size: when "doing" the callbacks in a particular
bucket, this controls how many we do at once before yielding to
the reactor.
"""
def get_seconds():
return self._get_loop().seconds()
def create_delayed_call(delay, fun, *args, **kwargs):
return self._get_loop().callLater(delay, fun, *args, **kwargs)
return _BatchedTimer(
bucket_seconds * 1000.0, chunk_size,
seconds_provider=get_seconds,
delayed_call_creator=create_delayed_call,
)
def is_called(self, future):
return future.called
def resolve(self, future, result=None):
future.callback(result)
def reject(self, future, error=None):
if error is None:
error = create_failure()
elif isinstance(error, Exception):
error = Failure(error)
else:
if not isinstance(error, Failure):
raise RuntimeError("reject requires a Failure or Exception")
future.errback(error)
def cancel(self, future):
future.cancel()
def create_failure(self, exception=None):
"""
Create a Failure instance.
if ``exception`` is None (the default), we MUST be inside an
"except" block. This encapsulates the exception into an object
that implements IFailedFuture
"""
if exception:
return Failure(exception)
return Failure()
def add_callbacks(self, future, callback, errback):
"""
callback or errback may be None, but at least one must be
non-None.
"""
assert future is not None
if callback is None:
assert errback is not None
future.addErrback(errback)
else:
# Twisted allows errback to be None here
future.addCallbacks(callback, errback)
return future
def gather(self, futures, consume_exceptions=True):
def completed(res):
rtn = []
for (ok, value) in res:
rtn.append(value)
if not ok and not consume_exceptions:
value.raiseException()
return rtn
# XXX if consume_exceptions is False in asyncio.gather(), it will
# abort on the first raised exception -- should we set
# fireOnOneErrback=True (if consume_exceptions=False?) -- but then
# we'll have to wrap the errback() to extract the "real" failure
# from the FirstError that gets thrown if you set that ...
dl = DeferredList(list(futures), consumeErrors=consume_exceptions)
# we unpack the (ok, value) tuples into just a list of values, so
# that the callback() gets the same value in asyncio and Twisted.
add_callbacks(dl, completed, None)
return dl
def _get_loop(self):
"""
internal helper
"""
# we import and assign the default here (and not, e.g., when
# making Config) so as to delay importing reactor as long as
# possible in case someone is installing a custom one.
if self._config.loop is None:
from twisted.internet import reactor
self._config.loop = reactor
return self._config.loop
|
crossbario/txaio | txaio/tx.py | _TxApi._get_loop | python | def _get_loop(self):
# we import and assign the default here (and not, e.g., when
# making Config) so as to delay importing reactor as long as
# possible in case someone is installing a custom one.
if self._config.loop is None:
from twisted.internet import reactor
self._config.loop = reactor
return self._config.loop | internal helper | train | https://github.com/crossbario/txaio/blob/29c77ff1210cabd4cc03f16f34672612e7eef704/txaio/tx.py#L541-L551 | null | class _TxApi(object):
def __init__(self, config):
self._config = config
def failure_message(self, fail):
"""
:param fail: must be an IFailedFuture
returns a unicode error-message
"""
try:
return u'{0}: {1}'.format(
fail.value.__class__.__name__,
fail.getErrorMessage(),
)
except Exception:
return 'Failed to produce failure message for "{0}"'.format(fail)
def failure_traceback(self, fail):
"""
:param fail: must be an IFailedFuture
returns a traceback instance
"""
return fail.tb
def failure_format_traceback(self, fail):
"""
:param fail: must be an IFailedFuture
returns a string
"""
try:
f = six.StringIO()
fail.printTraceback(file=f)
return f.getvalue()
except Exception:
return u"Failed to format failure traceback for '{0}'".format(fail)
def create_future(self, result=_unspecified, error=_unspecified, canceller=None):
if result is not _unspecified and error is not _unspecified:
raise ValueError("Cannot have both result and error.")
f = Deferred(canceller=canceller)
if result is not _unspecified:
resolve(f, result)
elif error is not _unspecified:
reject(f, error)
return f
def create_future_success(self, result):
return succeed(result)
def create_future_error(self, error=None):
return fail(create_failure(error))
def as_future(self, fun, *args, **kwargs):
# Twisted doesn't automagically deal with coroutines on Py3
if PY3_CORO and iscoroutinefunction(fun):
return ensureDeferred(fun(*args, **kwargs))
return maybeDeferred(fun, *args, **kwargs)
def is_future(self, obj):
return isinstance(obj, Deferred)
def call_later(self, delay, fun, *args, **kwargs):
return IReactorTime(self._get_loop()).callLater(delay, fun, *args, **kwargs)
def make_batched_timer(self, bucket_seconds, chunk_size=100):
"""
Creates and returns an object implementing
:class:`txaio.IBatchedTimer`.
:param bucket_seconds: the number of seconds in each bucket. That
is, a value of 5 means that any timeout within a 5 second
window will be in the same bucket, and get notified at the
same time. This is only accurate to "milliseconds".
:param chunk_size: when "doing" the callbacks in a particular
bucket, this controls how many we do at once before yielding to
the reactor.
"""
def get_seconds():
return self._get_loop().seconds()
def create_delayed_call(delay, fun, *args, **kwargs):
return self._get_loop().callLater(delay, fun, *args, **kwargs)
return _BatchedTimer(
bucket_seconds * 1000.0, chunk_size,
seconds_provider=get_seconds,
delayed_call_creator=create_delayed_call,
)
def is_called(self, future):
return future.called
def resolve(self, future, result=None):
future.callback(result)
def reject(self, future, error=None):
if error is None:
error = create_failure()
elif isinstance(error, Exception):
error = Failure(error)
else:
if not isinstance(error, Failure):
raise RuntimeError("reject requires a Failure or Exception")
future.errback(error)
def cancel(self, future):
future.cancel()
def create_failure(self, exception=None):
"""
Create a Failure instance.
if ``exception`` is None (the default), we MUST be inside an
"except" block. This encapsulates the exception into an object
that implements IFailedFuture
"""
if exception:
return Failure(exception)
return Failure()
def add_callbacks(self, future, callback, errback):
"""
callback or errback may be None, but at least one must be
non-None.
"""
assert future is not None
if callback is None:
assert errback is not None
future.addErrback(errback)
else:
# Twisted allows errback to be None here
future.addCallbacks(callback, errback)
return future
def gather(self, futures, consume_exceptions=True):
def completed(res):
rtn = []
for (ok, value) in res:
rtn.append(value)
if not ok and not consume_exceptions:
value.raiseException()
return rtn
# XXX if consume_exceptions is False in asyncio.gather(), it will
# abort on the first raised exception -- should we set
# fireOnOneErrback=True (if consume_exceptions=False?) -- but then
# we'll have to wrap the errback() to extract the "real" failure
# from the FirstError that gets thrown if you set that ...
dl = DeferredList(list(futures), consumeErrors=consume_exceptions)
# we unpack the (ok, value) tuples into just a list of values, so
# that the callback() gets the same value in asyncio and Twisted.
add_callbacks(dl, completed, None)
return dl
def sleep(self, delay):
"""
Inline sleep for use in co-routines.
:param delay: Time to sleep in seconds.
:type delay: float
"""
d = Deferred()
self._get_loop().callLater(delay, d.callback, None)
return d
|
crossbario/txaio | txaio/_common.py | _BatchedTimer.call_later | python | def call_later(self, delay, func, *args, **kwargs):
# "quantize" the delay to the nearest bucket
now = self._get_seconds()
real_time = int(now + delay) * 1000
real_time -= int(real_time % self._bucket_milliseconds)
call = _BatchedCall(self, real_time, lambda: func(*args, **kwargs))
try:
self._buckets[real_time][1].append(call)
except KeyError:
# new bucket; need to add "actual" underlying IDelayedCall
diff = (real_time / 1000.0) - now
# we need to clamp this because if we quantized to the
# nearest second, but that second is actually (slightly)
# less than the current time 'diff' will be negative.
delayed_call = self._create_delayed_call(
max(0.0, diff),
self._notify_bucket, real_time,
)
self._buckets[real_time] = (delayed_call, [call])
return call | IBatchedTimer API | train | https://github.com/crossbario/txaio/blob/29c77ff1210cabd4cc03f16f34672612e7eef704/txaio/_common.py#L53-L75 | null | class _BatchedTimer(IBatchedTimer):
"""
Internal helper.
Instances of this are returned from
:meth:`txaio.make_batched_timer` and that is the only way they
should be instantiated. You may depend on methods from the
interface class only (:class:`txaio.IBatchedTimer`)
**NOTE** that the times are in milliseconds in this class!
"""
def __init__(self, bucket_milliseconds, chunk_size,
seconds_provider, delayed_call_creator, loop=None):
if bucket_milliseconds <= 0.0:
raise ValueError(
"bucket_milliseconds must be > 0.0"
)
self._bucket_milliseconds = float(bucket_milliseconds)
self._chunk_size = chunk_size
self._get_seconds = seconds_provider
self._create_delayed_call = delayed_call_creator
self._buckets = dict() # real seconds -> (IDelayedCall, list)
self._loop = loop
def _notify_bucket(self, real_time):
"""
Internal helper. This 'does' the callbacks in a particular bucket.
:param real_time: the bucket to do callbacks on
"""
(delayed_call, calls) = self._buckets[real_time]
del self._buckets[real_time]
errors = []
def notify_one_chunk(calls, chunk_size, chunk_delay_ms):
for call in calls[:chunk_size]:
try:
call()
except Exception as e:
errors.append(e)
calls = calls[chunk_size:]
if calls:
self._create_delayed_call(
chunk_delay_ms / 1000.0,
notify_one_chunk, calls, chunk_size, chunk_delay_ms,
)
else:
# done all calls; make sure there were no errors
if len(errors):
msg = u"Error(s) processing call_later bucket:\n"
for e in errors:
msg += u"{}\n".format(e)
raise RuntimeError(msg)
# ceil()ing because we want the number of chunks, and a
# partial chunk is still a chunk
delay_ms = self._bucket_milliseconds / math.ceil(float(len(calls)) / self._chunk_size)
# I can't imagine any scenario in which chunk_delay_ms is
# actually less than zero, but just being safe here
notify_one_chunk(calls, self._chunk_size, max(0.0, delay_ms))
def _remove_call(self, real_time, call):
"""
Internal helper. Removes a (possibly still pending) call from a
bucket. It is *not* an error of the bucket is gone (e.g. the
call has already happened).
"""
try:
(delayed_call, calls) = self._buckets[real_time]
except KeyError:
# no such bucket ... error? swallow?
return
# remove call; if we're empty, cancel underlying
# bucket-timeout IDelayedCall
calls.remove(call)
if not calls:
del self._buckets[real_time]
delayed_call.cancel()
|
crossbario/txaio | txaio/_common.py | _BatchedTimer._notify_bucket | python | def _notify_bucket(self, real_time):
(delayed_call, calls) = self._buckets[real_time]
del self._buckets[real_time]
errors = []
def notify_one_chunk(calls, chunk_size, chunk_delay_ms):
for call in calls[:chunk_size]:
try:
call()
except Exception as e:
errors.append(e)
calls = calls[chunk_size:]
if calls:
self._create_delayed_call(
chunk_delay_ms / 1000.0,
notify_one_chunk, calls, chunk_size, chunk_delay_ms,
)
else:
# done all calls; make sure there were no errors
if len(errors):
msg = u"Error(s) processing call_later bucket:\n"
for e in errors:
msg += u"{}\n".format(e)
raise RuntimeError(msg)
# ceil()ing because we want the number of chunks, and a
# partial chunk is still a chunk
delay_ms = self._bucket_milliseconds / math.ceil(float(len(calls)) / self._chunk_size)
# I can't imagine any scenario in which chunk_delay_ms is
# actually less than zero, but just being safe here
notify_one_chunk(calls, self._chunk_size, max(0.0, delay_ms)) | Internal helper. This 'does' the callbacks in a particular bucket.
:param real_time: the bucket to do callbacks on | train | https://github.com/crossbario/txaio/blob/29c77ff1210cabd4cc03f16f34672612e7eef704/txaio/_common.py#L77-L111 | null | class _BatchedTimer(IBatchedTimer):
"""
Internal helper.
Instances of this are returned from
:meth:`txaio.make_batched_timer` and that is the only way they
should be instantiated. You may depend on methods from the
interface class only (:class:`txaio.IBatchedTimer`)
**NOTE** that the times are in milliseconds in this class!
"""
def __init__(self, bucket_milliseconds, chunk_size,
seconds_provider, delayed_call_creator, loop=None):
if bucket_milliseconds <= 0.0:
raise ValueError(
"bucket_milliseconds must be > 0.0"
)
self._bucket_milliseconds = float(bucket_milliseconds)
self._chunk_size = chunk_size
self._get_seconds = seconds_provider
self._create_delayed_call = delayed_call_creator
self._buckets = dict() # real seconds -> (IDelayedCall, list)
self._loop = loop
def call_later(self, delay, func, *args, **kwargs):
"""
IBatchedTimer API
"""
# "quantize" the delay to the nearest bucket
now = self._get_seconds()
real_time = int(now + delay) * 1000
real_time -= int(real_time % self._bucket_milliseconds)
call = _BatchedCall(self, real_time, lambda: func(*args, **kwargs))
try:
self._buckets[real_time][1].append(call)
except KeyError:
# new bucket; need to add "actual" underlying IDelayedCall
diff = (real_time / 1000.0) - now
# we need to clamp this because if we quantized to the
# nearest second, but that second is actually (slightly)
# less than the current time 'diff' will be negative.
delayed_call = self._create_delayed_call(
max(0.0, diff),
self._notify_bucket, real_time,
)
self._buckets[real_time] = (delayed_call, [call])
return call
def _remove_call(self, real_time, call):
"""
Internal helper. Removes a (possibly still pending) call from a
bucket. It is *not* an error of the bucket is gone (e.g. the
call has already happened).
"""
try:
(delayed_call, calls) = self._buckets[real_time]
except KeyError:
# no such bucket ... error? swallow?
return
# remove call; if we're empty, cancel underlying
# bucket-timeout IDelayedCall
calls.remove(call)
if not calls:
del self._buckets[real_time]
delayed_call.cancel()
|
crossbario/txaio | txaio/_common.py | _BatchedTimer._remove_call | python | def _remove_call(self, real_time, call):
try:
(delayed_call, calls) = self._buckets[real_time]
except KeyError:
# no such bucket ... error? swallow?
return
# remove call; if we're empty, cancel underlying
# bucket-timeout IDelayedCall
calls.remove(call)
if not calls:
del self._buckets[real_time]
delayed_call.cancel() | Internal helper. Removes a (possibly still pending) call from a
bucket. It is *not* an error of the bucket is gone (e.g. the
call has already happened). | train | https://github.com/crossbario/txaio/blob/29c77ff1210cabd4cc03f16f34672612e7eef704/txaio/_common.py#L113-L129 | null | class _BatchedTimer(IBatchedTimer):
"""
Internal helper.
Instances of this are returned from
:meth:`txaio.make_batched_timer` and that is the only way they
should be instantiated. You may depend on methods from the
interface class only (:class:`txaio.IBatchedTimer`)
**NOTE** that the times are in milliseconds in this class!
"""
def __init__(self, bucket_milliseconds, chunk_size,
seconds_provider, delayed_call_creator, loop=None):
if bucket_milliseconds <= 0.0:
raise ValueError(
"bucket_milliseconds must be > 0.0"
)
self._bucket_milliseconds = float(bucket_milliseconds)
self._chunk_size = chunk_size
self._get_seconds = seconds_provider
self._create_delayed_call = delayed_call_creator
self._buckets = dict() # real seconds -> (IDelayedCall, list)
self._loop = loop
def call_later(self, delay, func, *args, **kwargs):
"""
IBatchedTimer API
"""
# "quantize" the delay to the nearest bucket
now = self._get_seconds()
real_time = int(now + delay) * 1000
real_time -= int(real_time % self._bucket_milliseconds)
call = _BatchedCall(self, real_time, lambda: func(*args, **kwargs))
try:
self._buckets[real_time][1].append(call)
except KeyError:
# new bucket; need to add "actual" underlying IDelayedCall
diff = (real_time / 1000.0) - now
# we need to clamp this because if we quantized to the
# nearest second, but that second is actually (slightly)
# less than the current time 'diff' will be negative.
delayed_call = self._create_delayed_call(
max(0.0, diff),
self._notify_bucket, real_time,
)
self._buckets[real_time] = (delayed_call, [call])
return call
def _notify_bucket(self, real_time):
"""
Internal helper. This 'does' the callbacks in a particular bucket.
:param real_time: the bucket to do callbacks on
"""
(delayed_call, calls) = self._buckets[real_time]
del self._buckets[real_time]
errors = []
def notify_one_chunk(calls, chunk_size, chunk_delay_ms):
for call in calls[:chunk_size]:
try:
call()
except Exception as e:
errors.append(e)
calls = calls[chunk_size:]
if calls:
self._create_delayed_call(
chunk_delay_ms / 1000.0,
notify_one_chunk, calls, chunk_size, chunk_delay_ms,
)
else:
# done all calls; make sure there were no errors
if len(errors):
msg = u"Error(s) processing call_later bucket:\n"
for e in errors:
msg += u"{}\n".format(e)
raise RuntimeError(msg)
# ceil()ing because we want the number of chunks, and a
# partial chunk is still a chunk
delay_ms = self._bucket_milliseconds / math.ceil(float(len(calls)) / self._chunk_size)
# I can't imagine any scenario in which chunk_delay_ms is
# actually less than zero, but just being safe here
notify_one_chunk(calls, self._chunk_size, max(0.0, delay_ms))
|
pyblish/pyblish-lite | pyblish_lite/delegate.py | Item.paint | python | def paint(self, painter, option, index):
body_rect = QtCore.QRectF(option.rect)
check_rect = QtCore.QRectF(body_rect)
check_rect.setWidth(check_rect.height())
check_rect.adjust(6, 6, -6, -6)
check_color = colors["idle"]
if index.data(model.IsProcessing) is True:
check_color = colors["active"]
elif index.data(model.HasFailed) is True:
check_color = colors["warning"]
elif index.data(model.HasSucceeded) is True:
check_color = colors["ok"]
elif index.data(model.HasProcessed) is True:
check_color = colors["ok"]
metrics = painter.fontMetrics()
label_rect = QtCore.QRectF(option.rect.adjusted(
check_rect.width() + 12, 2, 0, -2))
assert label_rect.width() > 0
label = index.data(model.Label)
label = metrics.elidedText(label,
QtCore.Qt.ElideRight,
label_rect.width() - 20)
font_color = colors["idle"]
if not index.data(model.IsChecked):
font_color = colors["inactive"]
# Maintain reference to state, so we can restore it once we're done
painter.save()
# Draw label
painter.setFont(fonts["h4"])
painter.setPen(QtGui.QPen(font_color))
painter.drawText(label_rect, label)
# Draw action icon
if index.data(model.ActionIconVisible):
painter.save()
if index.data(model.ActionIdle):
color = colors["idle"]
elif index.data(model.IsProcessing):
color = colors["active"]
elif index.data(model.ActionFailed):
color = colors["warning"]
else:
color = colors["ok"]
painter.setFont(fonts["smallAwesome"])
painter.setPen(QtGui.QPen(color))
icon_rect = QtCore.QRectF(option.rect.adjusted(
label_rect.width() + 1, label_rect.height() / 3, 0, 0))
painter.drawText(icon_rect, icons["action"])
painter.restore()
# Draw checkbox
pen = QtGui.QPen(check_color, 1)
painter.setPen(pen)
if index.data(model.IsOptional):
painter.drawRect(check_rect)
if index.data(model.IsChecked):
painter.fillRect(check_rect, check_color)
elif not index.data(model.IsIdle) and index.data(model.IsChecked):
painter.fillRect(check_rect, check_color)
if option.state & QtWidgets.QStyle.State_MouseOver:
painter.fillRect(body_rect, colors["hover"])
if option.state & QtWidgets.QStyle.State_Selected:
painter.fillRect(body_rect, colors["selected"])
# Ok, we're done, tidy up.
painter.restore() | Paint checkbox and text
_
|_| My label | train | https://github.com/pyblish/pyblish-lite/blob/9172b81c7ae19a36e99c89dd16e102201992dc20/pyblish_lite/delegate.py#L50-L143 | null | class Item(QtWidgets.QStyledItemDelegate):
"""Generic delegate for model items"""
def sizeHint(self, option, index):
return QtCore.QSize(option.rect.width(), 20)
|
pyblish/pyblish-lite | pyblish_lite/delegate.py | Artist.paint | python | def paint(self, painter, option, index):
# Layout
spacing = 10
metrics = painter.fontMetrics()
body_rect = QtCore.QRectF(option.rect).adjusted(2, 2, -8, -2)
content_rect = body_rect.adjusted(5, 5, -5, -5)
toggle_rect = QtCore.QRectF(body_rect)
toggle_rect.setWidth(7)
toggle_rect.adjust(1, 1, 0, -1)
icon_rect = QtCore.QRectF(content_rect)
icon_rect.translate(toggle_rect.width() + spacing, 3)
icon_rect.setWidth(35)
icon_rect.setHeight(35)
duration_rect = QtCore.QRectF(content_rect)
duration_rect.translate(content_rect.width() - 50, 0)
label_rect = QtCore.QRectF(content_rect)
label_rect.translate(icon_rect.width() +
spacing, 0)
label_rect.setHeight(metrics.lineSpacing() + spacing)
families_rect = QtCore.QRectF(label_rect)
families_rect.translate(0, label_rect.height())
# Colors
check_color = colors["idle"]
if index.data(model.IsProcessing) is True:
check_color = colors["active"]
elif index.data(model.HasFailed) is True:
check_color = colors["warning"]
elif index.data(model.HasSucceeded) is True:
check_color = colors["ok"]
elif index.data(model.HasProcessed) is True:
check_color = colors["ok"]
icon = index.data(model.Icon) or icons["file"]
label = index.data(model.Label)
families = ", ".join(index.data(model.Families))
# Elide
label = metrics.elidedText(label,
QtCore.Qt.ElideRight,
label_rect.width())
families = metrics.elidedText(families,
QtCore.Qt.ElideRight,
label_rect.width())
font_color = colors["idle"]
if not index.data(model.IsChecked):
font_color = colors["inactive"]
# Maintan reference to state, so we can restore it once we're done
painter.save()
# Draw background
painter.fillRect(body_rect, colors["hover"])
painter.setFont(fonts["largeAwesome"])
painter.setPen(QtGui.QPen(font_color))
painter.drawText(icon_rect, icon)
# Draw label
painter.setFont(fonts["h3"])
painter.drawText(label_rect, label)
# Draw families
painter.setFont(fonts["h5"])
painter.setPen(QtGui.QPen(colors["inactive"]))
painter.drawText(families_rect, families)
# Draw checkbox
pen = QtGui.QPen(check_color, 1)
painter.setPen(pen)
if index.data(model.IsOptional):
painter.drawRect(toggle_rect)
if index.data(model.IsChecked):
painter.fillRect(toggle_rect, check_color)
elif not index.data(model.IsIdle) and index.data(model.IsChecked):
painter.fillRect(toggle_rect, check_color)
if option.state & QtWidgets.QStyle.State_MouseOver:
painter.fillRect(body_rect, colors["hover"])
if option.state & QtWidgets.QStyle.State_Selected:
painter.fillRect(body_rect, colors["selected"])
painter.setPen(colors["outline"])
painter.drawRect(body_rect)
# Ok, we're done, tidy up.
painter.restore() | Paint checkbox and text
_________________________________________
| | label | duration |
|toggle |_____________________| |
| | families | |
|_______|_____________________|___________| | train | https://github.com/pyblish/pyblish-lite/blob/9172b81c7ae19a36e99c89dd16e102201992dc20/pyblish_lite/delegate.py#L152-L264 | null | class Artist(QtWidgets.QStyledItemDelegate):
"""Delegate used on Artist page"""
def sizeHint(self, option, index):
return QtCore.QSize(option.rect.width(), 80)
|
pyblish/pyblish-lite | pyblish_lite/delegate.py | Terminal.paint | python | def paint(self, painter, option, index):
icon_rect = QtCore.QRectF(option.rect).adjusted(3, 3, -3, -3)
icon_rect.setWidth(14)
icon_rect.setHeight(14)
icon_color = colors["idle"]
icon = icons[index.data(model.Type)]
if index.data(model.Type) == "record":
icon_color = record_colors[index.data(model.LogLevel)]
elif index.data(model.Type) == "error":
icon_color = colors["warning"]
metrics = painter.fontMetrics()
label_rect = QtCore.QRectF(option.rect.adjusted(
icon_rect.width() + 12, 2, 0, -2))
assert label_rect.width() > 0
label = index.data(model.Label)
label = metrics.elidedText(label,
QtCore.Qt.ElideRight,
label_rect.width() - 20)
font_color = colors["idle"]
hover = QtGui.QPainterPath()
hover.addRect(QtCore.QRectF(option.rect).adjusted(0, 0, -1, -1))
# Maintain reference to state, so we can restore it once we're done
painter.save()
# Draw label
painter.setFont(fonts["h4"])
painter.setPen(QtGui.QPen(font_color))
painter.drawText(label_rect, label)
# Draw icon
painter.setFont(fonts["smallAwesome"])
painter.setPen(QtGui.QPen(icon_color))
painter.drawText(icon_rect, QtCore.Qt.AlignCenter, icon)
if option.state & QtWidgets.QStyle.State_MouseOver:
painter.fillPath(hover, colors["hover"])
if option.state & QtWidgets.QStyle.State_Selected:
painter.fillPath(hover, colors["selected"])
# Ok, we're done, tidy up.
painter.restore() | Paint text | train | https://github.com/pyblish/pyblish-lite/blob/9172b81c7ae19a36e99c89dd16e102201992dc20/pyblish_lite/delegate.py#L273-L326 | null | class Terminal(QtWidgets.QStyledItemDelegate):
"""Delegate used exclusively for the Terminal"""
def sizeHint(self, option, index):
return QtCore.QSize(option.rect.width(), 20)
|
pyblish/pyblish-lite | pyblish_lite/model.py | Abstract.append | python | def append(self, item):
self.beginInsertRows(QtCore.QModelIndex(),
self.rowCount(),
self.rowCount())
self.items.append(item)
self.endInsertRows() | Append item to end of model | train | https://github.com/pyblish/pyblish-lite/blob/9172b81c7ae19a36e99c89dd16e102201992dc20/pyblish_lite/model.py#L101-L108 | null | class Abstract(QtCore.QAbstractListModel):
def __iter__(self):
"""Yield each row of model"""
for index in range(len(self.items)):
yield self.createIndex(index, 0)
def data(self, index, role):
if role == Object:
return self.items[index.row()]
def rowCount(self, parent=None):
return len(self.items)
def reset(self):
self.beginResetModel()
self.items[:] = []
self.endResetModel()
def update_with_result(self, result):
pass
|
pyblish/pyblish-lite | pyblish_lite/model.py | ProxyModel.filterAcceptsRow | python | def filterAcceptsRow(self, source_row, source_parent):
model = self.sourceModel()
item = model.items[source_row]
key = getattr(item, "filter", None)
if key is not None:
regex = self.filterRegExp()
if regex.pattern():
match = regex.indexIn(key)
return False if match == -1 else True
# --- Check if any family assigned to the plugin is in allowed families
for role, values in self.includes.items():
includes_list = [([x] if isinstance(x, (list, tuple)) else x)
for x in getattr(item, role, None)]
return any(include in values for include in includes_list)
for role, values in self.excludes.items():
data = getattr(item, role, None)
if data in values:
return False
return super(ProxyModel, self).filterAcceptsRow(
source_row, source_parent) | Exclude items in `self.excludes` | train | https://github.com/pyblish/pyblish-lite/blob/9172b81c7ae19a36e99c89dd16e102201992dc20/pyblish_lite/model.py#L636-L659 | null | class ProxyModel(QtCore.QSortFilterProxyModel):
"""A QSortFilterProxyModel with custom exclude and include rules
Role may be either an integer or string, and each
role may include multiple values.
Example:
>>> # Exclude any item whose role 123 equals "Abc"
>>> model = ProxyModel(None)
>>> model.add_exclusion(role=123, value="Abc")
>>> # Exclude multiple values
>>> model.add_exclusion(role="name", value="Pontus")
>>> model.add_exclusion(role="name", value="Richard")
>>> # Exclude amongst includes
>>> model.add_inclusion(role="type", value="PluginItem")
>>> model.add_exclusion(role="name", value="Richard")
"""
def __init__(self, source, parent=None):
super(ProxyModel, self).__init__(parent)
self.setSourceModel(source)
self.excludes = dict()
self.includes = {'families': ['*']}
def item(self, index):
index = self.index(index, 0, QtCore.QModelIndex())
index = self.mapToSource(index)
model = self.sourceModel()
return model.items[index.row()]
def reset(self):
self.beginResetModel()
self.includes = {'families': ['*']}
self.endResetModel()
def add_exclusion(self, role, value):
"""Exclude item if `role` equals `value`
Attributes:
role (int, string): Qt role or name to compare `value` to
value (object): Value to exclude
"""
self._add_rule(self.excludes, role, value)
def remove_exclusion(self, role, value=None):
"""Remove exclusion rule
Arguments:
role (int, string): Qt role or name to remove
value (object, optional): Value to remove. If none
is supplied, the entire role will be removed.
"""
self._remove_rule(self.excludes, role, value)
def set_exclusion(self, rules):
"""Set excludes
Replaces existing excludes with those in `rules`
Arguments:
rules (list): Tuples of (role, value)
"""
self._set_rules(self.excludes, rules)
def clear_exclusion(self):
self._clear_group(self.excludes)
def add_inclusion(self, role, value):
"""Include item if `role` equals `value`
Attributes:
role (int): Qt role to compare `value` to
value (object): Value to exclude
"""
self._add_rule(self.includes, role, value)
def remove_inclusion(self, role, value=None):
"""Remove exclusion rule"""
self._remove_rule(self.includes, role, value)
def set_inclusion(self, rules):
self._set_rules(self.includes, rules)
def clear_inclusion(self):
self._clear_group(self.includes)
def _add_rule(self, group, role, value):
"""Implementation detail"""
if role not in group:
group[role] = list()
group[role].append(value)
self.invalidate()
def _remove_rule(self, group, role, value=None):
"""Implementation detail"""
if role not in group:
return
if value is None:
group.pop(role, None)
else:
group[role].remove(value)
self.invalidate()
def _set_rules(self, group, rules):
"""Implementation detail"""
group.clear()
for rule in rules:
self._add_rule(group, *rule)
self.invalidate()
def _clear_group(self, group):
group.clear()
self.invalidate()
# Overridden methods
def rowCount(self, parent=QtCore.QModelIndex()):
return super(ProxyModel, self).rowCount(parent)
|
pyblish/pyblish-lite | pyblish_lite/window.py | Window.on_item_toggled | python | def on_item_toggled(self, index, state=None):
if not index.data(model.IsIdle):
return self.info("Cannot toggle")
if not index.data(model.IsOptional):
return self.info("This item is mandatory")
if state is None:
state = not index.data(model.IsChecked)
index.model().setData(index, state, model.IsChecked)
# Withdraw option to publish if no instances are toggled
play = self.findChild(QtWidgets.QWidget, "Play")
validate = self.findChild(QtWidgets.QWidget, "Validate")
any_instances = any(index.data(model.IsChecked)
for index in self.data["models"]["instances"])
play.setEnabled(any_instances)
validate.setEnabled(any_instances)
# Emit signals
if index.data(model.Type) == "instance":
instance = self.data["models"]["instances"].items[index.row()]
util.defer(
100, lambda: self.controller.emit_(
signal="instanceToggled",
kwargs={"new_value": state,
"old_value": not state,
"instance": instance}))
if index.data(model.Type) == "plugin":
util.defer(
100, lambda: self.controller.emit_(
signal="pluginToggled",
kwargs={"new_value": state,
"old_value": not state,
"plugin": index.data(model.Object)})) | An item is requesting to be toggled | train | https://github.com/pyblish/pyblish-lite/blob/9172b81c7ae19a36e99c89dd16e102201992dc20/pyblish_lite/window.py#L622-L659 | null | class Window(QtWidgets.QDialog):
def __init__(self, controller, parent=None):
super(Window, self).__init__(parent)
icon = QtGui.QIcon(util.get_asset("img", "logo-extrasmall.png"))
self.setWindowFlags(self.windowFlags() |
QtCore.Qt.WindowTitleHint |
QtCore.Qt.WindowMaximizeButtonHint |
QtCore.Qt.WindowMinimizeButtonHint |
QtCore.Qt.WindowCloseButtonHint)
self.setWindowIcon(icon)
self.setAttribute(QtCore.Qt.WA_DeleteOnClose)
self.controller = controller
"""General layout
__________________ _____________________
| | | | | |
| Header | --> | Tab | Tab | Tab |
|__________________| |_______|_______|_____|
| | _____________________
| | | |
| | | |
| Body | | |
| | --> | Page |
| | | |
| | |_____________________|
|__________________| _____________________
| | | | |
| Footer | | Status | Buttons |
|__________________| |___________|_________|
"""
header = QtWidgets.QWidget()
artist_tab = QtWidgets.QRadioButton()
overview_tab = QtWidgets.QRadioButton()
terminal_tab = QtWidgets.QRadioButton()
spacer = QtWidgets.QWidget()
layout = QtWidgets.QHBoxLayout(header)
layout.addWidget(artist_tab, 0)
layout.addWidget(overview_tab, 0)
layout.addWidget(terminal_tab, 0)
layout.addWidget(spacer, 1) # Compress items to the left
layout.setContentsMargins(0, 0, 0, 0)
layout.setSpacing(0)
"""Artist Page
__________________
| |
| | ------------ |
| | ----- |
| |
| | -------- |
| | ------- |
| |
|__________________|
"""
artist_page = QtWidgets.QWidget()
artist_view = view.Item()
artist_delegate = delegate.Artist()
artist_view.setItemDelegate(artist_delegate)
layout = QtWidgets.QVBoxLayout(artist_page)
layout.addWidget(artist_view)
layout.setContentsMargins(5, 5, 5, 5)
layout.setSpacing(0)
"""Overview Page
___________________
| |
| o ----- o---- |
| o ---- o--- |
| o ---- o---- |
| o ---- o------ |
| |
|__________________|
"""
overview_page = QtWidgets.QWidget()
left_view = view.Item()
right_view = view.Item()
item_delegate = delegate.Item()
left_view.setItemDelegate(item_delegate)
right_view.setItemDelegate(item_delegate)
layout = QtWidgets.QHBoxLayout(overview_page)
layout.addWidget(left_view, 1)
layout.addWidget(right_view, 1)
layout.setContentsMargins(5, 5, 5, 5)
layout.setSpacing(0)
"""Terminal
__________________
| |
| \ |
| \ |
| / |
| / ______ |
| |
|__________________|
"""
terminal_container = QtWidgets.QWidget()
terminal_delegate = delegate.Terminal()
terminal_view = view.LogView()
terminal_view.setItemDelegate(terminal_delegate)
layout = QtWidgets.QVBoxLayout(terminal_container)
layout.addWidget(terminal_view)
layout.setContentsMargins(5, 5, 5, 5)
layout.setSpacing(0)
terminal_footer = QtWidgets.QWidget()
search_box = QtWidgets.QLineEdit()
instance_combo = QtWidgets.QComboBox()
plugin_combo = QtWidgets.QComboBox()
show_errors = QtWidgets.QCheckBox()
show_records = QtWidgets.QCheckBox()
show_debug = QtWidgets.QCheckBox()
show_info = QtWidgets.QCheckBox()
show_warning = QtWidgets.QCheckBox()
show_error = QtWidgets.QCheckBox()
show_critical = QtWidgets.QCheckBox()
layout = QtWidgets.QHBoxLayout(terminal_footer)
for w in (search_box,
instance_combo,
plugin_combo,
show_errors,
show_records,
show_debug,
show_info,
show_warning,
show_error,
show_critical):
layout.addWidget(w)
layout.setContentsMargins(0, 0, 0, 0)
layout.setSpacing(3)
terminal_page = QtWidgets.QWidget()
layout = QtWidgets.QVBoxLayout(terminal_page)
layout.addWidget(terminal_container)
# layout.addWidget(terminal_footer) # TODO
layout.setContentsMargins(0, 0, 0, 0)
layout.setSpacing(0)
# Add some room between window borders and contents
body = QtWidgets.QWidget()
layout = QtWidgets.QHBoxLayout(body)
layout.setContentsMargins(5, 5, 5, 0)
layout.addWidget(artist_page)
layout.addWidget(overview_page)
layout.addWidget(terminal_page)
"""Comment Box
____________________________
|> My comment |
| |
|____________________________|
"""
comment_box = QtWidgets.QLineEdit()
comment_placeholder = QtWidgets.QLabel(
self.tr("Comment.."), comment_box)
comment_placeholder.move(2, 2)
comment_box.setEnabled(False)
comment_box.hide()
"""Details View
____________________________
| |
| An Item 23 ms |
| - family |
| |
|----------------------------|
| |
| Docstring |
|____________________________|
"""
details = view.Details(self)
"""Footer
______________________
| ___ ___ |
| | o || > ||
| |___||___||
|______________________|
"""
footer = QtWidgets.QWidget()
info = QtWidgets.QLabel()
spacer = QtWidgets.QWidget()
reset = QtWidgets.QPushButton(awesome["refresh"])
validate = QtWidgets.QPushButton(awesome["flask"])
play = QtWidgets.QPushButton(awesome["play"])
stop = QtWidgets.QPushButton(awesome["stop"])
layout = QtWidgets.QHBoxLayout(footer)
layout.setContentsMargins(5, 5, 5, 5)
layout.addWidget(info, 0)
layout.addWidget(spacer, 1)
layout.addWidget(reset, 0)
layout.addWidget(validate, 0)
layout.addWidget(play, 0)
layout.addWidget(stop, 0)
# Placeholder for when GUI is closing
# TODO(marcus): Fade to black and the the user about what's happening
closing_placeholder = QtWidgets.QWidget(self)
closing_placeholder.setSizePolicy(QtWidgets.QSizePolicy.Expanding,
QtWidgets.QSizePolicy.Expanding)
closing_placeholder.hide()
# Main layout
layout = QtWidgets.QVBoxLayout(self)
layout.addWidget(header, 0)
layout.addWidget(body, 3)
layout.addWidget(closing_placeholder, 1)
layout.addWidget(comment_box, 0)
layout.addWidget(footer, 0)
layout.setContentsMargins(0, 0, 0, 0)
layout.setSpacing(0)
"""Animation
___
/ \
| | ___
\___/ / \
___ | |
/ \ \___/
| |
\___/
"""
# Display info
info_effect = QtWidgets.QGraphicsOpacityEffect(info)
info.setGraphicsEffect(info_effect)
timeline = QtCore.QSequentialAnimationGroup()
on = QtCore.QPropertyAnimation(info_effect, b"opacity")
on.setDuration(0)
on.setStartValue(0)
on.setEndValue(1)
off = QtCore.QPropertyAnimation(info_effect, b"opacity")
off.setDuration(0)
off.setStartValue(1)
off.setEndValue(0)
fade = QtCore.QPropertyAnimation(info_effect, b"opacity")
fade.setDuration(500)
fade.setStartValue(1.0)
fade.setEndValue(0.0)
timeline.addAnimation(on)
timeline.addPause(50)
timeline.addAnimation(off)
timeline.addPause(50)
timeline.addAnimation(on)
timeline.addPause(2000)
timeline.addAnimation(fade)
info_animation = timeline
"""Setup
Widgets are referred to in CSS via their object-name. We
use the same mechanism internally to refer to objects; so rather
than storing widgets as self.my_widget, it is referred to as:
>>> my_widget = self.findChild(QtWidgets.QWidget, "MyWidget")
This way there is only ever a single method of referring to any widget.
___
| |
/\/ \/\
/ _ \
\ / \ /
| | | |
/ \_/ \
\ /
\/\ /\/
|___|
"""
instance_model = model.Instance()
plugin_model = model.Plugin()
terminal_model = model.Terminal()
filter_model = model.ProxyModel(plugin_model)
artist_view.setModel(instance_model)
left_view.setModel(instance_model)
right_view.setModel(filter_model)
terminal_view.setModel(terminal_model)
instance_combo.setModel(instance_model)
plugin_combo.setModel(plugin_model)
names = {
# Main
"Header": header,
"Body": body,
"Footer": footer,
"Info": info,
# Modals
"Details": details,
# Pages
"Artist": artist_page,
"Overview": overview_page,
"Terminal": terminal_page,
# Tabs
"ArtistTab": artist_tab,
"OverviewTab": overview_tab,
"TerminalTab": terminal_tab,
# Buttons
"Play": play,
"Validate": validate,
"Reset": reset,
"Stop": stop,
# Misc
"CommentBox": comment_box,
"CommentPlaceholder": comment_placeholder,
"ClosingPlaceholder": closing_placeholder,
}
for name, w in names.items():
w.setObjectName(name)
# Enable CSS on plain QWidget objects
for w in (header,
body,
artist_page,
comment_box,
overview_page,
terminal_page,
footer,
play,
validate,
stop,
details,
reset,
closing_placeholder):
w.setAttribute(QtCore.Qt.WA_StyledBackground)
self.data = {
"views": {
"artist": artist_view,
"left": left_view,
"right": right_view,
"terminal": terminal_view,
},
"modals": {
"details": details,
},
"models": {
"instances": instance_model,
"plugins": plugin_model,
"filter": filter_model,
"terminal": terminal_model,
},
"terminal_toggles": {
"record": show_records,
"debug": show_debug,
"info": show_info,
"warning": show_warning,
"error": show_error,
"critical": show_critical
},
"tabs": {
"artist": artist_tab,
"overview": overview_tab,
"terminal": terminal_tab,
"current": "artist"
},
"pages": {
"artist": artist_page,
"overview": overview_page,
"terminal": terminal_page,
},
"buttons": {
"play": play,
"validate": validate,
"stop": stop,
"reset": reset
},
"animation": {
"display_info": info_animation,
},
"state": {
"is_closing": False,
}
}
# Pressing Enter defaults to Play
play.setFocus()
"""Signals
________ ________
|________|-->|________|
|
|
___v____
|________|
"""
artist_tab.toggled.connect(
lambda: self.on_tab_changed("artist"))
overview_tab.toggled.connect(
lambda: self.on_tab_changed("overview"))
terminal_tab.toggled.connect(
lambda: self.on_tab_changed("terminal"))
controller.was_reset.connect(self.on_was_reset)
controller.was_validated.connect(self.on_was_validated)
controller.was_published.connect(self.on_was_published)
controller.was_acted.connect(self.on_was_acted)
controller.was_finished.connect(self.on_finished)
# Discovery happens synchronously during reset, that's
# why it's important that this connection is triggered
# right away.
controller.was_discovered.connect(self.on_was_discovered,
QtCore.Qt.DirectConnection)
# This is called synchronously on each process
controller.was_processed.connect(self.on_was_processed,
QtCore.Qt.DirectConnection)
# NOTE: Listeners to this signal are run in the main thread
controller.about_to_process.connect(self.on_about_to_process,
QtCore.Qt.DirectConnection)
artist_view.toggled.connect(self.on_item_toggled)
left_view.toggled.connect(self.on_item_toggled)
right_view.toggled.connect(self.on_item_toggled)
artist_view.inspected.connect(self.on_item_inspected)
left_view.inspected.connect(self.on_item_inspected)
right_view.inspected.connect(self.on_item_inspected)
terminal_view.inspected.connect(self.on_item_inspected)
reset.clicked.connect(self.on_reset_clicked)
validate.clicked.connect(self.on_validate_clicked)
play.clicked.connect(self.on_play_clicked)
stop.clicked.connect(self.on_stop_clicked)
comment_box.textChanged.connect(self.on_comment_entered)
comment_box.returnPressed.connect(self.on_play_clicked)
right_view.customContextMenuRequested.connect(
self.on_plugin_action_menu_requested)
for box in (show_errors,
show_records,
show_debug,
show_info,
show_warning,
show_error,
show_critical):
box.setChecked(True)
self.data["tabs"][settings.InitialTab].setChecked(True)
# -------------------------------------------------------------------------
#
# Event handlers
#
# -------------------------------------------------------------------------
def on_item_expanded(self, index, state):
if not index.data(model.IsExpandable):
return
if state is None:
state = not index.data(model.Expanded)
# Collapse others
for i in index.model():
index.model().setData(i, False, model.Expanded)
index.model().setData(index, state, model.Expanded)
def on_item_inspected(self, index):
details = self.data["modals"]["details"]
details.move(QtGui.QCursor.pos())
if index.data(model.Type) == "record":
# Compose available data
data = list()
for key, value in index.data(model.Data).items():
if key.startswith("_"):
continue
data.append("%s %s" % ((key + ":").ljust(12), value))
text = "\n".join(data)
details.show({
"icon": awesome["circle"],
"heading": index.data(model.Label).split("\n")[0],
"subheading": "LogRecord (%s)" % index.data(model.LogLevel),
"text": text,
"timestamp": "",
})
elif index.data(model.Type) == "error":
# Compose available data
data = list()
for key, value in index.data(model.Data).items():
if key.startswith("_"):
continue
data.append("%s %s" % ((key + ":").ljust(12), value))
text = "\n".join(data)
details.show({
"icon": awesome["exclamation-triangle"],
"heading": index.data(model.Label).split("\n")[0],
"subheading": "Exception",
"text": text,
"timestamp": "",
})
elif index.data(model.Type) == "plugin":
details.show({
"icon": index.data(model.Icon) or awesome["filter"],
"heading": index.data(model.Label),
"subheading": ", ".join(index.data(model.Families)),
"text": index.data(model.Docstring) or "",
"timestamp": str(index.data(model.Duration) or 0) + " ms",
})
elif index.data(model.Type) == "instance":
details.show({
"icon": index.data(model.Icon) or awesome["file"],
"heading": index.data(model.Label),
"subheading": ", ".join(index.data(model.Families)),
"text": "",
"timestamp": str(index.data(model.Duration) or 0) + " ms",
})
def on_tab_changed(self, target):
for page in self.data["pages"].values():
page.hide()
page = self.data["pages"][target]
comment_box = self.findChild(QtWidgets.QWidget, "CommentBox")
if target == "terminal":
comment_box.hide()
else:
comment_box.setVisible(comment_box.isEnabled())
page.show()
self.data["tabs"]["current"] = target
def on_validate_clicked(self):
comment_box = self.findChild(QtWidgets.QWidget, "CommentBox")
comment_box.setEnabled(False)
comment_box.hide()
self.validate()
def on_play_clicked(self):
comment_box = self.findChild(QtWidgets.QWidget, "CommentBox")
comment_box.setEnabled(False)
comment_box.hide()
self.publish()
def on_reset_clicked(self):
self.reset()
def on_stop_clicked(self):
self.info("Stopping..")
self.controller.is_running = False
def on_comment_entered(self):
"""The user has typed a comment"""
text_edit = self.findChild(QtWidgets.QWidget, "CommentBox")
comment = text_edit.text()
# Store within context
context = self.controller.context
context.data["comment"] = comment
placeholder = self.findChild(QtWidgets.QLabel, "CommentPlaceholder")
placeholder.setVisible(not comment)
def on_about_to_process(self, plugin, instance):
"""Reflect currently running pair in GUI"""
if instance is not None:
instance_model = self.data["models"]["instances"]
index = instance_model.items.index(instance)
index = instance_model.createIndex(index, 0)
instance_model.setData(index, True, model.IsProcessing)
plugin_model = self.data["models"]["plugins"]
index = plugin_model.items.index(plugin)
index = plugin_model.createIndex(index, 0)
plugin_model.setData(index, True, model.IsProcessing)
self.info("%s %s" % (self.tr("Processing"), index.data(model.Label)))
def on_plugin_action_menu_requested(self, pos):
"""The user right-clicked on a plug-in
__________
| |
| Action 1 |
| Action 2 |
| Action 3 |
| |
|__________|
"""
index = self.data["views"]["right"].indexAt(pos)
actions = index.data(model.Actions)
if not actions:
return
menu = QtWidgets.QMenu(self)
plugins_index = self.data["models"]["filter"].mapToSource(index)
plugin = self.data["models"]["plugins"].items[plugins_index.row()]
print("plugin is: %s" % plugin)
for action in actions:
qaction = QtWidgets.QAction(action.label or action.__name__, self)
qaction.triggered.connect(partial(self.act, plugin, action))
menu.addAction(qaction)
menu.popup(self.data["views"]["right"].viewport().mapToGlobal(pos))
def on_was_discovered(self):
models = self.data["models"]
for Plugin in self.controller.plugins:
models["plugins"].append(Plugin)
def on_was_reset(self):
models = self.data["models"]
self.info(self.tr("Finishing up reset.."))
models["instances"].reset()
for instance in self.controller.context:
models["instances"].append(instance)
buttons = self.data["buttons"]
buttons["play"].show()
buttons["validate"].show()
buttons["reset"].show()
buttons["stop"].hide()
models["instances"].restore_checkstate()
models["plugins"].restore_checkstate()
# Append placeholder comment from Context
# This allows users to inject a comment from elsewhere,
# or to perhaps provide a placeholder comment/template
# for artists to fill in.
comment = self.controller.context.data.get("comment")
comment_box = self.findChild(QtWidgets.QWidget, "CommentBox")
comment_box.setText(comment or None)
comment_box.setEnabled(comment is not None)
# Refresh tab
self.on_tab_changed(self.data["tabs"]["current"])
self.controller.current_error = None
self.on_finished()
def on_was_validated(self):
plugin_model = self.data["models"]["plugins"]
instance_model = self.data["models"]["instances"]
for index in plugin_model:
index.model().setData(index, False, model.IsIdle)
for index in instance_model:
index.model().setData(index, False, model.IsIdle)
buttons = self.data["buttons"]
buttons["reset"].show()
buttons["play"].show()
buttons["stop"].hide()
self.on_finished()
def on_was_published(self):
plugin_model = self.data["models"]["plugins"]
instance_model = self.data["models"]["instances"]
for index in plugin_model:
index.model().setData(index, False, model.IsIdle)
for index in instance_model:
index.model().setData(index, False, model.IsIdle)
buttons = self.data["buttons"]
buttons["reset"].show()
buttons["stop"].hide()
comment_box = self.findChild(QtWidgets.QWidget, "CommentBox")
comment_box.hide()
self.on_finished()
def on_was_processed(self, result):
models = self.data["models"]
for instance in self.controller.context:
if instance.id not in models["instances"].ids:
models["instances"].append(instance)
family = instance.data["family"]
if family:
plugins_filter = self.data["models"]["filter"]
plugins_filter.add_inclusion(role="families", value=family)
families = instance.data.get("families")
if families:
for f in families:
plugins_filter = self.data["models"]["filter"]
plugins_filter.add_inclusion(role="families", value=f)
models["plugins"].update_with_result(result)
models["instances"].update_with_result(result)
models["terminal"].update_with_result(result)
def on_was_acted(self, result):
buttons = self.data["buttons"]
buttons["reset"].show()
buttons["stop"].hide()
# Update action with result
model_ = self.data["models"]["plugins"]
index = model_.items.index(result["plugin"])
index = model_.createIndex(index, 0)
model_.setData(index, not result["success"], model.ActionFailed)
model_.setData(index, False, model.IsProcessing)
models = self.data["models"]
models["terminal"].update_with_result(result)
self.on_finished()
def on_finished(self):
"""Finished signal handler"""
self.controller.is_running = False
error = self.controller.current_error
if error is not None:
self.info(self.tr("Stopped due to error(s), see Terminal."))
else:
self.info(self.tr("Finished successfully!"))
# -------------------------------------------------------------------------
#
# Functions
#
# -------------------------------------------------------------------------
def reset(self):
"""Prepare GUI for reset"""
self.info(self.tr("About to reset.."))
models = self.data["models"]
models["instances"].store_checkstate()
models["plugins"].store_checkstate()
# Reset current ids to secure no previous instances get mixed in.
models["instances"].ids = []
for m in models.values():
m.reset()
for b in self.data["buttons"].values():
b.hide()
comment_box = self.findChild(QtWidgets.QWidget, "CommentBox")
comment_box.hide()
util.defer(500, self.controller.reset)
def validate(self):
self.info(self.tr("Preparing validate.."))
for button in self.data["buttons"].values():
button.hide()
self.data["buttons"]["stop"].show()
util.defer(5, self.controller.validate)
def publish(self):
self.info(self.tr("Preparing publish.."))
for button in self.data["buttons"].values():
button.hide()
self.data["buttons"]["stop"].show()
util.defer(5, self.controller.publish)
def act(self, plugin, action):
self.info("%s %s.." % (self.tr("Preparing"), action))
for button in self.data["buttons"].values():
button.hide()
self.data["buttons"]["stop"].show()
self.controller.is_running = True
# Cause view to update, but it won't visually
# happen until Qt is given time to idle..
model_ = self.data["models"]["plugins"]
index = model_.items.index(plugin)
index = model_.createIndex(index, 0)
for key, value in {model.ActionIdle: False,
model.ActionFailed: False,
model.IsProcessing: True}.items():
model_.setData(index, value, key)
# Give Qt time to draw
util.defer(100, lambda: self.controller.act(plugin, action))
self.info(self.tr("Action prepared."))
def closeEvent(self, event):
"""Perform post-flight checks before closing
Make sure processing of any kind is wrapped up before closing
"""
# Make it snappy, but take care to clean it all up.
# TODO(marcus): Enable GUI to return on problem, such
# as asking whether or not the user really wants to quit
# given there are things currently running.
self.hide()
if self.data["state"]["is_closing"]:
# Explicitly clear potentially referenced data
self.info(self.tr("Cleaning up models.."))
for v in self.data["views"].values():
v.model().deleteLater()
v.setModel(None)
self.info(self.tr("Cleaning up terminal.."))
for item in self.data["models"]["terminal"].items:
del(item)
self.info(self.tr("Cleaning up controller.."))
self.controller.cleanup()
self.info(self.tr("All clean!"))
self.info(self.tr("Good bye"))
return super(Window, self).closeEvent(event)
self.info(self.tr("Closing.."))
def on_problem():
self.heads_up("Warning", "Had trouble closing down. "
"Please tell someone and try again.")
self.show()
if self.controller.is_running:
self.info(self.tr("..as soon as processing is finished.."))
self.controller.is_running = False
self.finished.connect(self.close)
util.defer(2000, on_problem)
return event.ignore()
self.data["state"]["is_closing"] = True
util.defer(200, self.close)
return event.ignore()
def reject(self):
"""Handle ESC key"""
if self.controller.is_running:
self.info(self.tr("Stopping.."))
self.controller.is_running = False
# -------------------------------------------------------------------------
#
# Feedback
#
# -------------------------------------------------------------------------
def info(self, message):
"""Print user-facing information
Arguments:
message (str): Text message for the user
"""
info = self.findChild(QtWidgets.QLabel, "Info")
info.setText(message)
# Include message in terminal
self.data["models"]["terminal"].append({
"label": message,
"type": "info"
})
animation = self.data["animation"]["display_info"]
animation.stop()
animation.start()
# TODO(marcus): Should this be configurable? Do we want
# the shell to fill up with these messages?
util.u_print(message)
def warning(self, message):
"""Block processing and print warning until user hits "Continue"
Arguments:
message (str): Message to display
"""
# TODO(marcus): Implement this.
self.info(message)
def heads_up(self, title, message, command=None):
"""Provide a front-and-center message with optional command
Arguments:
title (str): Bold and short message
message (str): Extended message
command (optional, callable): Function is provided as a button
"""
# TODO(marcus): Implement this.
self.info(message)
|
pyblish/pyblish-lite | pyblish_lite/window.py | Window.on_comment_entered | python | def on_comment_entered(self):
text_edit = self.findChild(QtWidgets.QWidget, "CommentBox")
comment = text_edit.text()
# Store within context
context = self.controller.context
context.data["comment"] = comment
placeholder = self.findChild(QtWidgets.QLabel, "CommentPlaceholder")
placeholder.setVisible(not comment) | The user has typed a comment | train | https://github.com/pyblish/pyblish-lite/blob/9172b81c7ae19a36e99c89dd16e102201992dc20/pyblish_lite/window.py#L697-L707 | null | class Window(QtWidgets.QDialog):
def __init__(self, controller, parent=None):
super(Window, self).__init__(parent)
icon = QtGui.QIcon(util.get_asset("img", "logo-extrasmall.png"))
self.setWindowFlags(self.windowFlags() |
QtCore.Qt.WindowTitleHint |
QtCore.Qt.WindowMaximizeButtonHint |
QtCore.Qt.WindowMinimizeButtonHint |
QtCore.Qt.WindowCloseButtonHint)
self.setWindowIcon(icon)
self.setAttribute(QtCore.Qt.WA_DeleteOnClose)
self.controller = controller
"""General layout
__________________ _____________________
| | | | | |
| Header | --> | Tab | Tab | Tab |
|__________________| |_______|_______|_____|
| | _____________________
| | | |
| | | |
| Body | | |
| | --> | Page |
| | | |
| | |_____________________|
|__________________| _____________________
| | | | |
| Footer | | Status | Buttons |
|__________________| |___________|_________|
"""
header = QtWidgets.QWidget()
artist_tab = QtWidgets.QRadioButton()
overview_tab = QtWidgets.QRadioButton()
terminal_tab = QtWidgets.QRadioButton()
spacer = QtWidgets.QWidget()
layout = QtWidgets.QHBoxLayout(header)
layout.addWidget(artist_tab, 0)
layout.addWidget(overview_tab, 0)
layout.addWidget(terminal_tab, 0)
layout.addWidget(spacer, 1) # Compress items to the left
layout.setContentsMargins(0, 0, 0, 0)
layout.setSpacing(0)
"""Artist Page
__________________
| |
| | ------------ |
| | ----- |
| |
| | -------- |
| | ------- |
| |
|__________________|
"""
artist_page = QtWidgets.QWidget()
artist_view = view.Item()
artist_delegate = delegate.Artist()
artist_view.setItemDelegate(artist_delegate)
layout = QtWidgets.QVBoxLayout(artist_page)
layout.addWidget(artist_view)
layout.setContentsMargins(5, 5, 5, 5)
layout.setSpacing(0)
"""Overview Page
___________________
| |
| o ----- o---- |
| o ---- o--- |
| o ---- o---- |
| o ---- o------ |
| |
|__________________|
"""
overview_page = QtWidgets.QWidget()
left_view = view.Item()
right_view = view.Item()
item_delegate = delegate.Item()
left_view.setItemDelegate(item_delegate)
right_view.setItemDelegate(item_delegate)
layout = QtWidgets.QHBoxLayout(overview_page)
layout.addWidget(left_view, 1)
layout.addWidget(right_view, 1)
layout.setContentsMargins(5, 5, 5, 5)
layout.setSpacing(0)
"""Terminal
__________________
| |
| \ |
| \ |
| / |
| / ______ |
| |
|__________________|
"""
terminal_container = QtWidgets.QWidget()
terminal_delegate = delegate.Terminal()
terminal_view = view.LogView()
terminal_view.setItemDelegate(terminal_delegate)
layout = QtWidgets.QVBoxLayout(terminal_container)
layout.addWidget(terminal_view)
layout.setContentsMargins(5, 5, 5, 5)
layout.setSpacing(0)
terminal_footer = QtWidgets.QWidget()
search_box = QtWidgets.QLineEdit()
instance_combo = QtWidgets.QComboBox()
plugin_combo = QtWidgets.QComboBox()
show_errors = QtWidgets.QCheckBox()
show_records = QtWidgets.QCheckBox()
show_debug = QtWidgets.QCheckBox()
show_info = QtWidgets.QCheckBox()
show_warning = QtWidgets.QCheckBox()
show_error = QtWidgets.QCheckBox()
show_critical = QtWidgets.QCheckBox()
layout = QtWidgets.QHBoxLayout(terminal_footer)
for w in (search_box,
instance_combo,
plugin_combo,
show_errors,
show_records,
show_debug,
show_info,
show_warning,
show_error,
show_critical):
layout.addWidget(w)
layout.setContentsMargins(0, 0, 0, 0)
layout.setSpacing(3)
terminal_page = QtWidgets.QWidget()
layout = QtWidgets.QVBoxLayout(terminal_page)
layout.addWidget(terminal_container)
# layout.addWidget(terminal_footer) # TODO
layout.setContentsMargins(0, 0, 0, 0)
layout.setSpacing(0)
# Add some room between window borders and contents
body = QtWidgets.QWidget()
layout = QtWidgets.QHBoxLayout(body)
layout.setContentsMargins(5, 5, 5, 0)
layout.addWidget(artist_page)
layout.addWidget(overview_page)
layout.addWidget(terminal_page)
"""Comment Box
____________________________
|> My comment |
| |
|____________________________|
"""
comment_box = QtWidgets.QLineEdit()
comment_placeholder = QtWidgets.QLabel(
self.tr("Comment.."), comment_box)
comment_placeholder.move(2, 2)
comment_box.setEnabled(False)
comment_box.hide()
"""Details View
____________________________
| |
| An Item 23 ms |
| - family |
| |
|----------------------------|
| |
| Docstring |
|____________________________|
"""
details = view.Details(self)
"""Footer
______________________
| ___ ___ |
| | o || > ||
| |___||___||
|______________________|
"""
footer = QtWidgets.QWidget()
info = QtWidgets.QLabel()
spacer = QtWidgets.QWidget()
reset = QtWidgets.QPushButton(awesome["refresh"])
validate = QtWidgets.QPushButton(awesome["flask"])
play = QtWidgets.QPushButton(awesome["play"])
stop = QtWidgets.QPushButton(awesome["stop"])
layout = QtWidgets.QHBoxLayout(footer)
layout.setContentsMargins(5, 5, 5, 5)
layout.addWidget(info, 0)
layout.addWidget(spacer, 1)
layout.addWidget(reset, 0)
layout.addWidget(validate, 0)
layout.addWidget(play, 0)
layout.addWidget(stop, 0)
# Placeholder for when GUI is closing
# TODO(marcus): Fade to black and the the user about what's happening
closing_placeholder = QtWidgets.QWidget(self)
closing_placeholder.setSizePolicy(QtWidgets.QSizePolicy.Expanding,
QtWidgets.QSizePolicy.Expanding)
closing_placeholder.hide()
# Main layout
layout = QtWidgets.QVBoxLayout(self)
layout.addWidget(header, 0)
layout.addWidget(body, 3)
layout.addWidget(closing_placeholder, 1)
layout.addWidget(comment_box, 0)
layout.addWidget(footer, 0)
layout.setContentsMargins(0, 0, 0, 0)
layout.setSpacing(0)
"""Animation
___
/ \
| | ___
\___/ / \
___ | |
/ \ \___/
| |
\___/
"""
# Display info
info_effect = QtWidgets.QGraphicsOpacityEffect(info)
info.setGraphicsEffect(info_effect)
timeline = QtCore.QSequentialAnimationGroup()
on = QtCore.QPropertyAnimation(info_effect, b"opacity")
on.setDuration(0)
on.setStartValue(0)
on.setEndValue(1)
off = QtCore.QPropertyAnimation(info_effect, b"opacity")
off.setDuration(0)
off.setStartValue(1)
off.setEndValue(0)
fade = QtCore.QPropertyAnimation(info_effect, b"opacity")
fade.setDuration(500)
fade.setStartValue(1.0)
fade.setEndValue(0.0)
timeline.addAnimation(on)
timeline.addPause(50)
timeline.addAnimation(off)
timeline.addPause(50)
timeline.addAnimation(on)
timeline.addPause(2000)
timeline.addAnimation(fade)
info_animation = timeline
"""Setup
Widgets are referred to in CSS via their object-name. We
use the same mechanism internally to refer to objects; so rather
than storing widgets as self.my_widget, it is referred to as:
>>> my_widget = self.findChild(QtWidgets.QWidget, "MyWidget")
This way there is only ever a single method of referring to any widget.
___
| |
/\/ \/\
/ _ \
\ / \ /
| | | |
/ \_/ \
\ /
\/\ /\/
|___|
"""
instance_model = model.Instance()
plugin_model = model.Plugin()
terminal_model = model.Terminal()
filter_model = model.ProxyModel(plugin_model)
artist_view.setModel(instance_model)
left_view.setModel(instance_model)
right_view.setModel(filter_model)
terminal_view.setModel(terminal_model)
instance_combo.setModel(instance_model)
plugin_combo.setModel(plugin_model)
names = {
# Main
"Header": header,
"Body": body,
"Footer": footer,
"Info": info,
# Modals
"Details": details,
# Pages
"Artist": artist_page,
"Overview": overview_page,
"Terminal": terminal_page,
# Tabs
"ArtistTab": artist_tab,
"OverviewTab": overview_tab,
"TerminalTab": terminal_tab,
# Buttons
"Play": play,
"Validate": validate,
"Reset": reset,
"Stop": stop,
# Misc
"CommentBox": comment_box,
"CommentPlaceholder": comment_placeholder,
"ClosingPlaceholder": closing_placeholder,
}
for name, w in names.items():
w.setObjectName(name)
# Enable CSS on plain QWidget objects
for w in (header,
body,
artist_page,
comment_box,
overview_page,
terminal_page,
footer,
play,
validate,
stop,
details,
reset,
closing_placeholder):
w.setAttribute(QtCore.Qt.WA_StyledBackground)
self.data = {
"views": {
"artist": artist_view,
"left": left_view,
"right": right_view,
"terminal": terminal_view,
},
"modals": {
"details": details,
},
"models": {
"instances": instance_model,
"plugins": plugin_model,
"filter": filter_model,
"terminal": terminal_model,
},
"terminal_toggles": {
"record": show_records,
"debug": show_debug,
"info": show_info,
"warning": show_warning,
"error": show_error,
"critical": show_critical
},
"tabs": {
"artist": artist_tab,
"overview": overview_tab,
"terminal": terminal_tab,
"current": "artist"
},
"pages": {
"artist": artist_page,
"overview": overview_page,
"terminal": terminal_page,
},
"buttons": {
"play": play,
"validate": validate,
"stop": stop,
"reset": reset
},
"animation": {
"display_info": info_animation,
},
"state": {
"is_closing": False,
}
}
# Pressing Enter defaults to Play
play.setFocus()
"""Signals
________ ________
|________|-->|________|
|
|
___v____
|________|
"""
artist_tab.toggled.connect(
lambda: self.on_tab_changed("artist"))
overview_tab.toggled.connect(
lambda: self.on_tab_changed("overview"))
terminal_tab.toggled.connect(
lambda: self.on_tab_changed("terminal"))
controller.was_reset.connect(self.on_was_reset)
controller.was_validated.connect(self.on_was_validated)
controller.was_published.connect(self.on_was_published)
controller.was_acted.connect(self.on_was_acted)
controller.was_finished.connect(self.on_finished)
# Discovery happens synchronously during reset, that's
# why it's important that this connection is triggered
# right away.
controller.was_discovered.connect(self.on_was_discovered,
QtCore.Qt.DirectConnection)
# This is called synchronously on each process
controller.was_processed.connect(self.on_was_processed,
QtCore.Qt.DirectConnection)
# NOTE: Listeners to this signal are run in the main thread
controller.about_to_process.connect(self.on_about_to_process,
QtCore.Qt.DirectConnection)
artist_view.toggled.connect(self.on_item_toggled)
left_view.toggled.connect(self.on_item_toggled)
right_view.toggled.connect(self.on_item_toggled)
artist_view.inspected.connect(self.on_item_inspected)
left_view.inspected.connect(self.on_item_inspected)
right_view.inspected.connect(self.on_item_inspected)
terminal_view.inspected.connect(self.on_item_inspected)
reset.clicked.connect(self.on_reset_clicked)
validate.clicked.connect(self.on_validate_clicked)
play.clicked.connect(self.on_play_clicked)
stop.clicked.connect(self.on_stop_clicked)
comment_box.textChanged.connect(self.on_comment_entered)
comment_box.returnPressed.connect(self.on_play_clicked)
right_view.customContextMenuRequested.connect(
self.on_plugin_action_menu_requested)
for box in (show_errors,
show_records,
show_debug,
show_info,
show_warning,
show_error,
show_critical):
box.setChecked(True)
self.data["tabs"][settings.InitialTab].setChecked(True)
# -------------------------------------------------------------------------
#
# Event handlers
#
# -------------------------------------------------------------------------
def on_item_expanded(self, index, state):
    """Expand `index`, collapsing every other item in its model.

    Arguments:
        index (QtCore.QModelIndex): Item to expand
        state (bool or None): Explicit expanded-state; None toggles

    """
    if not index.data(model.IsExpandable):
        return

    # None means "toggle relative to the current state"
    if state is None:
        state = not index.data(model.Expanded)

    model_ = index.model()

    # Only one item may be expanded at a time
    for other in model_:
        model_.setData(other, False, model.Expanded)

    model_.setData(index, state, model.Expanded)
def on_item_inspected(self, index):
    """Display the details pop-up for the inspected `index`.

    The pop-up content depends on the inspected item's type;
    records, errors, plug-ins and instances each contribute
    their own heading, subheading, text and timestamp.

    Arguments:
        index (QtCore.QModelIndex): Index of the inspected item

    """
    details = self.data["modals"]["details"]
    details.move(QtGui.QCursor.pos())

    type_ = index.data(model.Type)

    if type_ == "record":
        details.show({
            "icon": awesome["circle"],
            "heading": index.data(model.Label).split("\n")[0],
            "subheading": "LogRecord (%s)" % index.data(model.LogLevel),
            "text": self._compose_data_text(index),
            "timestamp": "",
        })

    elif type_ == "error":
        details.show({
            "icon": awesome["exclamation-triangle"],
            "heading": index.data(model.Label).split("\n")[0],
            "subheading": "Exception",
            "text": self._compose_data_text(index),
            "timestamp": "",
        })

    elif type_ == "plugin":
        details.show({
            "icon": index.data(model.Icon) or awesome["filter"],
            "heading": index.data(model.Label),
            "subheading": ", ".join(index.data(model.Families)),
            "text": index.data(model.Docstring) or "",
            "timestamp": str(index.data(model.Duration) or 0) + " ms",
        })

    elif type_ == "instance":
        details.show({
            "icon": index.data(model.Icon) or awesome["file"],
            "heading": index.data(model.Label),
            "subheading": ", ".join(index.data(model.Families)),
            "text": "",
            "timestamp": str(index.data(model.Duration) or 0) + " ms",
        })

def _compose_data_text(self, index):
    """Format the public key/value pairs of `index` for display.

    Keys prefixed with an underscore are considered internal
    and are excluded. (Shared by "record" and "error" items,
    which previously duplicated this loop.)
    """
    lines = list()
    for key, value in index.data(model.Data).items():
        if key.startswith("_"):
            continue
        lines.append("%s %s" % ((key + ":").ljust(12), value))
    return "\n".join(lines)
def on_item_toggled(self, index, state=None):
    """An item is requesting to be toggled

    Arguments:
        index (QtCore.QModelIndex): Index of the toggled item
        state (bool, optional): Explicit checked-state; when None
            the current state is inverted.

    """
    # Only idle, optional items may change their checked state
    if not index.data(model.IsIdle):
        return self.info("Cannot toggle")

    if not index.data(model.IsOptional):
        return self.info("This item is mandatory")

    if state is None:
        state = not index.data(model.IsChecked)

    index.model().setData(index, state, model.IsChecked)

    # Withdraw option to publish if no instances are toggled
    play = self.findChild(QtWidgets.QWidget, "Play")
    validate = self.findChild(QtWidgets.QWidget, "Validate")
    any_instances = any(index.data(model.IsChecked)
                        for index in self.data["models"]["instances"])
    play.setEnabled(any_instances)
    validate.setEnabled(any_instances)

    # Emit signals
    # NOTE(review): emission is deferred, presumably so the GUI can
    # repaint before user callbacks run -- confirm against util.defer.
    if index.data(model.Type) == "instance":
        instance = self.data["models"]["instances"].items[index.row()]
        util.defer(
            100, lambda: self.controller.emit_(
                signal="instanceToggled",
                kwargs={"new_value": state,
                        "old_value": not state,
                        "instance": instance}))

    if index.data(model.Type) == "plugin":
        util.defer(
            100, lambda: self.controller.emit_(
                signal="pluginToggled",
                kwargs={"new_value": state,
                        "old_value": not state,
                        "plugin": index.data(model.Object)}))
def on_tab_changed(self, target):
    """Make `target` the current page, hiding all others.

    Arguments:
        target (str): Key of the page, e.g. "artist" or "terminal"

    """
    pages = self.data["pages"]

    for page in pages.values():
        page.hide()

    # The comment box is only meaningful outside of the terminal
    comment_box = self.findChild(QtWidgets.QWidget, "CommentBox")
    if target == "terminal":
        comment_box.hide()
    else:
        comment_box.setVisible(comment_box.isEnabled())

    pages[target].show()

    self.data["tabs"]["current"] = target
def on_validate_clicked(self):
    """The Validate button was pressed; lock the comment and go."""
    box = self.findChild(QtWidgets.QWidget, "CommentBox")
    box.setEnabled(False)
    box.hide()
    self.validate()
def on_play_clicked(self):
    """The Play button was pressed; lock the comment and publish."""
    box = self.findChild(QtWidgets.QWidget, "CommentBox")
    box.setEnabled(False)
    box.hide()
    self.publish()
def on_reset_clicked(self):
    """The Reset button was pressed."""
    self.reset()
def on_stop_clicked(self):
    """The Stop button was pressed; ask the controller to halt."""
    self.info("Stopping..")
    self.controller.is_running = False
def on_about_to_process(self, plugin, instance):
    """Reflect currently running pair in GUI

    Arguments:
        plugin: Plug-in about to be processed
        instance: Instance about to be processed, or None when the
            plug-in runs against the context as a whole.

    """
    if instance is not None:
        # Flag the instance as in-progress
        instance_model = self.data["models"]["instances"]
        index = instance_model.items.index(instance)
        index = instance_model.createIndex(index, 0)
        instance_model.setData(index, True, model.IsProcessing)

    # Flag the plug-in as in-progress
    plugin_model = self.data["models"]["plugins"]
    index = plugin_model.items.index(plugin)
    index = plugin_model.createIndex(index, 0)
    plugin_model.setData(index, True, model.IsProcessing)

    self.info("%s %s" % (self.tr("Processing"), index.data(model.Label)))
def on_plugin_action_menu_requested(self, pos):
    """The user right-clicked on a plug-in
     __________
    |          |
    | Action 1 |
    | Action 2 |
    | Action 3 |
    |          |
    |__________|

    Arguments:
        pos (QtCore.QPoint): Local position of the right-click

    """
    index = self.data["views"]["right"].indexAt(pos)
    actions = index.data(model.Actions)

    if not actions:
        return

    menu = QtWidgets.QMenu(self)

    # The right-hand view is filtered; map back to the source
    # model to find the actual plug-in object.
    # (A leftover debug print of the plug-in was removed here.)
    plugins_index = self.data["models"]["filter"].mapToSource(index)
    plugin = self.data["models"]["plugins"].items[plugins_index.row()]

    for action in actions:
        qaction = QtWidgets.QAction(action.label or action.__name__, self)
        qaction.triggered.connect(partial(self.act, plugin, action))
        menu.addAction(qaction)

    menu.popup(self.data["views"]["right"].viewport().mapToGlobal(pos))
def on_was_discovered(self):
    """Discovery finished; fill the plug-in model."""
    plugin_model = self.data["models"]["plugins"]
    for Plugin in self.controller.plugins:
        plugin_model.append(Plugin)
def on_was_reset(self):
    """The controller finished resetting; repopulate the GUI."""
    self.info(self.tr("Finishing up reset.."))

    models = self.data["models"]

    # Rebuild the instance model from the fresh context
    models["instances"].reset()
    for instance in self.controller.context:
        models["instances"].append(instance)

    buttons = self.data["buttons"]
    for name in ("play", "validate", "reset"):
        buttons[name].show()
    buttons["stop"].hide()

    models["instances"].restore_checkstate()
    models["plugins"].restore_checkstate()

    # Append placeholder comment from Context
    # This allows users to inject a comment from elsewhere,
    # or to perhaps provide a placeholder comment/template
    # for artists to fill in.
    comment = self.controller.context.data.get("comment")

    comment_box = self.findChild(QtWidgets.QWidget, "CommentBox")
    comment_box.setText(comment or None)
    comment_box.setEnabled(comment is not None)

    # Refresh tab
    self.on_tab_changed(self.data["tabs"]["current"])
    self.controller.current_error = None

    self.on_finished()
def on_was_validated(self):
    """Validation finished; mark every item as no longer idle."""
    for model_ in (self.data["models"]["plugins"],
                   self.data["models"]["instances"]):
        for index in model_:
            model_.setData(index, False, model.IsIdle)

    buttons = self.data["buttons"]
    buttons["reset"].show()
    buttons["play"].show()
    buttons["stop"].hide()

    self.on_finished()
def on_was_published(self):
    """Publishing finished; clear idle states and wrap up."""
    for model_ in (self.data["models"]["plugins"],
                   self.data["models"]["instances"]):
        for index in model_:
            model_.setData(index, False, model.IsIdle)

    buttons = self.data["buttons"]
    buttons["reset"].show()
    buttons["stop"].hide()

    # The comment can no longer be changed once published
    self.findChild(QtWidgets.QWidget, "CommentBox").hide()

    self.on_finished()
def on_was_processed(self, result):
    """A plug-in/instance pair was processed; update the models.

    Arguments:
        result (dict): Result dictionary as produced by pyblish,
            containing at least "plugin" and "instance" records.

    """
    models = self.data["models"]
    plugins_filter = models["filter"]

    for instance in self.controller.context:
        # Pick up instances created during processing
        if instance.id not in models["instances"].ids:
            models["instances"].append(instance)

        # Reveal plug-ins compatible with this instance's family and
        # families in the filtered right-hand view. `.get` guards
        # against instances without a "family" key.
        families = [instance.data.get("family")]
        families.extend(instance.data.get("families") or [])
        for family in families:
            if family:
                plugins_filter.add_inclusion(role="families", value=family)

    models["plugins"].update_with_result(result)
    models["instances"].update_with_result(result)
    models["terminal"].update_with_result(result)
def on_was_acted(self, result):
    """An action finished; reflect its result in the GUI.

    Arguments:
        result (dict): Result dictionary of the action run,
            containing at least "plugin" and "success".

    """
    buttons = self.data["buttons"]
    buttons["reset"].show()
    buttons["stop"].hide()

    # Update action with result
    model_ = self.data["models"]["plugins"]

    index = model_.items.index(result["plugin"])
    index = model_.createIndex(index, 0)

    # Mark the action as failed on an unsuccessful run
    model_.setData(index, not result["success"], model.ActionFailed)
    model_.setData(index, False, model.IsProcessing)

    models = self.data["models"]
    models["terminal"].update_with_result(result)

    self.on_finished()
def on_finished(self):
    """Finished signal handler"""
    self.controller.is_running = False

    # Report the outcome, based on whether an error was recorded
    if self.controller.current_error is None:
        self.info(self.tr("Finished successfully!"))
    else:
        self.info(self.tr("Stopped due to error(s), see Terminal."))
# -------------------------------------------------------------------------
#
# Functions
#
# -------------------------------------------------------------------------
def reset(self):
    """Prepare GUI for reset"""
    self.info(self.tr("About to reset.."))

    models = self.data["models"]
    models["instances"].store_checkstate()
    models["plugins"].store_checkstate()

    # Reset current ids to secure no previous instances get mixed in.
    models["instances"].ids = []

    for each in models.values():
        each.reset()

    for each in self.data["buttons"].values():
        each.hide()

    self.findChild(QtWidgets.QWidget, "CommentBox").hide()

    # Give the GUI a moment to clear before the controller resets
    util.defer(500, self.controller.reset)
def validate(self):
    """Begin validation, leaving only the Stop button visible."""
    self.info(self.tr("Preparing validate.."))

    buttons = self.data["buttons"]
    for button in buttons.values():
        button.hide()
    buttons["stop"].show()

    util.defer(5, self.controller.validate)
def publish(self):
    """Begin publishing, leaving only the Stop button visible."""
    self.info(self.tr("Preparing publish.."))

    buttons = self.data["buttons"]
    for button in buttons.values():
        button.hide()
    buttons["stop"].show()

    util.defer(5, self.controller.publish)
def act(self, plugin, action):
    """Run `action` of `plugin` via the controller.

    Arguments:
        plugin: Plug-in whose action to run
        action: The action to run

    """
    self.info("%s %s.." % (self.tr("Preparing"), action))

    for button in self.data["buttons"].values():
        button.hide()
    self.data["buttons"]["stop"].show()

    self.controller.is_running = True

    # Cause view to update, but it won't visually
    # happen until Qt is given time to idle..
    model_ = self.data["models"]["plugins"]

    index = model_.items.index(plugin)
    index = model_.createIndex(index, 0)

    for key, value in {model.ActionIdle: False,
                       model.ActionFailed: False,
                       model.IsProcessing: True}.items():
        model_.setData(index, value, key)

    # Give Qt time to draw
    util.defer(100, lambda: self.controller.act(plugin, action))

    self.info(self.tr("Action prepared."))
def closeEvent(self, event):
    """Perform post-flight checks before closing

    Make sure processing of any kind is wrapped up before closing.
    The method is re-entered: the first call marks the window as
    closing and defers a second `close()`, which lands back here
    with `is_closing` set and performs the actual teardown.

    Arguments:
        event (QtGui.QCloseEvent): The close event

    """
    # Make it snappy, but take care to clean it all up.
    # TODO(marcus): Enable GUI to return on problem, such
    # as asking whether or not the user really wants to quit
    # given there are things currently running.
    self.hide()

    if self.data["state"]["is_closing"]:

        # Explicitly clear potentially referenced data
        self.info(self.tr("Cleaning up models.."))
        for v in self.data["views"].values():
            v.model().deleteLater()
            v.setModel(None)

        self.info(self.tr("Cleaning up terminal.."))
        # NOTE(review): `del item` only unbinds the loop name; the
        # items list itself is untouched -- confirm intent.
        for item in self.data["models"]["terminal"].items:
            del(item)

        self.info(self.tr("Cleaning up controller.."))
        self.controller.cleanup()

        self.info(self.tr("All clean!"))
        self.info(self.tr("Good bye"))
        return super(Window, self).closeEvent(event)

    self.info(self.tr("Closing.."))

    def on_problem():
        # Shown when the deferred close never happened
        self.heads_up("Warning", "Had trouble closing down. "
                      "Please tell someone and try again.")
        self.show()

    if self.controller.is_running:
        # Wait for the current process to wind down, then close
        self.info(self.tr("..as soon as processing is finished.."))
        self.controller.is_running = False
        self.finished.connect(self.close)
        util.defer(2000, on_problem)
        return event.ignore()

    # First pass: mark as closing and defer the real close so Qt
    # has time to repaint before teardown
    self.data["state"]["is_closing"] = True

    util.defer(200, self.close)
    return event.ignore()
def reject(self):
    """Handle ESC key"""
    if not self.controller.is_running:
        return

    self.info(self.tr("Stopping.."))
    self.controller.is_running = False
# -------------------------------------------------------------------------
#
# Feedback
#
# -------------------------------------------------------------------------
def info(self, message):
    """Print user-facing information

    Arguments:
        message (str): Text message for the user

    """
    self.findChild(QtWidgets.QLabel, "Info").setText(message)

    # Include message in terminal
    self.data["models"]["terminal"].append({
        "label": message,
        "type": "info"
    })

    # Restart the fade animation of the status label
    animation = self.data["animation"]["display_info"]
    animation.stop()
    animation.start()

    # TODO(marcus): Should this be configurable? Do we want
    # the shell to fill up with these messages?
    util.u_print(message)
def warning(self, message):
    """Block processing and print warning until user hits "Continue"

    Arguments:
        message (str): Message to display

    """
    # TODO(marcus): Implement this; for now the warning is shown
    # like any other status message and does not block.
    self.info(message)
def heads_up(self, title, message, command=None):
    """Provide a front-and-center message with optional command

    Arguments:
        title (str): Bold and short message
        message (str): Extended message
        command (optional, callable): Function is provided as a button

    """
    # TODO(marcus): Implement this; `title` and `command` are
    # currently unused -- only the message is forwarded.
    self.info(message)
|
pyblish/pyblish-lite | pyblish_lite/window.py | Window.on_about_to_process | python | def on_about_to_process(self, plugin, instance):
if instance is not None:
instance_model = self.data["models"]["instances"]
index = instance_model.items.index(instance)
index = instance_model.createIndex(index, 0)
instance_model.setData(index, True, model.IsProcessing)
plugin_model = self.data["models"]["plugins"]
index = plugin_model.items.index(plugin)
index = plugin_model.createIndex(index, 0)
plugin_model.setData(index, True, model.IsProcessing)
self.info("%s %s" % (self.tr("Processing"), index.data(model.Label))) | Reflect currently running pair in GUI | train | https://github.com/pyblish/pyblish-lite/blob/9172b81c7ae19a36e99c89dd16e102201992dc20/pyblish_lite/window.py#L709-L722 | null | class Window(QtWidgets.QDialog):
def __init__(self, controller, parent=None):
super(Window, self).__init__(parent)
icon = QtGui.QIcon(util.get_asset("img", "logo-extrasmall.png"))
self.setWindowFlags(self.windowFlags() |
QtCore.Qt.WindowTitleHint |
QtCore.Qt.WindowMaximizeButtonHint |
QtCore.Qt.WindowMinimizeButtonHint |
QtCore.Qt.WindowCloseButtonHint)
self.setWindowIcon(icon)
self.setAttribute(QtCore.Qt.WA_DeleteOnClose)
self.controller = controller
"""General layout
__________________ _____________________
| | | | | |
| Header | --> | Tab | Tab | Tab |
|__________________| |_______|_______|_____|
| | _____________________
| | | |
| | | |
| Body | | |
| | --> | Page |
| | | |
| | |_____________________|
|__________________| _____________________
| | | | |
| Footer | | Status | Buttons |
|__________________| |___________|_________|
"""
header = QtWidgets.QWidget()
artist_tab = QtWidgets.QRadioButton()
overview_tab = QtWidgets.QRadioButton()
terminal_tab = QtWidgets.QRadioButton()
spacer = QtWidgets.QWidget()
layout = QtWidgets.QHBoxLayout(header)
layout.addWidget(artist_tab, 0)
layout.addWidget(overview_tab, 0)
layout.addWidget(terminal_tab, 0)
layout.addWidget(spacer, 1) # Compress items to the left
layout.setContentsMargins(0, 0, 0, 0)
layout.setSpacing(0)
"""Artist Page
__________________
| |
| | ------------ |
| | ----- |
| |
| | -------- |
| | ------- |
| |
|__________________|
"""
artist_page = QtWidgets.QWidget()
artist_view = view.Item()
artist_delegate = delegate.Artist()
artist_view.setItemDelegate(artist_delegate)
layout = QtWidgets.QVBoxLayout(artist_page)
layout.addWidget(artist_view)
layout.setContentsMargins(5, 5, 5, 5)
layout.setSpacing(0)
"""Overview Page
___________________
| |
| o ----- o---- |
| o ---- o--- |
| o ---- o---- |
| o ---- o------ |
| |
|__________________|
"""
overview_page = QtWidgets.QWidget()
left_view = view.Item()
right_view = view.Item()
item_delegate = delegate.Item()
left_view.setItemDelegate(item_delegate)
right_view.setItemDelegate(item_delegate)
layout = QtWidgets.QHBoxLayout(overview_page)
layout.addWidget(left_view, 1)
layout.addWidget(right_view, 1)
layout.setContentsMargins(5, 5, 5, 5)
layout.setSpacing(0)
"""Terminal
__________________
| |
| \ |
| \ |
| / |
| / ______ |
| |
|__________________|
"""
terminal_container = QtWidgets.QWidget()
terminal_delegate = delegate.Terminal()
terminal_view = view.LogView()
terminal_view.setItemDelegate(terminal_delegate)
layout = QtWidgets.QVBoxLayout(terminal_container)
layout.addWidget(terminal_view)
layout.setContentsMargins(5, 5, 5, 5)
layout.setSpacing(0)
terminal_footer = QtWidgets.QWidget()
search_box = QtWidgets.QLineEdit()
instance_combo = QtWidgets.QComboBox()
plugin_combo = QtWidgets.QComboBox()
show_errors = QtWidgets.QCheckBox()
show_records = QtWidgets.QCheckBox()
show_debug = QtWidgets.QCheckBox()
show_info = QtWidgets.QCheckBox()
show_warning = QtWidgets.QCheckBox()
show_error = QtWidgets.QCheckBox()
show_critical = QtWidgets.QCheckBox()
layout = QtWidgets.QHBoxLayout(terminal_footer)
for w in (search_box,
instance_combo,
plugin_combo,
show_errors,
show_records,
show_debug,
show_info,
show_warning,
show_error,
show_critical):
layout.addWidget(w)
layout.setContentsMargins(0, 0, 0, 0)
layout.setSpacing(3)
terminal_page = QtWidgets.QWidget()
layout = QtWidgets.QVBoxLayout(terminal_page)
layout.addWidget(terminal_container)
# layout.addWidget(terminal_footer) # TODO
layout.setContentsMargins(0, 0, 0, 0)
layout.setSpacing(0)
# Add some room between window borders and contents
body = QtWidgets.QWidget()
layout = QtWidgets.QHBoxLayout(body)
layout.setContentsMargins(5, 5, 5, 0)
layout.addWidget(artist_page)
layout.addWidget(overview_page)
layout.addWidget(terminal_page)
"""Comment Box
____________________________
|> My comment |
| |
|____________________________|
"""
comment_box = QtWidgets.QLineEdit()
comment_placeholder = QtWidgets.QLabel(
self.tr("Comment.."), comment_box)
comment_placeholder.move(2, 2)
comment_box.setEnabled(False)
comment_box.hide()
"""Details View
____________________________
| |
| An Item 23 ms |
| - family |
| |
|----------------------------|
| |
| Docstring |
|____________________________|
"""
details = view.Details(self)
"""Footer
______________________
| ___ ___ |
| | o || > ||
| |___||___||
|______________________|
"""
footer = QtWidgets.QWidget()
info = QtWidgets.QLabel()
spacer = QtWidgets.QWidget()
reset = QtWidgets.QPushButton(awesome["refresh"])
validate = QtWidgets.QPushButton(awesome["flask"])
play = QtWidgets.QPushButton(awesome["play"])
stop = QtWidgets.QPushButton(awesome["stop"])
layout = QtWidgets.QHBoxLayout(footer)
layout.setContentsMargins(5, 5, 5, 5)
layout.addWidget(info, 0)
layout.addWidget(spacer, 1)
layout.addWidget(reset, 0)
layout.addWidget(validate, 0)
layout.addWidget(play, 0)
layout.addWidget(stop, 0)
# Placeholder for when GUI is closing
# TODO(marcus): Fade to black and the the user about what's happening
closing_placeholder = QtWidgets.QWidget(self)
closing_placeholder.setSizePolicy(QtWidgets.QSizePolicy.Expanding,
QtWidgets.QSizePolicy.Expanding)
closing_placeholder.hide()
# Main layout
layout = QtWidgets.QVBoxLayout(self)
layout.addWidget(header, 0)
layout.addWidget(body, 3)
layout.addWidget(closing_placeholder, 1)
layout.addWidget(comment_box, 0)
layout.addWidget(footer, 0)
layout.setContentsMargins(0, 0, 0, 0)
layout.setSpacing(0)
"""Animation
___
/ \
| | ___
\___/ / \
___ | |
/ \ \___/
| |
\___/
"""
# Display info
info_effect = QtWidgets.QGraphicsOpacityEffect(info)
info.setGraphicsEffect(info_effect)
timeline = QtCore.QSequentialAnimationGroup()
on = QtCore.QPropertyAnimation(info_effect, b"opacity")
on.setDuration(0)
on.setStartValue(0)
on.setEndValue(1)
off = QtCore.QPropertyAnimation(info_effect, b"opacity")
off.setDuration(0)
off.setStartValue(1)
off.setEndValue(0)
fade = QtCore.QPropertyAnimation(info_effect, b"opacity")
fade.setDuration(500)
fade.setStartValue(1.0)
fade.setEndValue(0.0)
timeline.addAnimation(on)
timeline.addPause(50)
timeline.addAnimation(off)
timeline.addPause(50)
timeline.addAnimation(on)
timeline.addPause(2000)
timeline.addAnimation(fade)
info_animation = timeline
"""Setup
Widgets are referred to in CSS via their object-name. We
use the same mechanism internally to refer to objects; so rather
than storing widgets as self.my_widget, it is referred to as:
>>> my_widget = self.findChild(QtWidgets.QWidget, "MyWidget")
This way there is only ever a single method of referring to any widget.
___
| |
/\/ \/\
/ _ \
\ / \ /
| | | |
/ \_/ \
\ /
\/\ /\/
|___|
"""
instance_model = model.Instance()
plugin_model = model.Plugin()
terminal_model = model.Terminal()
filter_model = model.ProxyModel(plugin_model)
artist_view.setModel(instance_model)
left_view.setModel(instance_model)
right_view.setModel(filter_model)
terminal_view.setModel(terminal_model)
instance_combo.setModel(instance_model)
plugin_combo.setModel(plugin_model)
names = {
# Main
"Header": header,
"Body": body,
"Footer": footer,
"Info": info,
# Modals
"Details": details,
# Pages
"Artist": artist_page,
"Overview": overview_page,
"Terminal": terminal_page,
# Tabs
"ArtistTab": artist_tab,
"OverviewTab": overview_tab,
"TerminalTab": terminal_tab,
# Buttons
"Play": play,
"Validate": validate,
"Reset": reset,
"Stop": stop,
# Misc
"CommentBox": comment_box,
"CommentPlaceholder": comment_placeholder,
"ClosingPlaceholder": closing_placeholder,
}
for name, w in names.items():
w.setObjectName(name)
# Enable CSS on plain QWidget objects
for w in (header,
body,
artist_page,
comment_box,
overview_page,
terminal_page,
footer,
play,
validate,
stop,
details,
reset,
closing_placeholder):
w.setAttribute(QtCore.Qt.WA_StyledBackground)
self.data = {
"views": {
"artist": artist_view,
"left": left_view,
"right": right_view,
"terminal": terminal_view,
},
"modals": {
"details": details,
},
"models": {
"instances": instance_model,
"plugins": plugin_model,
"filter": filter_model,
"terminal": terminal_model,
},
"terminal_toggles": {
"record": show_records,
"debug": show_debug,
"info": show_info,
"warning": show_warning,
"error": show_error,
"critical": show_critical
},
"tabs": {
"artist": artist_tab,
"overview": overview_tab,
"terminal": terminal_tab,
"current": "artist"
},
"pages": {
"artist": artist_page,
"overview": overview_page,
"terminal": terminal_page,
},
"buttons": {
"play": play,
"validate": validate,
"stop": stop,
"reset": reset
},
"animation": {
"display_info": info_animation,
},
"state": {
"is_closing": False,
}
}
# Pressing Enter defaults to Play
play.setFocus()
"""Signals
________ ________
|________|-->|________|
|
|
___v____
|________|
"""
artist_tab.toggled.connect(
lambda: self.on_tab_changed("artist"))
overview_tab.toggled.connect(
lambda: self.on_tab_changed("overview"))
terminal_tab.toggled.connect(
lambda: self.on_tab_changed("terminal"))
controller.was_reset.connect(self.on_was_reset)
controller.was_validated.connect(self.on_was_validated)
controller.was_published.connect(self.on_was_published)
controller.was_acted.connect(self.on_was_acted)
controller.was_finished.connect(self.on_finished)
# Discovery happens synchronously during reset, that's
# why it's important that this connection is triggered
# right away.
controller.was_discovered.connect(self.on_was_discovered,
QtCore.Qt.DirectConnection)
# This is called synchronously on each process
controller.was_processed.connect(self.on_was_processed,
QtCore.Qt.DirectConnection)
# NOTE: Listeners to this signal are run in the main thread
controller.about_to_process.connect(self.on_about_to_process,
QtCore.Qt.DirectConnection)
artist_view.toggled.connect(self.on_item_toggled)
left_view.toggled.connect(self.on_item_toggled)
right_view.toggled.connect(self.on_item_toggled)
artist_view.inspected.connect(self.on_item_inspected)
left_view.inspected.connect(self.on_item_inspected)
right_view.inspected.connect(self.on_item_inspected)
terminal_view.inspected.connect(self.on_item_inspected)
reset.clicked.connect(self.on_reset_clicked)
validate.clicked.connect(self.on_validate_clicked)
play.clicked.connect(self.on_play_clicked)
stop.clicked.connect(self.on_stop_clicked)
comment_box.textChanged.connect(self.on_comment_entered)
comment_box.returnPressed.connect(self.on_play_clicked)
right_view.customContextMenuRequested.connect(
self.on_plugin_action_menu_requested)
for box in (show_errors,
show_records,
show_debug,
show_info,
show_warning,
show_error,
show_critical):
box.setChecked(True)
self.data["tabs"][settings.InitialTab].setChecked(True)
# -------------------------------------------------------------------------
#
# Event handlers
#
# -------------------------------------------------------------------------
def on_item_expanded(self, index, state):
if not index.data(model.IsExpandable):
return
if state is None:
state = not index.data(model.Expanded)
# Collapse others
for i in index.model():
index.model().setData(i, False, model.Expanded)
index.model().setData(index, state, model.Expanded)
def on_item_inspected(self, index):
details = self.data["modals"]["details"]
details.move(QtGui.QCursor.pos())
if index.data(model.Type) == "record":
# Compose available data
data = list()
for key, value in index.data(model.Data).items():
if key.startswith("_"):
continue
data.append("%s %s" % ((key + ":").ljust(12), value))
text = "\n".join(data)
details.show({
"icon": awesome["circle"],
"heading": index.data(model.Label).split("\n")[0],
"subheading": "LogRecord (%s)" % index.data(model.LogLevel),
"text": text,
"timestamp": "",
})
elif index.data(model.Type) == "error":
# Compose available data
data = list()
for key, value in index.data(model.Data).items():
if key.startswith("_"):
continue
data.append("%s %s" % ((key + ":").ljust(12), value))
text = "\n".join(data)
details.show({
"icon": awesome["exclamation-triangle"],
"heading": index.data(model.Label).split("\n")[0],
"subheading": "Exception",
"text": text,
"timestamp": "",
})
elif index.data(model.Type) == "plugin":
details.show({
"icon": index.data(model.Icon) or awesome["filter"],
"heading": index.data(model.Label),
"subheading": ", ".join(index.data(model.Families)),
"text": index.data(model.Docstring) or "",
"timestamp": str(index.data(model.Duration) or 0) + " ms",
})
elif index.data(model.Type) == "instance":
details.show({
"icon": index.data(model.Icon) or awesome["file"],
"heading": index.data(model.Label),
"subheading": ", ".join(index.data(model.Families)),
"text": "",
"timestamp": str(index.data(model.Duration) or 0) + " ms",
})
def on_item_toggled(self, index, state=None):
"""An item is requesting to be toggled"""
if not index.data(model.IsIdle):
return self.info("Cannot toggle")
if not index.data(model.IsOptional):
return self.info("This item is mandatory")
if state is None:
state = not index.data(model.IsChecked)
index.model().setData(index, state, model.IsChecked)
# Withdraw option to publish if no instances are toggled
play = self.findChild(QtWidgets.QWidget, "Play")
validate = self.findChild(QtWidgets.QWidget, "Validate")
any_instances = any(index.data(model.IsChecked)
for index in self.data["models"]["instances"])
play.setEnabled(any_instances)
validate.setEnabled(any_instances)
# Emit signals
if index.data(model.Type) == "instance":
instance = self.data["models"]["instances"].items[index.row()]
util.defer(
100, lambda: self.controller.emit_(
signal="instanceToggled",
kwargs={"new_value": state,
"old_value": not state,
"instance": instance}))
if index.data(model.Type) == "plugin":
util.defer(
100, lambda: self.controller.emit_(
signal="pluginToggled",
kwargs={"new_value": state,
"old_value": not state,
"plugin": index.data(model.Object)}))
def on_tab_changed(self, target):
for page in self.data["pages"].values():
page.hide()
page = self.data["pages"][target]
comment_box = self.findChild(QtWidgets.QWidget, "CommentBox")
if target == "terminal":
comment_box.hide()
else:
comment_box.setVisible(comment_box.isEnabled())
page.show()
self.data["tabs"]["current"] = target
def on_validate_clicked(self):
comment_box = self.findChild(QtWidgets.QWidget, "CommentBox")
comment_box.setEnabled(False)
comment_box.hide()
self.validate()
def on_play_clicked(self):
comment_box = self.findChild(QtWidgets.QWidget, "CommentBox")
comment_box.setEnabled(False)
comment_box.hide()
self.publish()
def on_reset_clicked(self):
self.reset()
def on_stop_clicked(self):
self.info("Stopping..")
self.controller.is_running = False
def on_comment_entered(self):
    """The user has typed a comment"""
    comment = self.findChild(QtWidgets.QWidget, "CommentBox").text()

    # Store within context
    self.controller.context.data["comment"] = comment

    # Show the "Comment.." placeholder only while the box is empty
    placeholder = self.findChild(QtWidgets.QLabel, "CommentPlaceholder")
    placeholder.setVisible(not comment)
def on_plugin_action_menu_requested(self, pos):
"""The user right-clicked on a plug-in
__________
| |
| Action 1 |
| Action 2 |
| Action 3 |
| |
|__________|
"""
index = self.data["views"]["right"].indexAt(pos)
actions = index.data(model.Actions)
if not actions:
return
menu = QtWidgets.QMenu(self)
plugins_index = self.data["models"]["filter"].mapToSource(index)
plugin = self.data["models"]["plugins"].items[plugins_index.row()]
print("plugin is: %s" % plugin)
for action in actions:
qaction = QtWidgets.QAction(action.label or action.__name__, self)
qaction.triggered.connect(partial(self.act, plugin, action))
menu.addAction(qaction)
menu.popup(self.data["views"]["right"].viewport().mapToGlobal(pos))
def on_was_discovered(self):
models = self.data["models"]
for Plugin in self.controller.plugins:
models["plugins"].append(Plugin)
def on_was_reset(self):
models = self.data["models"]
self.info(self.tr("Finishing up reset.."))
models["instances"].reset()
for instance in self.controller.context:
models["instances"].append(instance)
buttons = self.data["buttons"]
buttons["play"].show()
buttons["validate"].show()
buttons["reset"].show()
buttons["stop"].hide()
models["instances"].restore_checkstate()
models["plugins"].restore_checkstate()
# Append placeholder comment from Context
# This allows users to inject a comment from elsewhere,
# or to perhaps provide a placeholder comment/template
# for artists to fill in.
comment = self.controller.context.data.get("comment")
comment_box = self.findChild(QtWidgets.QWidget, "CommentBox")
comment_box.setText(comment or None)
comment_box.setEnabled(comment is not None)
# Refresh tab
self.on_tab_changed(self.data["tabs"]["current"])
self.controller.current_error = None
self.on_finished()
def on_was_validated(self):
plugin_model = self.data["models"]["plugins"]
instance_model = self.data["models"]["instances"]
for index in plugin_model:
index.model().setData(index, False, model.IsIdle)
for index in instance_model:
index.model().setData(index, False, model.IsIdle)
buttons = self.data["buttons"]
buttons["reset"].show()
buttons["play"].show()
buttons["stop"].hide()
self.on_finished()
def on_was_published(self):
plugin_model = self.data["models"]["plugins"]
instance_model = self.data["models"]["instances"]
for index in plugin_model:
index.model().setData(index, False, model.IsIdle)
for index in instance_model:
index.model().setData(index, False, model.IsIdle)
buttons = self.data["buttons"]
buttons["reset"].show()
buttons["stop"].hide()
comment_box = self.findChild(QtWidgets.QWidget, "CommentBox")
comment_box.hide()
self.on_finished()
def on_was_processed(self, result):
models = self.data["models"]
for instance in self.controller.context:
if instance.id not in models["instances"].ids:
models["instances"].append(instance)
family = instance.data["family"]
if family:
plugins_filter = self.data["models"]["filter"]
plugins_filter.add_inclusion(role="families", value=family)
families = instance.data.get("families")
if families:
for f in families:
plugins_filter = self.data["models"]["filter"]
plugins_filter.add_inclusion(role="families", value=f)
models["plugins"].update_with_result(result)
models["instances"].update_with_result(result)
models["terminal"].update_with_result(result)
def on_was_acted(self, result):
buttons = self.data["buttons"]
buttons["reset"].show()
buttons["stop"].hide()
# Update action with result
model_ = self.data["models"]["plugins"]
index = model_.items.index(result["plugin"])
index = model_.createIndex(index, 0)
model_.setData(index, not result["success"], model.ActionFailed)
model_.setData(index, False, model.IsProcessing)
models = self.data["models"]
models["terminal"].update_with_result(result)
self.on_finished()
def on_finished(self):
    """Finished signal handler

    Stops the controller and reports the run's outcome to the user,
    based on whether an error was recorded during processing.
    """
    self.controller.is_running = False

    if self.controller.current_error is None:
        self.info(self.tr("Finished successfully!"))
    else:
        self.info(self.tr("Stopped due to error(s), see Terminal."))
# -------------------------------------------------------------------------
#
# Functions
#
# -------------------------------------------------------------------------
def reset(self):
    """Prepare GUI for reset

    Snapshots check-states, empties every model, hides the buttons and
    comment box, then defers the actual controller reset so Qt gets a
    chance to repaint first.
    """
    self.info(self.tr("About to reset.."))

    models = self.data["models"]
    models["instances"].store_checkstate()
    models["plugins"].store_checkstate()

    # Reset current ids to secure no previous instances get mixed in.
    models["instances"].ids = []

    for each in models.values():
        each.reset()

    for widget in self.data["buttons"].values():
        widget.hide()

    self.findChild(QtWidgets.QWidget, "CommentBox").hide()

    util.defer(500, self.controller.reset)
def validate(self):
    """Prepare GUI for validation and hand off to the controller.

    Only the stop button stays visible while the run is in flight; the
    controller call is deferred so Qt can repaint first.
    """
    self.info(self.tr("Preparing validate.."))

    buttons = self.data["buttons"]
    for each in buttons.values():
        each.hide()
    buttons["stop"].show()

    util.defer(5, self.controller.validate)
def publish(self):
    """Prepare GUI for publishing and hand off to the controller.

    Mirrors `validate`: hide every footer button except stop, then
    defer the controller call so Qt can repaint first.
    """
    self.info(self.tr("Preparing publish.."))

    buttons = self.data["buttons"]
    for each in buttons.values():
        each.hide()
    buttons["stop"].show()

    util.defer(5, self.controller.publish)
def act(self, plugin, action):
    """Run `action` of `plugin` via the controller.

    Arguments:
        plugin: Pyblish plug-in the action belongs to
        action: Pyblish action to run
    """
    self.info("%s %s.." % (self.tr("Preparing"), action))

    buttons = self.data["buttons"]
    for each in buttons.values():
        each.hide()
    buttons["stop"].show()

    self.controller.is_running = True

    # Cause view to update, but it won't visually
    # happen until Qt is given time to idle..
    plugin_model = self.data["models"]["plugins"]
    row = plugin_model.items.index(plugin)
    index = plugin_model.createIndex(row, 0)

    plugin_model.setData(index, False, model.ActionIdle)
    plugin_model.setData(index, False, model.ActionFailed)
    plugin_model.setData(index, True, model.IsProcessing)

    # Give Qt time to draw
    util.defer(100, lambda: self.controller.act(plugin, action))

    self.info(self.tr("Action prepared."))
def closeEvent(self, event):
    """Perform post-flight checks before closing

    Make sure processing of any kind is wrapped up before closing.

    Runs in up to two passes: the first pass ignores the event, marks
    the window as closing and re-enters via a deferred `self.close()`;
    the second pass (is_closing already set) tears down models and the
    controller, then lets the base-class handler accept the close.

    Arguments:
        event (QtGui.QCloseEvent): Close event to accept or ignore
    """

    # Make it snappy, but take care to clean it all up.
    # TODO(marcus): Enable GUI to return on problem, such
    # as asking whether or not the user really wants to quit
    # given there are things currently running.
    self.hide()

    if self.data["state"]["is_closing"]:

        # Explicitly clear potentially referenced data
        self.info(self.tr("Cleaning up models.."))
        for v in self.data["views"].values():
            v.model().deleteLater()
            v.setModel(None)

        self.info(self.tr("Cleaning up terminal.."))
        # BUGFIX: the original `for item in items: del(item)` only
        # unbound the loop variable each iteration and removed nothing
        # from the model; empty the list in-place so the terminal model
        # actually releases its references.
        del self.data["models"]["terminal"].items[:]

        self.info(self.tr("Cleaning up controller.."))
        self.controller.cleanup()

        self.info(self.tr("All clean!"))
        self.info(self.tr("Good bye"))
        return super(Window, self).closeEvent(event)

    self.info(self.tr("Closing.."))

    def on_problem():
        # Shown if shutdown didn't complete within the 2s grace period
        self.heads_up("Warning", "Had trouble closing down. "
                      "Please tell someone and try again.")
        self.show()

    if self.controller.is_running:
        # Ask the run to stop, close once it reports finished, and
        # fall back to a warning if that never happens.
        self.info(self.tr("..as soon as processing is finished.."))
        self.controller.is_running = False
        self.finished.connect(self.close)
        util.defer(2000, on_problem)
        return event.ignore()

    # First pass: flag the closing state and re-enter shortly.
    self.data["state"]["is_closing"] = True

    util.defer(200, self.close)
    return event.ignore()
def reject(self):
    """Handle ESC key

    While a run is in flight, ESC stops it instead of closing the
    dialog.
    """
    if not self.controller.is_running:
        return

    self.info(self.tr("Stopping.."))
    self.controller.is_running = False
# -------------------------------------------------------------------------
#
# Feedback
#
# -------------------------------------------------------------------------
def info(self, message):
    """Print user-facing information

    Shows `message` in the footer label, mirrors it into the terminal
    model, restarts the fade animation and echoes it to the shell.

    Arguments:
        message (str): Text message for the user
    """
    self.findChild(QtWidgets.QLabel, "Info").setText(message)

    # Include message in terminal
    self.data["models"]["terminal"].append({
        "label": message,
        "type": "info",
    })

    # Restart so back-to-back messages each get a full fade cycle
    animation = self.data["animation"]["display_info"]
    animation.stop()
    animation.start()

    # TODO(marcus): Should this be configurable? Do we want
    # the shell to fill up with these messages?
    util.u_print(message)
def warning(self, message):
    """Block processing and print warning until user hits "Continue"

    Currently a stub: simply forwards the message to `info` — the
    blocking behaviour described above is not implemented yet.

    Arguments:
        message (str): Message to display
    """

    # TODO(marcus): Implement this.
    self.info(message)
def heads_up(self, title, message, command=None):
    """Provide a front-and-center message with optional command

    Currently a stub: `title` and `command` are ignored and only the
    message is forwarded to `info`.

    Arguments:
        title (str): Bold and short message
        message (str): Extended message
        command (optional, callable): Function is provided as a button
    """

    # TODO(marcus): Implement this.
    self.info(message)
|
pyblish/pyblish-lite | pyblish_lite/window.py | Window.on_plugin_action_menu_requested | python | def on_plugin_action_menu_requested(self, pos):
index = self.data["views"]["right"].indexAt(pos)
actions = index.data(model.Actions)
if not actions:
return
menu = QtWidgets.QMenu(self)
plugins_index = self.data["models"]["filter"].mapToSource(index)
plugin = self.data["models"]["plugins"].items[plugins_index.row()]
print("plugin is: %s" % plugin)
for action in actions:
qaction = QtWidgets.QAction(action.label or action.__name__, self)
qaction.triggered.connect(partial(self.act, plugin, action))
menu.addAction(qaction)
menu.popup(self.data["views"]["right"].viewport().mapToGlobal(pos)) | The user right-clicked on a plug-in
__________
| |
| Action 1 |
| Action 2 |
| Action 3 |
| |
|__________| | train | https://github.com/pyblish/pyblish-lite/blob/9172b81c7ae19a36e99c89dd16e102201992dc20/pyblish_lite/window.py#L724-L752 | null | class Window(QtWidgets.QDialog):
def __init__(self, controller, parent=None):
super(Window, self).__init__(parent)
icon = QtGui.QIcon(util.get_asset("img", "logo-extrasmall.png"))
self.setWindowFlags(self.windowFlags() |
QtCore.Qt.WindowTitleHint |
QtCore.Qt.WindowMaximizeButtonHint |
QtCore.Qt.WindowMinimizeButtonHint |
QtCore.Qt.WindowCloseButtonHint)
self.setWindowIcon(icon)
self.setAttribute(QtCore.Qt.WA_DeleteOnClose)
self.controller = controller
"""General layout
__________________ _____________________
| | | | | |
| Header | --> | Tab | Tab | Tab |
|__________________| |_______|_______|_____|
| | _____________________
| | | |
| | | |
| Body | | |
| | --> | Page |
| | | |
| | |_____________________|
|__________________| _____________________
| | | | |
| Footer | | Status | Buttons |
|__________________| |___________|_________|
"""
header = QtWidgets.QWidget()
artist_tab = QtWidgets.QRadioButton()
overview_tab = QtWidgets.QRadioButton()
terminal_tab = QtWidgets.QRadioButton()
spacer = QtWidgets.QWidget()
layout = QtWidgets.QHBoxLayout(header)
layout.addWidget(artist_tab, 0)
layout.addWidget(overview_tab, 0)
layout.addWidget(terminal_tab, 0)
layout.addWidget(spacer, 1) # Compress items to the left
layout.setContentsMargins(0, 0, 0, 0)
layout.setSpacing(0)
"""Artist Page
__________________
| |
| | ------------ |
| | ----- |
| |
| | -------- |
| | ------- |
| |
|__________________|
"""
artist_page = QtWidgets.QWidget()
artist_view = view.Item()
artist_delegate = delegate.Artist()
artist_view.setItemDelegate(artist_delegate)
layout = QtWidgets.QVBoxLayout(artist_page)
layout.addWidget(artist_view)
layout.setContentsMargins(5, 5, 5, 5)
layout.setSpacing(0)
"""Overview Page
___________________
| |
| o ----- o---- |
| o ---- o--- |
| o ---- o---- |
| o ---- o------ |
| |
|__________________|
"""
overview_page = QtWidgets.QWidget()
left_view = view.Item()
right_view = view.Item()
item_delegate = delegate.Item()
left_view.setItemDelegate(item_delegate)
right_view.setItemDelegate(item_delegate)
layout = QtWidgets.QHBoxLayout(overview_page)
layout.addWidget(left_view, 1)
layout.addWidget(right_view, 1)
layout.setContentsMargins(5, 5, 5, 5)
layout.setSpacing(0)
"""Terminal
__________________
| |
| \ |
| \ |
| / |
| / ______ |
| |
|__________________|
"""
terminal_container = QtWidgets.QWidget()
terminal_delegate = delegate.Terminal()
terminal_view = view.LogView()
terminal_view.setItemDelegate(terminal_delegate)
layout = QtWidgets.QVBoxLayout(terminal_container)
layout.addWidget(terminal_view)
layout.setContentsMargins(5, 5, 5, 5)
layout.setSpacing(0)
terminal_footer = QtWidgets.QWidget()
search_box = QtWidgets.QLineEdit()
instance_combo = QtWidgets.QComboBox()
plugin_combo = QtWidgets.QComboBox()
show_errors = QtWidgets.QCheckBox()
show_records = QtWidgets.QCheckBox()
show_debug = QtWidgets.QCheckBox()
show_info = QtWidgets.QCheckBox()
show_warning = QtWidgets.QCheckBox()
show_error = QtWidgets.QCheckBox()
show_critical = QtWidgets.QCheckBox()
layout = QtWidgets.QHBoxLayout(terminal_footer)
for w in (search_box,
instance_combo,
plugin_combo,
show_errors,
show_records,
show_debug,
show_info,
show_warning,
show_error,
show_critical):
layout.addWidget(w)
layout.setContentsMargins(0, 0, 0, 0)
layout.setSpacing(3)
terminal_page = QtWidgets.QWidget()
layout = QtWidgets.QVBoxLayout(terminal_page)
layout.addWidget(terminal_container)
# layout.addWidget(terminal_footer) # TODO
layout.setContentsMargins(0, 0, 0, 0)
layout.setSpacing(0)
# Add some room between window borders and contents
body = QtWidgets.QWidget()
layout = QtWidgets.QHBoxLayout(body)
layout.setContentsMargins(5, 5, 5, 0)
layout.addWidget(artist_page)
layout.addWidget(overview_page)
layout.addWidget(terminal_page)
"""Comment Box
____________________________
|> My comment |
| |
|____________________________|
"""
comment_box = QtWidgets.QLineEdit()
comment_placeholder = QtWidgets.QLabel(
self.tr("Comment.."), comment_box)
comment_placeholder.move(2, 2)
comment_box.setEnabled(False)
comment_box.hide()
"""Details View
____________________________
| |
| An Item 23 ms |
| - family |
| |
|----------------------------|
| |
| Docstring |
|____________________________|
"""
details = view.Details(self)
"""Footer
______________________
| ___ ___ |
| | o || > ||
| |___||___||
|______________________|
"""
footer = QtWidgets.QWidget()
info = QtWidgets.QLabel()
spacer = QtWidgets.QWidget()
reset = QtWidgets.QPushButton(awesome["refresh"])
validate = QtWidgets.QPushButton(awesome["flask"])
play = QtWidgets.QPushButton(awesome["play"])
stop = QtWidgets.QPushButton(awesome["stop"])
layout = QtWidgets.QHBoxLayout(footer)
layout.setContentsMargins(5, 5, 5, 5)
layout.addWidget(info, 0)
layout.addWidget(spacer, 1)
layout.addWidget(reset, 0)
layout.addWidget(validate, 0)
layout.addWidget(play, 0)
layout.addWidget(stop, 0)
# Placeholder for when GUI is closing
# TODO(marcus): Fade to black and the the user about what's happening
closing_placeholder = QtWidgets.QWidget(self)
closing_placeholder.setSizePolicy(QtWidgets.QSizePolicy.Expanding,
QtWidgets.QSizePolicy.Expanding)
closing_placeholder.hide()
# Main layout
layout = QtWidgets.QVBoxLayout(self)
layout.addWidget(header, 0)
layout.addWidget(body, 3)
layout.addWidget(closing_placeholder, 1)
layout.addWidget(comment_box, 0)
layout.addWidget(footer, 0)
layout.setContentsMargins(0, 0, 0, 0)
layout.setSpacing(0)
"""Animation
___
/ \
| | ___
\___/ / \
___ | |
/ \ \___/
| |
\___/
"""
# Display info
info_effect = QtWidgets.QGraphicsOpacityEffect(info)
info.setGraphicsEffect(info_effect)
timeline = QtCore.QSequentialAnimationGroup()
on = QtCore.QPropertyAnimation(info_effect, b"opacity")
on.setDuration(0)
on.setStartValue(0)
on.setEndValue(1)
off = QtCore.QPropertyAnimation(info_effect, b"opacity")
off.setDuration(0)
off.setStartValue(1)
off.setEndValue(0)
fade = QtCore.QPropertyAnimation(info_effect, b"opacity")
fade.setDuration(500)
fade.setStartValue(1.0)
fade.setEndValue(0.0)
timeline.addAnimation(on)
timeline.addPause(50)
timeline.addAnimation(off)
timeline.addPause(50)
timeline.addAnimation(on)
timeline.addPause(2000)
timeline.addAnimation(fade)
info_animation = timeline
"""Setup
Widgets are referred to in CSS via their object-name. We
use the same mechanism internally to refer to objects; so rather
than storing widgets as self.my_widget, it is referred to as:
>>> my_widget = self.findChild(QtWidgets.QWidget, "MyWidget")
This way there is only ever a single method of referring to any widget.
___
| |
/\/ \/\
/ _ \
\ / \ /
| | | |
/ \_/ \
\ /
\/\ /\/
|___|
"""
instance_model = model.Instance()
plugin_model = model.Plugin()
terminal_model = model.Terminal()
filter_model = model.ProxyModel(plugin_model)
artist_view.setModel(instance_model)
left_view.setModel(instance_model)
right_view.setModel(filter_model)
terminal_view.setModel(terminal_model)
instance_combo.setModel(instance_model)
plugin_combo.setModel(plugin_model)
names = {
# Main
"Header": header,
"Body": body,
"Footer": footer,
"Info": info,
# Modals
"Details": details,
# Pages
"Artist": artist_page,
"Overview": overview_page,
"Terminal": terminal_page,
# Tabs
"ArtistTab": artist_tab,
"OverviewTab": overview_tab,
"TerminalTab": terminal_tab,
# Buttons
"Play": play,
"Validate": validate,
"Reset": reset,
"Stop": stop,
# Misc
"CommentBox": comment_box,
"CommentPlaceholder": comment_placeholder,
"ClosingPlaceholder": closing_placeholder,
}
for name, w in names.items():
w.setObjectName(name)
# Enable CSS on plain QWidget objects
for w in (header,
body,
artist_page,
comment_box,
overview_page,
terminal_page,
footer,
play,
validate,
stop,
details,
reset,
closing_placeholder):
w.setAttribute(QtCore.Qt.WA_StyledBackground)
self.data = {
"views": {
"artist": artist_view,
"left": left_view,
"right": right_view,
"terminal": terminal_view,
},
"modals": {
"details": details,
},
"models": {
"instances": instance_model,
"plugins": plugin_model,
"filter": filter_model,
"terminal": terminal_model,
},
"terminal_toggles": {
"record": show_records,
"debug": show_debug,
"info": show_info,
"warning": show_warning,
"error": show_error,
"critical": show_critical
},
"tabs": {
"artist": artist_tab,
"overview": overview_tab,
"terminal": terminal_tab,
"current": "artist"
},
"pages": {
"artist": artist_page,
"overview": overview_page,
"terminal": terminal_page,
},
"buttons": {
"play": play,
"validate": validate,
"stop": stop,
"reset": reset
},
"animation": {
"display_info": info_animation,
},
"state": {
"is_closing": False,
}
}
# Pressing Enter defaults to Play
play.setFocus()
"""Signals
________ ________
|________|-->|________|
|
|
___v____
|________|
"""
artist_tab.toggled.connect(
lambda: self.on_tab_changed("artist"))
overview_tab.toggled.connect(
lambda: self.on_tab_changed("overview"))
terminal_tab.toggled.connect(
lambda: self.on_tab_changed("terminal"))
controller.was_reset.connect(self.on_was_reset)
controller.was_validated.connect(self.on_was_validated)
controller.was_published.connect(self.on_was_published)
controller.was_acted.connect(self.on_was_acted)
controller.was_finished.connect(self.on_finished)
# Discovery happens synchronously during reset, that's
# why it's important that this connection is triggered
# right away.
controller.was_discovered.connect(self.on_was_discovered,
QtCore.Qt.DirectConnection)
# This is called synchronously on each process
controller.was_processed.connect(self.on_was_processed,
QtCore.Qt.DirectConnection)
# NOTE: Listeners to this signal are run in the main thread
controller.about_to_process.connect(self.on_about_to_process,
QtCore.Qt.DirectConnection)
artist_view.toggled.connect(self.on_item_toggled)
left_view.toggled.connect(self.on_item_toggled)
right_view.toggled.connect(self.on_item_toggled)
artist_view.inspected.connect(self.on_item_inspected)
left_view.inspected.connect(self.on_item_inspected)
right_view.inspected.connect(self.on_item_inspected)
terminal_view.inspected.connect(self.on_item_inspected)
reset.clicked.connect(self.on_reset_clicked)
validate.clicked.connect(self.on_validate_clicked)
play.clicked.connect(self.on_play_clicked)
stop.clicked.connect(self.on_stop_clicked)
comment_box.textChanged.connect(self.on_comment_entered)
comment_box.returnPressed.connect(self.on_play_clicked)
right_view.customContextMenuRequested.connect(
self.on_plugin_action_menu_requested)
for box in (show_errors,
show_records,
show_debug,
show_info,
show_warning,
show_error,
show_critical):
box.setChecked(True)
self.data["tabs"][settings.InitialTab].setChecked(True)
# -------------------------------------------------------------------------
#
# Event handlers
#
# -------------------------------------------------------------------------
def on_item_expanded(self, index, state):
if not index.data(model.IsExpandable):
return
if state is None:
state = not index.data(model.Expanded)
# Collapse others
for i in index.model():
index.model().setData(i, False, model.Expanded)
index.model().setData(index, state, model.Expanded)
def on_item_inspected(self, index):
details = self.data["modals"]["details"]
details.move(QtGui.QCursor.pos())
if index.data(model.Type) == "record":
# Compose available data
data = list()
for key, value in index.data(model.Data).items():
if key.startswith("_"):
continue
data.append("%s %s" % ((key + ":").ljust(12), value))
text = "\n".join(data)
details.show({
"icon": awesome["circle"],
"heading": index.data(model.Label).split("\n")[0],
"subheading": "LogRecord (%s)" % index.data(model.LogLevel),
"text": text,
"timestamp": "",
})
elif index.data(model.Type) == "error":
# Compose available data
data = list()
for key, value in index.data(model.Data).items():
if key.startswith("_"):
continue
data.append("%s %s" % ((key + ":").ljust(12), value))
text = "\n".join(data)
details.show({
"icon": awesome["exclamation-triangle"],
"heading": index.data(model.Label).split("\n")[0],
"subheading": "Exception",
"text": text,
"timestamp": "",
})
elif index.data(model.Type) == "plugin":
details.show({
"icon": index.data(model.Icon) or awesome["filter"],
"heading": index.data(model.Label),
"subheading": ", ".join(index.data(model.Families)),
"text": index.data(model.Docstring) or "",
"timestamp": str(index.data(model.Duration) or 0) + " ms",
})
elif index.data(model.Type) == "instance":
details.show({
"icon": index.data(model.Icon) or awesome["file"],
"heading": index.data(model.Label),
"subheading": ", ".join(index.data(model.Families)),
"text": "",
"timestamp": str(index.data(model.Duration) or 0) + " ms",
})
def on_item_toggled(self, index, state=None):
"""An item is requesting to be toggled"""
if not index.data(model.IsIdle):
return self.info("Cannot toggle")
if not index.data(model.IsOptional):
return self.info("This item is mandatory")
if state is None:
state = not index.data(model.IsChecked)
index.model().setData(index, state, model.IsChecked)
# Withdraw option to publish if no instances are toggled
play = self.findChild(QtWidgets.QWidget, "Play")
validate = self.findChild(QtWidgets.QWidget, "Validate")
any_instances = any(index.data(model.IsChecked)
for index in self.data["models"]["instances"])
play.setEnabled(any_instances)
validate.setEnabled(any_instances)
# Emit signals
if index.data(model.Type) == "instance":
instance = self.data["models"]["instances"].items[index.row()]
util.defer(
100, lambda: self.controller.emit_(
signal="instanceToggled",
kwargs={"new_value": state,
"old_value": not state,
"instance": instance}))
if index.data(model.Type) == "plugin":
util.defer(
100, lambda: self.controller.emit_(
signal="pluginToggled",
kwargs={"new_value": state,
"old_value": not state,
"plugin": index.data(model.Object)}))
def on_tab_changed(self, target):
for page in self.data["pages"].values():
page.hide()
page = self.data["pages"][target]
comment_box = self.findChild(QtWidgets.QWidget, "CommentBox")
if target == "terminal":
comment_box.hide()
else:
comment_box.setVisible(comment_box.isEnabled())
page.show()
self.data["tabs"]["current"] = target
def on_validate_clicked(self):
comment_box = self.findChild(QtWidgets.QWidget, "CommentBox")
comment_box.setEnabled(False)
comment_box.hide()
self.validate()
def on_play_clicked(self):
comment_box = self.findChild(QtWidgets.QWidget, "CommentBox")
comment_box.setEnabled(False)
comment_box.hide()
self.publish()
def on_reset_clicked(self):
self.reset()
def on_stop_clicked(self):
self.info("Stopping..")
self.controller.is_running = False
def on_comment_entered(self):
"""The user has typed a comment"""
text_edit = self.findChild(QtWidgets.QWidget, "CommentBox")
comment = text_edit.text()
# Store within context
context = self.controller.context
context.data["comment"] = comment
placeholder = self.findChild(QtWidgets.QLabel, "CommentPlaceholder")
placeholder.setVisible(not comment)
def on_about_to_process(self, plugin, instance):
"""Reflect currently running pair in GUI"""
if instance is not None:
instance_model = self.data["models"]["instances"]
index = instance_model.items.index(instance)
index = instance_model.createIndex(index, 0)
instance_model.setData(index, True, model.IsProcessing)
plugin_model = self.data["models"]["plugins"]
index = plugin_model.items.index(plugin)
index = plugin_model.createIndex(index, 0)
plugin_model.setData(index, True, model.IsProcessing)
self.info("%s %s" % (self.tr("Processing"), index.data(model.Label)))
def on_was_discovered(self):
models = self.data["models"]
for Plugin in self.controller.plugins:
models["plugins"].append(Plugin)
def on_was_reset(self):
models = self.data["models"]
self.info(self.tr("Finishing up reset.."))
models["instances"].reset()
for instance in self.controller.context:
models["instances"].append(instance)
buttons = self.data["buttons"]
buttons["play"].show()
buttons["validate"].show()
buttons["reset"].show()
buttons["stop"].hide()
models["instances"].restore_checkstate()
models["plugins"].restore_checkstate()
# Append placeholder comment from Context
# This allows users to inject a comment from elsewhere,
# or to perhaps provide a placeholder comment/template
# for artists to fill in.
comment = self.controller.context.data.get("comment")
comment_box = self.findChild(QtWidgets.QWidget, "CommentBox")
comment_box.setText(comment or None)
comment_box.setEnabled(comment is not None)
# Refresh tab
self.on_tab_changed(self.data["tabs"]["current"])
self.controller.current_error = None
self.on_finished()
def on_was_validated(self):
plugin_model = self.data["models"]["plugins"]
instance_model = self.data["models"]["instances"]
for index in plugin_model:
index.model().setData(index, False, model.IsIdle)
for index in instance_model:
index.model().setData(index, False, model.IsIdle)
buttons = self.data["buttons"]
buttons["reset"].show()
buttons["play"].show()
buttons["stop"].hide()
self.on_finished()
def on_was_published(self):
plugin_model = self.data["models"]["plugins"]
instance_model = self.data["models"]["instances"]
for index in plugin_model:
index.model().setData(index, False, model.IsIdle)
for index in instance_model:
index.model().setData(index, False, model.IsIdle)
buttons = self.data["buttons"]
buttons["reset"].show()
buttons["stop"].hide()
comment_box = self.findChild(QtWidgets.QWidget, "CommentBox")
comment_box.hide()
self.on_finished()
def on_was_processed(self, result):
models = self.data["models"]
for instance in self.controller.context:
if instance.id not in models["instances"].ids:
models["instances"].append(instance)
family = instance.data["family"]
if family:
plugins_filter = self.data["models"]["filter"]
plugins_filter.add_inclusion(role="families", value=family)
families = instance.data.get("families")
if families:
for f in families:
plugins_filter = self.data["models"]["filter"]
plugins_filter.add_inclusion(role="families", value=f)
models["plugins"].update_with_result(result)
models["instances"].update_with_result(result)
models["terminal"].update_with_result(result)
def on_was_acted(self, result):
buttons = self.data["buttons"]
buttons["reset"].show()
buttons["stop"].hide()
# Update action with result
model_ = self.data["models"]["plugins"]
index = model_.items.index(result["plugin"])
index = model_.createIndex(index, 0)
model_.setData(index, not result["success"], model.ActionFailed)
model_.setData(index, False, model.IsProcessing)
models = self.data["models"]
models["terminal"].update_with_result(result)
self.on_finished()
def on_finished(self):
"""Finished signal handler"""
self.controller.is_running = False
error = self.controller.current_error
if error is not None:
self.info(self.tr("Stopped due to error(s), see Terminal."))
else:
self.info(self.tr("Finished successfully!"))
# -------------------------------------------------------------------------
#
# Functions
#
# -------------------------------------------------------------------------
def reset(self):
"""Prepare GUI for reset"""
self.info(self.tr("About to reset.."))
models = self.data["models"]
models["instances"].store_checkstate()
models["plugins"].store_checkstate()
# Reset current ids to secure no previous instances get mixed in.
models["instances"].ids = []
for m in models.values():
m.reset()
for b in self.data["buttons"].values():
b.hide()
comment_box = self.findChild(QtWidgets.QWidget, "CommentBox")
comment_box.hide()
util.defer(500, self.controller.reset)
def validate(self):
self.info(self.tr("Preparing validate.."))
for button in self.data["buttons"].values():
button.hide()
self.data["buttons"]["stop"].show()
util.defer(5, self.controller.validate)
def publish(self):
self.info(self.tr("Preparing publish.."))
for button in self.data["buttons"].values():
button.hide()
self.data["buttons"]["stop"].show()
util.defer(5, self.controller.publish)
def act(self, plugin, action):
self.info("%s %s.." % (self.tr("Preparing"), action))
for button in self.data["buttons"].values():
button.hide()
self.data["buttons"]["stop"].show()
self.controller.is_running = True
# Cause view to update, but it won't visually
# happen until Qt is given time to idle..
model_ = self.data["models"]["plugins"]
index = model_.items.index(plugin)
index = model_.createIndex(index, 0)
for key, value in {model.ActionIdle: False,
model.ActionFailed: False,
model.IsProcessing: True}.items():
model_.setData(index, value, key)
# Give Qt time to draw
util.defer(100, lambda: self.controller.act(plugin, action))
self.info(self.tr("Action prepared."))
def closeEvent(self, event):
"""Perform post-flight checks before closing
Make sure processing of any kind is wrapped up before closing
"""
# Make it snappy, but take care to clean it all up.
# TODO(marcus): Enable GUI to return on problem, such
# as asking whether or not the user really wants to quit
# given there are things currently running.
self.hide()
if self.data["state"]["is_closing"]:
# Explicitly clear potentially referenced data
self.info(self.tr("Cleaning up models.."))
for v in self.data["views"].values():
v.model().deleteLater()
v.setModel(None)
self.info(self.tr("Cleaning up terminal.."))
for item in self.data["models"]["terminal"].items:
del(item)
self.info(self.tr("Cleaning up controller.."))
self.controller.cleanup()
self.info(self.tr("All clean!"))
self.info(self.tr("Good bye"))
return super(Window, self).closeEvent(event)
self.info(self.tr("Closing.."))
def on_problem():
self.heads_up("Warning", "Had trouble closing down. "
"Please tell someone and try again.")
self.show()
if self.controller.is_running:
self.info(self.tr("..as soon as processing is finished.."))
self.controller.is_running = False
self.finished.connect(self.close)
util.defer(2000, on_problem)
return event.ignore()
self.data["state"]["is_closing"] = True
util.defer(200, self.close)
return event.ignore()
def reject(self):
"""Handle ESC key"""
if self.controller.is_running:
self.info(self.tr("Stopping.."))
self.controller.is_running = False
# -------------------------------------------------------------------------
#
# Feedback
#
# -------------------------------------------------------------------------
def info(self, message):
"""Print user-facing information
Arguments:
message (str): Text message for the user
"""
info = self.findChild(QtWidgets.QLabel, "Info")
info.setText(message)
# Include message in terminal
self.data["models"]["terminal"].append({
"label": message,
"type": "info"
})
animation = self.data["animation"]["display_info"]
animation.stop()
animation.start()
# TODO(marcus): Should this be configurable? Do we want
# the shell to fill up with these messages?
util.u_print(message)
def warning(self, message):
"""Block processing and print warning until user hits "Continue"
Arguments:
message (str): Message to display
"""
# TODO(marcus): Implement this.
self.info(message)
def heads_up(self, title, message, command=None):
"""Provide a front-and-center message with optional command
Arguments:
title (str): Bold and short message
message (str): Extended message
command (optional, callable): Function is provided as a button
"""
# TODO(marcus): Implement this.
self.info(message)
|
pyblish/pyblish-lite | pyblish_lite/window.py | Window.on_finished | python | def on_finished(self):
self.controller.is_running = False
error = self.controller.current_error
if error is not None:
self.info(self.tr("Stopped due to error(s), see Terminal."))
else:
self.info(self.tr("Finished successfully!")) | Finished signal handler | train | https://github.com/pyblish/pyblish-lite/blob/9172b81c7ae19a36e99c89dd16e102201992dc20/pyblish_lite/window.py#L871-L879 | null | class Window(QtWidgets.QDialog):
def __init__(self, controller, parent=None):
super(Window, self).__init__(parent)
icon = QtGui.QIcon(util.get_asset("img", "logo-extrasmall.png"))
self.setWindowFlags(self.windowFlags() |
QtCore.Qt.WindowTitleHint |
QtCore.Qt.WindowMaximizeButtonHint |
QtCore.Qt.WindowMinimizeButtonHint |
QtCore.Qt.WindowCloseButtonHint)
self.setWindowIcon(icon)
self.setAttribute(QtCore.Qt.WA_DeleteOnClose)
self.controller = controller
"""General layout
__________________ _____________________
| | | | | |
| Header | --> | Tab | Tab | Tab |
|__________________| |_______|_______|_____|
| | _____________________
| | | |
| | | |
| Body | | |
| | --> | Page |
| | | |
| | |_____________________|
|__________________| _____________________
| | | | |
| Footer | | Status | Buttons |
|__________________| |___________|_________|
"""
header = QtWidgets.QWidget()
artist_tab = QtWidgets.QRadioButton()
overview_tab = QtWidgets.QRadioButton()
terminal_tab = QtWidgets.QRadioButton()
spacer = QtWidgets.QWidget()
layout = QtWidgets.QHBoxLayout(header)
layout.addWidget(artist_tab, 0)
layout.addWidget(overview_tab, 0)
layout.addWidget(terminal_tab, 0)
layout.addWidget(spacer, 1) # Compress items to the left
layout.setContentsMargins(0, 0, 0, 0)
layout.setSpacing(0)
"""Artist Page
__________________
| |
| | ------------ |
| | ----- |
| |
| | -------- |
| | ------- |
| |
|__________________|
"""
artist_page = QtWidgets.QWidget()
artist_view = view.Item()
artist_delegate = delegate.Artist()
artist_view.setItemDelegate(artist_delegate)
layout = QtWidgets.QVBoxLayout(artist_page)
layout.addWidget(artist_view)
layout.setContentsMargins(5, 5, 5, 5)
layout.setSpacing(0)
"""Overview Page
___________________
| |
| o ----- o---- |
| o ---- o--- |
| o ---- o---- |
| o ---- o------ |
| |
|__________________|
"""
overview_page = QtWidgets.QWidget()
left_view = view.Item()
right_view = view.Item()
item_delegate = delegate.Item()
left_view.setItemDelegate(item_delegate)
right_view.setItemDelegate(item_delegate)
layout = QtWidgets.QHBoxLayout(overview_page)
layout.addWidget(left_view, 1)
layout.addWidget(right_view, 1)
layout.setContentsMargins(5, 5, 5, 5)
layout.setSpacing(0)
"""Terminal
__________________
| |
| \ |
| \ |
| / |
| / ______ |
| |
|__________________|
"""
terminal_container = QtWidgets.QWidget()
terminal_delegate = delegate.Terminal()
terminal_view = view.LogView()
terminal_view.setItemDelegate(terminal_delegate)
layout = QtWidgets.QVBoxLayout(terminal_container)
layout.addWidget(terminal_view)
layout.setContentsMargins(5, 5, 5, 5)
layout.setSpacing(0)
terminal_footer = QtWidgets.QWidget()
search_box = QtWidgets.QLineEdit()
instance_combo = QtWidgets.QComboBox()
plugin_combo = QtWidgets.QComboBox()
show_errors = QtWidgets.QCheckBox()
show_records = QtWidgets.QCheckBox()
show_debug = QtWidgets.QCheckBox()
show_info = QtWidgets.QCheckBox()
show_warning = QtWidgets.QCheckBox()
show_error = QtWidgets.QCheckBox()
show_critical = QtWidgets.QCheckBox()
layout = QtWidgets.QHBoxLayout(terminal_footer)
for w in (search_box,
instance_combo,
plugin_combo,
show_errors,
show_records,
show_debug,
show_info,
show_warning,
show_error,
show_critical):
layout.addWidget(w)
layout.setContentsMargins(0, 0, 0, 0)
layout.setSpacing(3)
terminal_page = QtWidgets.QWidget()
layout = QtWidgets.QVBoxLayout(terminal_page)
layout.addWidget(terminal_container)
# layout.addWidget(terminal_footer) # TODO
layout.setContentsMargins(0, 0, 0, 0)
layout.setSpacing(0)
# Add some room between window borders and contents
body = QtWidgets.QWidget()
layout = QtWidgets.QHBoxLayout(body)
layout.setContentsMargins(5, 5, 5, 0)
layout.addWidget(artist_page)
layout.addWidget(overview_page)
layout.addWidget(terminal_page)
"""Comment Box
____________________________
|> My comment |
| |
|____________________________|
"""
comment_box = QtWidgets.QLineEdit()
comment_placeholder = QtWidgets.QLabel(
self.tr("Comment.."), comment_box)
comment_placeholder.move(2, 2)
comment_box.setEnabled(False)
comment_box.hide()
"""Details View
____________________________
| |
| An Item 23 ms |
| - family |
| |
|----------------------------|
| |
| Docstring |
|____________________________|
"""
details = view.Details(self)
"""Footer
______________________
| ___ ___ |
| | o || > ||
| |___||___||
|______________________|
"""
footer = QtWidgets.QWidget()
info = QtWidgets.QLabel()
spacer = QtWidgets.QWidget()
reset = QtWidgets.QPushButton(awesome["refresh"])
validate = QtWidgets.QPushButton(awesome["flask"])
play = QtWidgets.QPushButton(awesome["play"])
stop = QtWidgets.QPushButton(awesome["stop"])
layout = QtWidgets.QHBoxLayout(footer)
layout.setContentsMargins(5, 5, 5, 5)
layout.addWidget(info, 0)
layout.addWidget(spacer, 1)
layout.addWidget(reset, 0)
layout.addWidget(validate, 0)
layout.addWidget(play, 0)
layout.addWidget(stop, 0)
# Placeholder for when GUI is closing
# TODO(marcus): Fade to black and the the user about what's happening
closing_placeholder = QtWidgets.QWidget(self)
closing_placeholder.setSizePolicy(QtWidgets.QSizePolicy.Expanding,
QtWidgets.QSizePolicy.Expanding)
closing_placeholder.hide()
# Main layout
layout = QtWidgets.QVBoxLayout(self)
layout.addWidget(header, 0)
layout.addWidget(body, 3)
layout.addWidget(closing_placeholder, 1)
layout.addWidget(comment_box, 0)
layout.addWidget(footer, 0)
layout.setContentsMargins(0, 0, 0, 0)
layout.setSpacing(0)
"""Animation
___
/ \
| | ___
\___/ / \
___ | |
/ \ \___/
| |
\___/
"""
# Display info
info_effect = QtWidgets.QGraphicsOpacityEffect(info)
info.setGraphicsEffect(info_effect)
timeline = QtCore.QSequentialAnimationGroup()
on = QtCore.QPropertyAnimation(info_effect, b"opacity")
on.setDuration(0)
on.setStartValue(0)
on.setEndValue(1)
off = QtCore.QPropertyAnimation(info_effect, b"opacity")
off.setDuration(0)
off.setStartValue(1)
off.setEndValue(0)
fade = QtCore.QPropertyAnimation(info_effect, b"opacity")
fade.setDuration(500)
fade.setStartValue(1.0)
fade.setEndValue(0.0)
timeline.addAnimation(on)
timeline.addPause(50)
timeline.addAnimation(off)
timeline.addPause(50)
timeline.addAnimation(on)
timeline.addPause(2000)
timeline.addAnimation(fade)
info_animation = timeline
"""Setup
Widgets are referred to in CSS via their object-name. We
use the same mechanism internally to refer to objects; so rather
than storing widgets as self.my_widget, it is referred to as:
>>> my_widget = self.findChild(QtWidgets.QWidget, "MyWidget")
This way there is only ever a single method of referring to any widget.
___
| |
/\/ \/\
/ _ \
\ / \ /
| | | |
/ \_/ \
\ /
\/\ /\/
|___|
"""
instance_model = model.Instance()
plugin_model = model.Plugin()
terminal_model = model.Terminal()
filter_model = model.ProxyModel(plugin_model)
artist_view.setModel(instance_model)
left_view.setModel(instance_model)
right_view.setModel(filter_model)
terminal_view.setModel(terminal_model)
instance_combo.setModel(instance_model)
plugin_combo.setModel(plugin_model)
names = {
# Main
"Header": header,
"Body": body,
"Footer": footer,
"Info": info,
# Modals
"Details": details,
# Pages
"Artist": artist_page,
"Overview": overview_page,
"Terminal": terminal_page,
# Tabs
"ArtistTab": artist_tab,
"OverviewTab": overview_tab,
"TerminalTab": terminal_tab,
# Buttons
"Play": play,
"Validate": validate,
"Reset": reset,
"Stop": stop,
# Misc
"CommentBox": comment_box,
"CommentPlaceholder": comment_placeholder,
"ClosingPlaceholder": closing_placeholder,
}
for name, w in names.items():
w.setObjectName(name)
# Enable CSS on plain QWidget objects
for w in (header,
body,
artist_page,
comment_box,
overview_page,
terminal_page,
footer,
play,
validate,
stop,
details,
reset,
closing_placeholder):
w.setAttribute(QtCore.Qt.WA_StyledBackground)
self.data = {
"views": {
"artist": artist_view,
"left": left_view,
"right": right_view,
"terminal": terminal_view,
},
"modals": {
"details": details,
},
"models": {
"instances": instance_model,
"plugins": plugin_model,
"filter": filter_model,
"terminal": terminal_model,
},
"terminal_toggles": {
"record": show_records,
"debug": show_debug,
"info": show_info,
"warning": show_warning,
"error": show_error,
"critical": show_critical
},
"tabs": {
"artist": artist_tab,
"overview": overview_tab,
"terminal": terminal_tab,
"current": "artist"
},
"pages": {
"artist": artist_page,
"overview": overview_page,
"terminal": terminal_page,
},
"buttons": {
"play": play,
"validate": validate,
"stop": stop,
"reset": reset
},
"animation": {
"display_info": info_animation,
},
"state": {
"is_closing": False,
}
}
# Pressing Enter defaults to Play
play.setFocus()
"""Signals
________ ________
|________|-->|________|
|
|
___v____
|________|
"""
artist_tab.toggled.connect(
lambda: self.on_tab_changed("artist"))
overview_tab.toggled.connect(
lambda: self.on_tab_changed("overview"))
terminal_tab.toggled.connect(
lambda: self.on_tab_changed("terminal"))
controller.was_reset.connect(self.on_was_reset)
controller.was_validated.connect(self.on_was_validated)
controller.was_published.connect(self.on_was_published)
controller.was_acted.connect(self.on_was_acted)
controller.was_finished.connect(self.on_finished)
# Discovery happens synchronously during reset, that's
# why it's important that this connection is triggered
# right away.
controller.was_discovered.connect(self.on_was_discovered,
QtCore.Qt.DirectConnection)
# This is called synchronously on each process
controller.was_processed.connect(self.on_was_processed,
QtCore.Qt.DirectConnection)
# NOTE: Listeners to this signal are run in the main thread
controller.about_to_process.connect(self.on_about_to_process,
QtCore.Qt.DirectConnection)
artist_view.toggled.connect(self.on_item_toggled)
left_view.toggled.connect(self.on_item_toggled)
right_view.toggled.connect(self.on_item_toggled)
artist_view.inspected.connect(self.on_item_inspected)
left_view.inspected.connect(self.on_item_inspected)
right_view.inspected.connect(self.on_item_inspected)
terminal_view.inspected.connect(self.on_item_inspected)
reset.clicked.connect(self.on_reset_clicked)
validate.clicked.connect(self.on_validate_clicked)
play.clicked.connect(self.on_play_clicked)
stop.clicked.connect(self.on_stop_clicked)
comment_box.textChanged.connect(self.on_comment_entered)
comment_box.returnPressed.connect(self.on_play_clicked)
right_view.customContextMenuRequested.connect(
self.on_plugin_action_menu_requested)
for box in (show_errors,
show_records,
show_debug,
show_info,
show_warning,
show_error,
show_critical):
box.setChecked(True)
self.data["tabs"][settings.InitialTab].setChecked(True)
# -------------------------------------------------------------------------
#
# Event handlers
#
# -------------------------------------------------------------------------
def on_item_expanded(self, index, state):
    """Expand or collapse `index`, collapsing every other item.

    Arguments:
        index: model index of the item being toggled
        state (bool or None): Target state; None toggles current state
    """
    if not index.data(model.IsExpandable):
        return

    # Default to flipping the current state
    target = not index.data(model.Expanded) if state is None else state

    # Only one item may be expanded at a time, so collapse the rest
    qt_model = index.model()
    for other in qt_model:
        qt_model.setData(other, False, model.Expanded)

    qt_model.setData(index, target, model.Expanded)
def on_item_inspected(self, index):
    """Pop up the details modal for the inspected item, at the cursor.

    What is shown depends on the item type: log records and exceptions
    display their public data as "key: value" rows; plug-ins and
    instances display label, families and processing duration.

    Arguments:
        index: model index of the item being inspected
    """
    details = self.data["modals"]["details"]
    details.move(QtGui.QCursor.pos())

    kind = index.data(model.Type)

    # "record" and "error" share the same data-composition logic;
    # they differ only in icon and subheading.
    if kind in ("record", "error"):
        text = self._format_index_data(index)
        if kind == "record":
            icon = awesome["circle"]
            subheading = "LogRecord (%s)" % index.data(model.LogLevel)
        else:
            icon = awesome["exclamation-triangle"]
            subheading = "Exception"
        details.show({
            "icon": icon,
            "heading": index.data(model.Label).split("\n")[0],
            "subheading": subheading,
            "text": text,
            "timestamp": "",
        })

    elif kind == "plugin":
        details.show({
            "icon": index.data(model.Icon) or awesome["filter"],
            "heading": index.data(model.Label),
            "subheading": ", ".join(index.data(model.Families)),
            "text": index.data(model.Docstring) or "",
            "timestamp": str(index.data(model.Duration) or 0) + " ms",
        })

    elif kind == "instance":
        details.show({
            "icon": index.data(model.Icon) or awesome["file"],
            "heading": index.data(model.Label),
            "subheading": ", ".join(index.data(model.Families)),
            "text": "",
            "timestamp": str(index.data(model.Duration) or 0) + " ms",
        })

@staticmethod
def _format_index_data(index):
    """Return the item's public data as aligned "key: value" lines.

    Keys starting with an underscore are considered private and are
    skipped, matching the previous inline behaviour.
    """
    rows = []
    for key, value in index.data(model.Data).items():
        if key.startswith("_"):
            continue
        rows.append("%s %s" % ((key + ":").ljust(12), value))
    return "\n".join(rows)
def on_item_toggled(self, index, state=None):
"""An item is requesting to be toggled"""
if not index.data(model.IsIdle):
return self.info("Cannot toggle")
if not index.data(model.IsOptional):
return self.info("This item is mandatory")
if state is None:
state = not index.data(model.IsChecked)
index.model().setData(index, state, model.IsChecked)
# Withdraw option to publish if no instances are toggled
play = self.findChild(QtWidgets.QWidget, "Play")
validate = self.findChild(QtWidgets.QWidget, "Validate")
any_instances = any(index.data(model.IsChecked)
for index in self.data["models"]["instances"])
play.setEnabled(any_instances)
validate.setEnabled(any_instances)
# Emit signals
if index.data(model.Type) == "instance":
instance = self.data["models"]["instances"].items[index.row()]
util.defer(
100, lambda: self.controller.emit_(
signal="instanceToggled",
kwargs={"new_value": state,
"old_value": not state,
"instance": instance}))
if index.data(model.Type) == "plugin":
util.defer(
100, lambda: self.controller.emit_(
signal="pluginToggled",
kwargs={"new_value": state,
"old_value": not state,
"plugin": index.data(model.Object)}))
def on_tab_changed(self, target):
for page in self.data["pages"].values():
page.hide()
page = self.data["pages"][target]
comment_box = self.findChild(QtWidgets.QWidget, "CommentBox")
if target == "terminal":
comment_box.hide()
else:
comment_box.setVisible(comment_box.isEnabled())
page.show()
self.data["tabs"]["current"] = target
def on_validate_clicked(self):
    """The user pressed the Validate button."""
    # Lock and hide the comment box while processing runs
    comment_box = self.findChild(QtWidgets.QWidget, "CommentBox")
    comment_box.setEnabled(False)
    comment_box.hide()
    self.validate()
def on_play_clicked(self):
    """The user pressed the Play (publish) button."""
    # Lock and hide the comment box while processing runs
    comment_box = self.findChild(QtWidgets.QWidget, "CommentBox")
    comment_box.setEnabled(False)
    comment_box.hide()
    self.publish()
def on_reset_clicked(self):
    """The user pressed the Reset button."""
    self.reset()
def on_stop_clicked(self):
    """The user pressed the Stop button."""
    self.info("Stopping..")
    # NOTE(review): presumably the controller observes this flag between
    # plug-ins and stops — confirm against the controller implementation.
    self.controller.is_running = False
def on_comment_entered(self):
    """The user typed into the comment box.

    The text is stored on the publish Context (so plug-ins can read
    it), and the grey placeholder label is hidden while any text is
    present.
    """
    comment = self.findChild(QtWidgets.QWidget, "CommentBox").text()

    # Persist within the context for plug-ins to pick up
    self.controller.context.data["comment"] = comment

    # Placeholder is only visible while the box is empty
    self.findChild(QtWidgets.QLabel, "CommentPlaceholder").setVisible(
        not comment)
def on_about_to_process(self, plugin, instance):
"""Reflect currently running pair in GUI"""
if instance is not None:
instance_model = self.data["models"]["instances"]
index = instance_model.items.index(instance)
index = instance_model.createIndex(index, 0)
instance_model.setData(index, True, model.IsProcessing)
plugin_model = self.data["models"]["plugins"]
index = plugin_model.items.index(plugin)
index = plugin_model.createIndex(index, 0)
plugin_model.setData(index, True, model.IsProcessing)
self.info("%s %s" % (self.tr("Processing"), index.data(model.Label)))
def on_plugin_action_menu_requested(self, pos):
"""The user right-clicked on a plug-in
__________
| |
| Action 1 |
| Action 2 |
| Action 3 |
| |
|__________|
"""
index = self.data["views"]["right"].indexAt(pos)
actions = index.data(model.Actions)
if not actions:
return
menu = QtWidgets.QMenu(self)
plugins_index = self.data["models"]["filter"].mapToSource(index)
plugin = self.data["models"]["plugins"].items[plugins_index.row()]
print("plugin is: %s" % plugin)
for action in actions:
qaction = QtWidgets.QAction(action.label or action.__name__, self)
qaction.triggered.connect(partial(self.act, plugin, action))
menu.addAction(qaction)
menu.popup(self.data["views"]["right"].viewport().mapToGlobal(pos))
def on_was_discovered(self):
    """Plug-in discovery finished; fill the plug-in model."""
    plugin_model = self.data["models"]["plugins"]
    for discovered in self.controller.plugins:
        plugin_model.append(discovered)
def on_was_reset(self):
models = self.data["models"]
self.info(self.tr("Finishing up reset.."))
models["instances"].reset()
for instance in self.controller.context:
models["instances"].append(instance)
buttons = self.data["buttons"]
buttons["play"].show()
buttons["validate"].show()
buttons["reset"].show()
buttons["stop"].hide()
models["instances"].restore_checkstate()
models["plugins"].restore_checkstate()
# Append placeholder comment from Context
# This allows users to inject a comment from elsewhere,
# or to perhaps provide a placeholder comment/template
# for artists to fill in.
comment = self.controller.context.data.get("comment")
comment_box = self.findChild(QtWidgets.QWidget, "CommentBox")
comment_box.setText(comment or None)
comment_box.setEnabled(comment is not None)
# Refresh tab
self.on_tab_changed(self.data["tabs"]["current"])
self.controller.current_error = None
self.on_finished()
def on_was_validated(self):
    """Validation finished; refresh item states and footer buttons."""
    models = self.data["models"]

    # Every plug-in and instance is now past its idle state
    for item_model in (models["plugins"], models["instances"]):
        for index in item_model:
            index.model().setData(index, False, model.IsIdle)

    buttons = self.data["buttons"]
    buttons["reset"].show()
    buttons["play"].show()
    buttons["stop"].hide()

    self.on_finished()
def on_was_published(self):
plugin_model = self.data["models"]["plugins"]
instance_model = self.data["models"]["instances"]
for index in plugin_model:
index.model().setData(index, False, model.IsIdle)
for index in instance_model:
index.model().setData(index, False, model.IsIdle)
buttons = self.data["buttons"]
buttons["reset"].show()
buttons["stop"].hide()
comment_box = self.findChild(QtWidgets.QWidget, "CommentBox")
comment_box.hide()
self.on_finished()
def on_was_processed(self, result):
models = self.data["models"]
for instance in self.controller.context:
if instance.id not in models["instances"].ids:
models["instances"].append(instance)
family = instance.data["family"]
if family:
plugins_filter = self.data["models"]["filter"]
plugins_filter.add_inclusion(role="families", value=family)
families = instance.data.get("families")
if families:
for f in families:
plugins_filter = self.data["models"]["filter"]
plugins_filter.add_inclusion(role="families", value=f)
models["plugins"].update_with_result(result)
models["instances"].update_with_result(result)
models["terminal"].update_with_result(result)
def on_was_acted(self, result):
buttons = self.data["buttons"]
buttons["reset"].show()
buttons["stop"].hide()
# Update action with result
model_ = self.data["models"]["plugins"]
index = model_.items.index(result["plugin"])
index = model_.createIndex(index, 0)
model_.setData(index, not result["success"], model.ActionFailed)
model_.setData(index, False, model.IsProcessing)
models = self.data["models"]
models["terminal"].update_with_result(result)
self.on_finished()
# -------------------------------------------------------------------------
#
# Functions
#
# -------------------------------------------------------------------------
def reset(self):
    """Prepare the GUI for a reset, then trigger it.

    Check-states are stored first so they can be restored after the
    controller has rediscovered plug-ins and instances.
    """
    self.info(self.tr("About to reset.."))

    models = self.data["models"]
    models["instances"].store_checkstate()
    models["plugins"].store_checkstate()

    # Reset current ids to secure no previous instances get mixed in.
    models["instances"].ids = []

    for each in models.values():
        each.reset()

    for button in self.data["buttons"].values():
        button.hide()

    self.findChild(QtWidgets.QWidget, "CommentBox").hide()

    # Defer so the GUI has a moment to redraw before the heavy work
    util.defer(500, self.controller.reset)
def validate(self):
    """Start validation, leaving only the Stop button visible."""
    self.info(self.tr("Preparing validate.."))

    buttons = self.data["buttons"]
    for button in buttons.values():
        button.hide()
    buttons["stop"].show()

    # Defer so the GUI can update before processing begins
    util.defer(5, self.controller.validate)
def publish(self):
    """Start publishing, leaving only the Stop button visible."""
    self.info(self.tr("Preparing publish.."))

    buttons = self.data["buttons"]
    for button in buttons.values():
        button.hide()
    buttons["stop"].show()

    # Defer so the GUI can update before processing begins
    util.defer(5, self.controller.publish)
def act(self, plugin, action):
self.info("%s %s.." % (self.tr("Preparing"), action))
for button in self.data["buttons"].values():
button.hide()
self.data["buttons"]["stop"].show()
self.controller.is_running = True
# Cause view to update, but it won't visually
# happen until Qt is given time to idle..
model_ = self.data["models"]["plugins"]
index = model_.items.index(plugin)
index = model_.createIndex(index, 0)
for key, value in {model.ActionIdle: False,
model.ActionFailed: False,
model.IsProcessing: True}.items():
model_.setData(index, value, key)
# Give Qt time to draw
util.defer(100, lambda: self.controller.act(plugin, action))
self.info(self.tr("Action prepared."))
def closeEvent(self, event):
"""Perform post-flight checks before closing
Make sure processing of any kind is wrapped up before closing
"""
# Make it snappy, but take care to clean it all up.
# TODO(marcus): Enable GUI to return on problem, such
# as asking whether or not the user really wants to quit
# given there are things currently running.
self.hide()
if self.data["state"]["is_closing"]:
# Explicitly clear potentially referenced data
self.info(self.tr("Cleaning up models.."))
for v in self.data["views"].values():
v.model().deleteLater()
v.setModel(None)
self.info(self.tr("Cleaning up terminal.."))
for item in self.data["models"]["terminal"].items:
del(item)
self.info(self.tr("Cleaning up controller.."))
self.controller.cleanup()
self.info(self.tr("All clean!"))
self.info(self.tr("Good bye"))
return super(Window, self).closeEvent(event)
self.info(self.tr("Closing.."))
def on_problem():
self.heads_up("Warning", "Had trouble closing down. "
"Please tell someone and try again.")
self.show()
if self.controller.is_running:
self.info(self.tr("..as soon as processing is finished.."))
self.controller.is_running = False
self.finished.connect(self.close)
util.defer(2000, on_problem)
return event.ignore()
self.data["state"]["is_closing"] = True
util.defer(200, self.close)
return event.ignore()
def reject(self):
"""Handle ESC key"""
if self.controller.is_running:
self.info(self.tr("Stopping.."))
self.controller.is_running = False
# -------------------------------------------------------------------------
#
# Feedback
#
# -------------------------------------------------------------------------
def info(self, message):
"""Print user-facing information
Arguments:
message (str): Text message for the user
"""
info = self.findChild(QtWidgets.QLabel, "Info")
info.setText(message)
# Include message in terminal
self.data["models"]["terminal"].append({
"label": message,
"type": "info"
})
animation = self.data["animation"]["display_info"]
animation.stop()
animation.start()
# TODO(marcus): Should this be configurable? Do we want
# the shell to fill up with these messages?
util.u_print(message)
def warning(self, message):
"""Block processing and print warning until user hits "Continue"
Arguments:
message (str): Message to display
"""
# TODO(marcus): Implement this.
self.info(message)
def heads_up(self, title, message, command=None):
"""Provide a front-and-center message with optional command
Arguments:
title (str): Bold and short message
message (str): Extended message
command (optional, callable): Function is provided as a button
"""
# TODO(marcus): Implement this.
self.info(message)
|
pyblish/pyblish-lite | pyblish_lite/window.py | Window.reset | python | def reset(self):
self.info(self.tr("About to reset.."))
models = self.data["models"]
models["instances"].store_checkstate()
models["plugins"].store_checkstate()
# Reset current ids to secure no previous instances get mixed in.
models["instances"].ids = []
for m in models.values():
m.reset()
for b in self.data["buttons"].values():
b.hide()
comment_box = self.findChild(QtWidgets.QWidget, "CommentBox")
comment_box.hide()
util.defer(500, self.controller.reset) | Prepare GUI for reset | train | https://github.com/pyblish/pyblish-lite/blob/9172b81c7ae19a36e99c89dd16e102201992dc20/pyblish_lite/window.py#L887-L908 | [
"def defer(delay, func):\n \"\"\"Append artificial delay to `func`\n\n This aids in keeping the GUI responsive, but complicates logic\n when producing tests. To combat this, the environment variable ensures\n that every operation is synchonous.\n\n Arguments:\n delay (float): Delay multiplier;... | class Window(QtWidgets.QDialog):
def __init__(self, controller, parent=None):
super(Window, self).__init__(parent)
icon = QtGui.QIcon(util.get_asset("img", "logo-extrasmall.png"))
self.setWindowFlags(self.windowFlags() |
QtCore.Qt.WindowTitleHint |
QtCore.Qt.WindowMaximizeButtonHint |
QtCore.Qt.WindowMinimizeButtonHint |
QtCore.Qt.WindowCloseButtonHint)
self.setWindowIcon(icon)
self.setAttribute(QtCore.Qt.WA_DeleteOnClose)
self.controller = controller
"""General layout
__________________ _____________________
| | | | | |
| Header | --> | Tab | Tab | Tab |
|__________________| |_______|_______|_____|
| | _____________________
| | | |
| | | |
| Body | | |
| | --> | Page |
| | | |
| | |_____________________|
|__________________| _____________________
| | | | |
| Footer | | Status | Buttons |
|__________________| |___________|_________|
"""
header = QtWidgets.QWidget()
artist_tab = QtWidgets.QRadioButton()
overview_tab = QtWidgets.QRadioButton()
terminal_tab = QtWidgets.QRadioButton()
spacer = QtWidgets.QWidget()
layout = QtWidgets.QHBoxLayout(header)
layout.addWidget(artist_tab, 0)
layout.addWidget(overview_tab, 0)
layout.addWidget(terminal_tab, 0)
layout.addWidget(spacer, 1) # Compress items to the left
layout.setContentsMargins(0, 0, 0, 0)
layout.setSpacing(0)
"""Artist Page
__________________
| |
| | ------------ |
| | ----- |
| |
| | -------- |
| | ------- |
| |
|__________________|
"""
artist_page = QtWidgets.QWidget()
artist_view = view.Item()
artist_delegate = delegate.Artist()
artist_view.setItemDelegate(artist_delegate)
layout = QtWidgets.QVBoxLayout(artist_page)
layout.addWidget(artist_view)
layout.setContentsMargins(5, 5, 5, 5)
layout.setSpacing(0)
"""Overview Page
___________________
| |
| o ----- o---- |
| o ---- o--- |
| o ---- o---- |
| o ---- o------ |
| |
|__________________|
"""
overview_page = QtWidgets.QWidget()
left_view = view.Item()
right_view = view.Item()
item_delegate = delegate.Item()
left_view.setItemDelegate(item_delegate)
right_view.setItemDelegate(item_delegate)
layout = QtWidgets.QHBoxLayout(overview_page)
layout.addWidget(left_view, 1)
layout.addWidget(right_view, 1)
layout.setContentsMargins(5, 5, 5, 5)
layout.setSpacing(0)
"""Terminal
__________________
| |
| \ |
| \ |
| / |
| / ______ |
| |
|__________________|
"""
terminal_container = QtWidgets.QWidget()
terminal_delegate = delegate.Terminal()
terminal_view = view.LogView()
terminal_view.setItemDelegate(terminal_delegate)
layout = QtWidgets.QVBoxLayout(terminal_container)
layout.addWidget(terminal_view)
layout.setContentsMargins(5, 5, 5, 5)
layout.setSpacing(0)
terminal_footer = QtWidgets.QWidget()
search_box = QtWidgets.QLineEdit()
instance_combo = QtWidgets.QComboBox()
plugin_combo = QtWidgets.QComboBox()
show_errors = QtWidgets.QCheckBox()
show_records = QtWidgets.QCheckBox()
show_debug = QtWidgets.QCheckBox()
show_info = QtWidgets.QCheckBox()
show_warning = QtWidgets.QCheckBox()
show_error = QtWidgets.QCheckBox()
show_critical = QtWidgets.QCheckBox()
layout = QtWidgets.QHBoxLayout(terminal_footer)
for w in (search_box,
instance_combo,
plugin_combo,
show_errors,
show_records,
show_debug,
show_info,
show_warning,
show_error,
show_critical):
layout.addWidget(w)
layout.setContentsMargins(0, 0, 0, 0)
layout.setSpacing(3)
terminal_page = QtWidgets.QWidget()
layout = QtWidgets.QVBoxLayout(terminal_page)
layout.addWidget(terminal_container)
# layout.addWidget(terminal_footer) # TODO
layout.setContentsMargins(0, 0, 0, 0)
layout.setSpacing(0)
# Add some room between window borders and contents
body = QtWidgets.QWidget()
layout = QtWidgets.QHBoxLayout(body)
layout.setContentsMargins(5, 5, 5, 0)
layout.addWidget(artist_page)
layout.addWidget(overview_page)
layout.addWidget(terminal_page)
"""Comment Box
____________________________
|> My comment |
| |
|____________________________|
"""
comment_box = QtWidgets.QLineEdit()
comment_placeholder = QtWidgets.QLabel(
self.tr("Comment.."), comment_box)
comment_placeholder.move(2, 2)
comment_box.setEnabled(False)
comment_box.hide()
"""Details View
____________________________
| |
| An Item 23 ms |
| - family |
| |
|----------------------------|
| |
| Docstring |
|____________________________|
"""
details = view.Details(self)
"""Footer
______________________
| ___ ___ |
| | o || > ||
| |___||___||
|______________________|
"""
footer = QtWidgets.QWidget()
info = QtWidgets.QLabel()
spacer = QtWidgets.QWidget()
reset = QtWidgets.QPushButton(awesome["refresh"])
validate = QtWidgets.QPushButton(awesome["flask"])
play = QtWidgets.QPushButton(awesome["play"])
stop = QtWidgets.QPushButton(awesome["stop"])
layout = QtWidgets.QHBoxLayout(footer)
layout.setContentsMargins(5, 5, 5, 5)
layout.addWidget(info, 0)
layout.addWidget(spacer, 1)
layout.addWidget(reset, 0)
layout.addWidget(validate, 0)
layout.addWidget(play, 0)
layout.addWidget(stop, 0)
# Placeholder for when GUI is closing
# TODO(marcus): Fade to black and the the user about what's happening
closing_placeholder = QtWidgets.QWidget(self)
closing_placeholder.setSizePolicy(QtWidgets.QSizePolicy.Expanding,
QtWidgets.QSizePolicy.Expanding)
closing_placeholder.hide()
# Main layout
layout = QtWidgets.QVBoxLayout(self)
layout.addWidget(header, 0)
layout.addWidget(body, 3)
layout.addWidget(closing_placeholder, 1)
layout.addWidget(comment_box, 0)
layout.addWidget(footer, 0)
layout.setContentsMargins(0, 0, 0, 0)
layout.setSpacing(0)
"""Animation
___
/ \
| | ___
\___/ / \
___ | |
/ \ \___/
| |
\___/
"""
# Display info
info_effect = QtWidgets.QGraphicsOpacityEffect(info)
info.setGraphicsEffect(info_effect)
timeline = QtCore.QSequentialAnimationGroup()
on = QtCore.QPropertyAnimation(info_effect, b"opacity")
on.setDuration(0)
on.setStartValue(0)
on.setEndValue(1)
off = QtCore.QPropertyAnimation(info_effect, b"opacity")
off.setDuration(0)
off.setStartValue(1)
off.setEndValue(0)
fade = QtCore.QPropertyAnimation(info_effect, b"opacity")
fade.setDuration(500)
fade.setStartValue(1.0)
fade.setEndValue(0.0)
timeline.addAnimation(on)
timeline.addPause(50)
timeline.addAnimation(off)
timeline.addPause(50)
timeline.addAnimation(on)
timeline.addPause(2000)
timeline.addAnimation(fade)
info_animation = timeline
"""Setup
Widgets are referred to in CSS via their object-name. We
use the same mechanism internally to refer to objects; so rather
than storing widgets as self.my_widget, it is referred to as:
>>> my_widget = self.findChild(QtWidgets.QWidget, "MyWidget")
This way there is only ever a single method of referring to any widget.
___
| |
/\/ \/\
/ _ \
\ / \ /
| | | |
/ \_/ \
\ /
\/\ /\/
|___|
"""
instance_model = model.Instance()
plugin_model = model.Plugin()
terminal_model = model.Terminal()
filter_model = model.ProxyModel(plugin_model)
artist_view.setModel(instance_model)
left_view.setModel(instance_model)
right_view.setModel(filter_model)
terminal_view.setModel(terminal_model)
instance_combo.setModel(instance_model)
plugin_combo.setModel(plugin_model)
names = {
# Main
"Header": header,
"Body": body,
"Footer": footer,
"Info": info,
# Modals
"Details": details,
# Pages
"Artist": artist_page,
"Overview": overview_page,
"Terminal": terminal_page,
# Tabs
"ArtistTab": artist_tab,
"OverviewTab": overview_tab,
"TerminalTab": terminal_tab,
# Buttons
"Play": play,
"Validate": validate,
"Reset": reset,
"Stop": stop,
# Misc
"CommentBox": comment_box,
"CommentPlaceholder": comment_placeholder,
"ClosingPlaceholder": closing_placeholder,
}
for name, w in names.items():
w.setObjectName(name)
# Enable CSS on plain QWidget objects
for w in (header,
body,
artist_page,
comment_box,
overview_page,
terminal_page,
footer,
play,
validate,
stop,
details,
reset,
closing_placeholder):
w.setAttribute(QtCore.Qt.WA_StyledBackground)
self.data = {
"views": {
"artist": artist_view,
"left": left_view,
"right": right_view,
"terminal": terminal_view,
},
"modals": {
"details": details,
},
"models": {
"instances": instance_model,
"plugins": plugin_model,
"filter": filter_model,
"terminal": terminal_model,
},
"terminal_toggles": {
"record": show_records,
"debug": show_debug,
"info": show_info,
"warning": show_warning,
"error": show_error,
"critical": show_critical
},
"tabs": {
"artist": artist_tab,
"overview": overview_tab,
"terminal": terminal_tab,
"current": "artist"
},
"pages": {
"artist": artist_page,
"overview": overview_page,
"terminal": terminal_page,
},
"buttons": {
"play": play,
"validate": validate,
"stop": stop,
"reset": reset
},
"animation": {
"display_info": info_animation,
},
"state": {
"is_closing": False,
}
}
# Pressing Enter defaults to Play
play.setFocus()
"""Signals
________ ________
|________|-->|________|
|
|
___v____
|________|
"""
artist_tab.toggled.connect(
lambda: self.on_tab_changed("artist"))
overview_tab.toggled.connect(
lambda: self.on_tab_changed("overview"))
terminal_tab.toggled.connect(
lambda: self.on_tab_changed("terminal"))
controller.was_reset.connect(self.on_was_reset)
controller.was_validated.connect(self.on_was_validated)
controller.was_published.connect(self.on_was_published)
controller.was_acted.connect(self.on_was_acted)
controller.was_finished.connect(self.on_finished)
# Discovery happens synchronously during reset, that's
# why it's important that this connection is triggered
# right away.
controller.was_discovered.connect(self.on_was_discovered,
QtCore.Qt.DirectConnection)
# This is called synchronously on each process
controller.was_processed.connect(self.on_was_processed,
QtCore.Qt.DirectConnection)
# NOTE: Listeners to this signal are run in the main thread
controller.about_to_process.connect(self.on_about_to_process,
QtCore.Qt.DirectConnection)
artist_view.toggled.connect(self.on_item_toggled)
left_view.toggled.connect(self.on_item_toggled)
right_view.toggled.connect(self.on_item_toggled)
artist_view.inspected.connect(self.on_item_inspected)
left_view.inspected.connect(self.on_item_inspected)
right_view.inspected.connect(self.on_item_inspected)
terminal_view.inspected.connect(self.on_item_inspected)
reset.clicked.connect(self.on_reset_clicked)
validate.clicked.connect(self.on_validate_clicked)
play.clicked.connect(self.on_play_clicked)
stop.clicked.connect(self.on_stop_clicked)
comment_box.textChanged.connect(self.on_comment_entered)
comment_box.returnPressed.connect(self.on_play_clicked)
right_view.customContextMenuRequested.connect(
self.on_plugin_action_menu_requested)
for box in (show_errors,
show_records,
show_debug,
show_info,
show_warning,
show_error,
show_critical):
box.setChecked(True)
self.data["tabs"][settings.InitialTab].setChecked(True)
# -------------------------------------------------------------------------
#
# Event handlers
#
# -------------------------------------------------------------------------
def on_item_expanded(self, index, state):
    """Expand `index`, collapsing every other expandable item.

    Arguments:
        index (QtCore.QModelIndex): Item to expand
        state (bool or None): Explicit state; None inverts the current one
    """
    if not index.data(model.IsExpandable):
        return

    if state is None:
        state = not index.data(model.Expanded)

    # Only one item may be expanded at a time; collapse everything
    # first, then apply the requested state to this item.
    model_ = index.model()
    for other in model_:
        model_.setData(other, False, model.Expanded)

    model_.setData(index, state, model.Expanded)
def _format_index_data(self, index):
    """Render an index's Data role as aligned ``key: value`` lines.

    Keys with a leading underscore are considered private and skipped.
    """
    lines = list()
    for key, value in index.data(model.Data).items():
        if key.startswith("_"):
            continue
        lines.append("%s %s" % ((key + ":").ljust(12), value))
    return "\n".join(lines)

def on_item_inspected(self, index):
    """Pop up the Details modal for the inspected item.

    The payload shown depends on the item's Type role: log records,
    errors, plug-ins and instances each get a tailored layout.
    """
    details = self.data["modals"]["details"]
    details.move(QtGui.QCursor.pos())

    if index.data(model.Type) == "record":
        details.show({
            "icon": awesome["circle"],
            "heading": index.data(model.Label).split("\n")[0],
            "subheading": "LogRecord (%s)" % index.data(model.LogLevel),
            "text": self._format_index_data(index),
            "timestamp": "",
        })

    elif index.data(model.Type) == "error":
        details.show({
            "icon": awesome["exclamation-triangle"],
            "heading": index.data(model.Label).split("\n")[0],
            "subheading": "Exception",
            "text": self._format_index_data(index),
            "timestamp": "",
        })

    elif index.data(model.Type) == "plugin":
        details.show({
            "icon": index.data(model.Icon) or awesome["filter"],
            "heading": index.data(model.Label),
            "subheading": ", ".join(index.data(model.Families)),
            "text": index.data(model.Docstring) or "",
            "timestamp": str(index.data(model.Duration) or 0) + " ms",
        })

    elif index.data(model.Type) == "instance":
        details.show({
            "icon": index.data(model.Icon) or awesome["file"],
            "heading": index.data(model.Label),
            "subheading": ", ".join(index.data(model.Families)),
            "text": "",
            "timestamp": str(index.data(model.Duration) or 0) + " ms",
        })
def on_item_toggled(self, index, state=None):
    """An item is requesting to be toggled

    Arguments:
        index (QtCore.QModelIndex): Item being toggled
        state (bool, optional): Explicit checked state; when None the
            current checked state is inverted.
    """
    # Toggling is only allowed for idle, optional items
    if not index.data(model.IsIdle):
        return self.info("Cannot toggle")

    if not index.data(model.IsOptional):
        return self.info("This item is mandatory")

    if state is None:
        state = not index.data(model.IsChecked)

    index.model().setData(index, state, model.IsChecked)

    # Withdraw option to publish if no instances are toggled
    play = self.findChild(QtWidgets.QWidget, "Play")
    validate = self.findChild(QtWidgets.QWidget, "Validate")
    any_instances = any(index.data(model.IsChecked)
                        for index in self.data["models"]["instances"])
    play.setEnabled(any_instances)
    validate.setEnabled(any_instances)

    # Emit signals
    # NOTE(review): these lambdas close over `state`/`index` and run
    # 100 ms later via util.defer, after the model was updated above.
    if index.data(model.Type) == "instance":
        instance = self.data["models"]["instances"].items[index.row()]
        util.defer(
            100, lambda: self.controller.emit_(
                signal="instanceToggled",
                kwargs={"new_value": state,
                        "old_value": not state,
                        "instance": instance}))

    if index.data(model.Type) == "plugin":
        util.defer(
            100, lambda: self.controller.emit_(
                signal="pluginToggled",
                kwargs={"new_value": state,
                        "old_value": not state,
                        "plugin": index.data(model.Object)}))
def on_tab_changed(self, target):
    """Make `target` the visible page and remember it as current.

    Arguments:
        target (str): Page key; one of "artist", "overview", "terminal"
    """
    pages = self.data["pages"]
    for page in pages.values():
        page.hide()

    comment_box = self.findChild(QtWidgets.QWidget, "CommentBox")
    if target == "terminal":
        comment_box.hide()
    else:
        # Only show the comment box while commenting is possible
        comment_box.setVisible(comment_box.isEnabled())

    pages[target].show()
    self.data["tabs"]["current"] = target
def on_validate_clicked(self):
    """Lock the comment box and kick off validation."""
    box = self.findChild(QtWidgets.QWidget, "CommentBox")
    box.setEnabled(False)
    box.hide()

    self.validate()
def on_play_clicked(self):
    """Lock the comment box and kick off publishing."""
    box = self.findChild(QtWidgets.QWidget, "CommentBox")
    box.setEnabled(False)
    box.hide()

    self.publish()
def on_reset_clicked(self):
    """Trigger a full reset from the Reset button."""
    self.reset()
def on_stop_clicked(self):
    """Ask the controller to halt processing."""
    self.info("Stopping..")
    # Processing loops poll this flag and bail out when it goes False
    self.controller.is_running = False
def on_comment_entered(self):
    """The user has typed a comment"""
    box = self.findChild(QtWidgets.QWidget, "CommentBox")
    text = box.text()

    # Store within context so plug-ins can read it
    self.controller.context.data["comment"] = text

    # The placeholder label is visible only while the box is empty
    placeholder = self.findChild(QtWidgets.QLabel, "CommentPlaceholder")
    placeholder.setVisible(not text)
def on_about_to_process(self, plugin, instance):
    """Reflect currently running pair in GUI

    Arguments:
        plugin: Plug-in about to be processed
        instance: Paired instance, or None for context-only plug-ins
    """
    models = self.data["models"]

    if instance is not None:
        instance_model = models["instances"]
        row = instance_model.items.index(instance)
        instance_model.setData(
            instance_model.createIndex(row, 0), True, model.IsProcessing)

    plugin_model = models["plugins"]
    row = plugin_model.items.index(plugin)
    index = plugin_model.createIndex(row, 0)
    plugin_model.setData(index, True, model.IsProcessing)

    self.info("%s %s" % (self.tr("Processing"), index.data(model.Label)))
def on_plugin_action_menu_requested(self, pos):
    """The user right-clicked on a plug-in
     __________
    |          |
    | Action 1 |
    | Action 2 |
    | Action 3 |
    |          |
    |__________|

    Arguments:
        pos (QtCore.QPoint): Click position, in view coordinates
    """
    index = self.data["views"]["right"].indexAt(pos)
    actions = index.data(model.Actions)

    if not actions:
        return

    menu = QtWidgets.QMenu(self)

    # The right-hand view shows a filtered proxy; map back to the
    # source model to reach the actual plug-in object.
    plugins_index = self.data["models"]["filter"].mapToSource(index)
    plugin = self.data["models"]["plugins"].items[plugins_index.row()]
    # NOTE: removed leftover debug print of `plugin` to stdout

    for action in actions:
        qaction = QtWidgets.QAction(action.label or action.__name__, self)
        qaction.triggered.connect(partial(self.act, plugin, action))
        menu.addAction(qaction)

    menu.popup(self.data["views"]["right"].viewport().mapToGlobal(pos))
def on_was_discovered(self):
    """Populate the plug-in model with the controller's discoveries."""
    plugin_model = self.data["models"]["plugins"]
    for plugin in self.controller.plugins:
        plugin_model.append(plugin)
def on_was_reset(self):
    """Rebuild the instance model and GUI state after a reset."""
    models = self.data["models"]

    self.info(self.tr("Finishing up reset.."))

    models["instances"].reset()
    for instance in self.controller.context:
        models["instances"].append(instance)

    buttons = self.data["buttons"]
    buttons["play"].show()
    buttons["validate"].show()
    buttons["reset"].show()
    buttons["stop"].hide()

    models["instances"].restore_checkstate()
    models["plugins"].restore_checkstate()

    # Append placeholder comment from Context
    # This allows users to inject a comment from elsewhere,
    # or to perhaps provide a placeholder comment/template
    # for artists to fill in.
    comment = self.controller.context.data.get("comment")

    comment_box = self.findChild(QtWidgets.QWidget, "CommentBox")
    # BUGFIX: setText(None) raises TypeError under PyQt5; pass an
    # empty string when no comment exists (visually identical).
    comment_box.setText(comment or "")
    comment_box.setEnabled(comment is not None)

    # Refresh tab
    self.on_tab_changed(self.data["tabs"]["current"])
    self.controller.current_error = None
    self.on_finished()
def on_was_validated(self):
    """Restore buttons once validation has completed."""
    models = self.data["models"]

    # Nothing is idle any more once validation has run
    for item_model in (models["plugins"], models["instances"]):
        for index in item_model:
            index.model().setData(index, False, model.IsIdle)

    buttons = self.data["buttons"]
    buttons["reset"].show()
    buttons["play"].show()
    buttons["stop"].hide()

    self.on_finished()
def on_was_published(self):
    """Wrap up the GUI once publishing has completed."""
    models = self.data["models"]

    # Nothing is idle any more once publishing has run
    for item_model in (models["plugins"], models["instances"]):
        for index in item_model:
            index.model().setData(index, False, model.IsIdle)

    buttons = self.data["buttons"]
    buttons["reset"].show()
    buttons["stop"].hide()

    # Comments are locked away after a publish
    self.findChild(QtWidgets.QWidget, "CommentBox").hide()

    self.on_finished()
def on_was_processed(self, result):
    """Fold a single processing `result` into the GUI models.

    Arguments:
        result (dict): Outcome of one plug-in/instance pair
    """
    models = self.data["models"]
    plugins_filter = models["filter"]

    for instance in self.controller.context:
        # New instances may appear mid-publish (e.g. from collectors)
        if instance.id not in models["instances"].ids:
            models["instances"].append(instance)

        # Let the plug-in proxy include every family now in play
        family = instance.data["family"]
        if family:
            plugins_filter.add_inclusion(role="families", value=family)

        for extra_family in instance.data.get("families") or []:
            plugins_filter.add_inclusion(role="families",
                                         value=extra_family)

    models["plugins"].update_with_result(result)
    models["instances"].update_with_result(result)
    models["terminal"].update_with_result(result)
def on_was_acted(self, result):
    """Reflect the outcome of a plug-in action in the GUI.

    Arguments:
        result (dict): Action outcome; reads "plugin" and "success"
    """
    buttons = self.data["buttons"]
    buttons["reset"].show()
    buttons["stop"].hide()

    # Mark the acted plug-in's row with the action's outcome
    plugin_model = self.data["models"]["plugins"]
    row = plugin_model.items.index(result["plugin"])
    index = plugin_model.createIndex(row, 0)
    plugin_model.setData(index, not result["success"], model.ActionFailed)
    plugin_model.setData(index, False, model.IsProcessing)

    self.data["models"]["terminal"].update_with_result(result)

    self.on_finished()
def on_finished(self):
    """Finished signal handler"""
    self.controller.is_running = False

    # Summarise the run for the user, depending on whether an
    # error was recorded along the way.
    error = self.controller.current_error
    message = (self.tr("Finished successfully!") if error is None
               else self.tr("Stopped due to error(s), see Terminal."))
    self.info(message)
# -------------------------------------------------------------------------
#
# Functions
#
# -------------------------------------------------------------------------
def validate(self):
    """Begin validation, leaving only the Stop button visible."""
    self.info(self.tr("Preparing validate.."))

    for button in self.data["buttons"].values():
        button.hide()
    self.data["buttons"]["stop"].show()

    # Defer so Qt has a chance to repaint before work begins
    util.defer(5, self.controller.validate)
def publish(self):
    """Begin publishing, leaving only the Stop button visible."""
    self.info(self.tr("Preparing publish.."))

    for button in self.data["buttons"].values():
        button.hide()
    self.data["buttons"]["stop"].show()

    # Defer so Qt has a chance to repaint before work begins
    util.defer(5, self.controller.publish)
def act(self, plugin, action):
    """Run `action` of `plugin` through the controller.

    Arguments:
        plugin: Plug-in owning the action
        action: Action class to run
    """
    self.info("%s %s.." % (self.tr("Preparing"), action))

    for button in self.data["buttons"].values():
        button.hide()
    self.data["buttons"]["stop"].show()

    self.controller.is_running = True

    # Cause view to update, but it won't visually
    # happen until Qt is given time to idle..
    plugin_model = self.data["models"]["plugins"]
    row = plugin_model.items.index(plugin)
    index = plugin_model.createIndex(row, 0)

    plugin_model.setData(index, False, model.ActionIdle)
    plugin_model.setData(index, False, model.ActionFailed)
    plugin_model.setData(index, True, model.IsProcessing)

    # Give Qt time to draw
    util.defer(100, lambda: self.controller.act(plugin, action))

    self.info(self.tr("Action prepared."))
def closeEvent(self, event):
    """Perform post-flight checks before closing

    Make sure processing of any kind is wrapped up before closing
    """
    # Make it snappy, but take care to clean it all up.
    # TODO(marcus): Enable GUI to return on problem, such
    # as asking whether or not the user really wants to quit
    # given there are things currently running.
    self.hide()

    # Second entry: the deferred self.close() below re-enters this
    # handler with is_closing set, and only then do we tear down.
    if self.data["state"]["is_closing"]:

        # Explicitly clear potentially referenced data
        self.info(self.tr("Cleaning up models.."))
        for v in self.data["views"].values():
            v.model().deleteLater()
            v.setModel(None)

        self.info(self.tr("Cleaning up terminal.."))
        for item in self.data["models"]["terminal"].items:
            del(item)  # NOTE(review): only unbinds the loop name

        self.info(self.tr("Cleaning up controller.."))
        self.controller.cleanup()

        self.info(self.tr("All clean!"))
        self.info(self.tr("Good bye"))
        return super(Window, self).closeEvent(event)

    self.info(self.tr("Closing.."))

    def on_problem():
        # Shown if shutdown stalls, so the user is not left with an
        # invisible, seemingly hung process.
        self.heads_up("Warning", "Had trouble closing down. "
                      "Please tell someone and try again.")
        self.show()

    if self.controller.is_running:
        # Ask processing to wind down, then close once it signals
        # finished; surface a warning if that takes too long.
        self.info(self.tr("..as soon as processing is finished.."))
        self.controller.is_running = False
        self.finished.connect(self.close)
        util.defer(2000, on_problem)
        return event.ignore()

    self.data["state"]["is_closing"] = True

    # Re-enter this handler shortly, taking the cleanup branch above
    util.defer(200, self.close)
    return event.ignore()
def reject(self):
    """Handle ESC key"""
    # Escape doubles as a "stop processing" shortcut; the dialog
    # itself is not closed here.
    if self.controller.is_running:
        self.info(self.tr("Stopping.."))
        self.controller.is_running = False
# -------------------------------------------------------------------------
#
# Feedback
#
# -------------------------------------------------------------------------
def info(self, message):
    """Print user-facing information

    Arguments:
        message (str): Text message for the user
    """
    label = self.findChild(QtWidgets.QLabel, "Info")
    label.setText(message)

    # Mirror the message into the terminal model as well
    self.data["models"]["terminal"].append({
        "label": message,
        "type": "info"
    })

    # Restart the fade-out animation from the beginning
    animation = self.data["animation"]["display_info"]
    animation.stop()
    animation.start()

    # TODO(marcus): Should this be configurable? Do we want
    # the shell to fill up with these messages?
    util.u_print(message)
def warning(self, message):
    """Block processing and print warning until user hits "Continue"

    Arguments:
        message (str): Message to display
    """
    # TODO(marcus): Implement this.
    # NOTE(review): currently just forwards to info(); no blocking yet.
    self.info(message)
def heads_up(self, title, message, command=None):
    """Provide a front-and-center message with optional command

    Arguments:
        title (str): Bold and short message
        message (str): Extended message
        command (optional, callable): Function is provided as a button
    """
    # TODO(marcus): Implement this.
    # NOTE(review): title and command are currently unused; only the
    # message is forwarded to info().
    self.info(message)
|
pyblish/pyblish-lite | pyblish_lite/window.py | Window.closeEvent | python | def closeEvent(self, event):
# Make it snappy, but take care to clean it all up.
# TODO(marcus): Enable GUI to return on problem, such
# as asking whether or not the user really wants to quit
# given there are things currently running.
self.hide()
if self.data["state"]["is_closing"]:
# Explicitly clear potentially referenced data
self.info(self.tr("Cleaning up models.."))
for v in self.data["views"].values():
v.model().deleteLater()
v.setModel(None)
self.info(self.tr("Cleaning up terminal.."))
for item in self.data["models"]["terminal"].items:
del(item)
self.info(self.tr("Cleaning up controller.."))
self.controller.cleanup()
self.info(self.tr("All clean!"))
self.info(self.tr("Good bye"))
return super(Window, self).closeEvent(event)
self.info(self.tr("Closing.."))
def on_problem():
self.heads_up("Warning", "Had trouble closing down. "
"Please tell someone and try again.")
self.show()
if self.controller.is_running:
self.info(self.tr("..as soon as processing is finished.."))
self.controller.is_running = False
self.finished.connect(self.close)
util.defer(2000, on_problem)
return event.ignore()
self.data["state"]["is_closing"] = True
util.defer(200, self.close)
return event.ignore() | Perform post-flight checks before closing
Make sure processing of any kind is wrapped up before closing | train | https://github.com/pyblish/pyblish-lite/blob/9172b81c7ae19a36e99c89dd16e102201992dc20/pyblish_lite/window.py#L953-L1002 | [
"def defer(delay, func):\n \"\"\"Append artificial delay to `func`\n\n This aids in keeping the GUI responsive, but complicates logic\n when producing tests. To combat this, the environment variable ensures\n that every operation is synchonous.\n\n Arguments:\n delay (float): Delay multiplier;... | class Window(QtWidgets.QDialog):
def __init__(self, controller, parent=None):
super(Window, self).__init__(parent)
icon = QtGui.QIcon(util.get_asset("img", "logo-extrasmall.png"))
self.setWindowFlags(self.windowFlags() |
QtCore.Qt.WindowTitleHint |
QtCore.Qt.WindowMaximizeButtonHint |
QtCore.Qt.WindowMinimizeButtonHint |
QtCore.Qt.WindowCloseButtonHint)
self.setWindowIcon(icon)
self.setAttribute(QtCore.Qt.WA_DeleteOnClose)
self.controller = controller
"""General layout
__________________ _____________________
| | | | | |
| Header | --> | Tab | Tab | Tab |
|__________________| |_______|_______|_____|
| | _____________________
| | | |
| | | |
| Body | | |
| | --> | Page |
| | | |
| | |_____________________|
|__________________| _____________________
| | | | |
| Footer | | Status | Buttons |
|__________________| |___________|_________|
"""
header = QtWidgets.QWidget()
artist_tab = QtWidgets.QRadioButton()
overview_tab = QtWidgets.QRadioButton()
terminal_tab = QtWidgets.QRadioButton()
spacer = QtWidgets.QWidget()
layout = QtWidgets.QHBoxLayout(header)
layout.addWidget(artist_tab, 0)
layout.addWidget(overview_tab, 0)
layout.addWidget(terminal_tab, 0)
layout.addWidget(spacer, 1) # Compress items to the left
layout.setContentsMargins(0, 0, 0, 0)
layout.setSpacing(0)
"""Artist Page
__________________
| |
| | ------------ |
| | ----- |
| |
| | -------- |
| | ------- |
| |
|__________________|
"""
artist_page = QtWidgets.QWidget()
artist_view = view.Item()
artist_delegate = delegate.Artist()
artist_view.setItemDelegate(artist_delegate)
layout = QtWidgets.QVBoxLayout(artist_page)
layout.addWidget(artist_view)
layout.setContentsMargins(5, 5, 5, 5)
layout.setSpacing(0)
"""Overview Page
___________________
| |
| o ----- o---- |
| o ---- o--- |
| o ---- o---- |
| o ---- o------ |
| |
|__________________|
"""
overview_page = QtWidgets.QWidget()
left_view = view.Item()
right_view = view.Item()
item_delegate = delegate.Item()
left_view.setItemDelegate(item_delegate)
right_view.setItemDelegate(item_delegate)
layout = QtWidgets.QHBoxLayout(overview_page)
layout.addWidget(left_view, 1)
layout.addWidget(right_view, 1)
layout.setContentsMargins(5, 5, 5, 5)
layout.setSpacing(0)
"""Terminal
__________________
| |
| \ |
| \ |
| / |
| / ______ |
| |
|__________________|
"""
terminal_container = QtWidgets.QWidget()
terminal_delegate = delegate.Terminal()
terminal_view = view.LogView()
terminal_view.setItemDelegate(terminal_delegate)
layout = QtWidgets.QVBoxLayout(terminal_container)
layout.addWidget(terminal_view)
layout.setContentsMargins(5, 5, 5, 5)
layout.setSpacing(0)
terminal_footer = QtWidgets.QWidget()
search_box = QtWidgets.QLineEdit()
instance_combo = QtWidgets.QComboBox()
plugin_combo = QtWidgets.QComboBox()
show_errors = QtWidgets.QCheckBox()
show_records = QtWidgets.QCheckBox()
show_debug = QtWidgets.QCheckBox()
show_info = QtWidgets.QCheckBox()
show_warning = QtWidgets.QCheckBox()
show_error = QtWidgets.QCheckBox()
show_critical = QtWidgets.QCheckBox()
layout = QtWidgets.QHBoxLayout(terminal_footer)
for w in (search_box,
instance_combo,
plugin_combo,
show_errors,
show_records,
show_debug,
show_info,
show_warning,
show_error,
show_critical):
layout.addWidget(w)
layout.setContentsMargins(0, 0, 0, 0)
layout.setSpacing(3)
terminal_page = QtWidgets.QWidget()
layout = QtWidgets.QVBoxLayout(terminal_page)
layout.addWidget(terminal_container)
# layout.addWidget(terminal_footer) # TODO
layout.setContentsMargins(0, 0, 0, 0)
layout.setSpacing(0)
# Add some room between window borders and contents
body = QtWidgets.QWidget()
layout = QtWidgets.QHBoxLayout(body)
layout.setContentsMargins(5, 5, 5, 0)
layout.addWidget(artist_page)
layout.addWidget(overview_page)
layout.addWidget(terminal_page)
"""Comment Box
____________________________
|> My comment |
| |
|____________________________|
"""
comment_box = QtWidgets.QLineEdit()
comment_placeholder = QtWidgets.QLabel(
self.tr("Comment.."), comment_box)
comment_placeholder.move(2, 2)
comment_box.setEnabled(False)
comment_box.hide()
"""Details View
____________________________
| |
| An Item 23 ms |
| - family |
| |
|----------------------------|
| |
| Docstring |
|____________________________|
"""
details = view.Details(self)
"""Footer
______________________
| ___ ___ |
| | o || > ||
| |___||___||
|______________________|
"""
footer = QtWidgets.QWidget()
info = QtWidgets.QLabel()
spacer = QtWidgets.QWidget()
reset = QtWidgets.QPushButton(awesome["refresh"])
validate = QtWidgets.QPushButton(awesome["flask"])
play = QtWidgets.QPushButton(awesome["play"])
stop = QtWidgets.QPushButton(awesome["stop"])
layout = QtWidgets.QHBoxLayout(footer)
layout.setContentsMargins(5, 5, 5, 5)
layout.addWidget(info, 0)
layout.addWidget(spacer, 1)
layout.addWidget(reset, 0)
layout.addWidget(validate, 0)
layout.addWidget(play, 0)
layout.addWidget(stop, 0)
# Placeholder for when GUI is closing
# TODO(marcus): Fade to black and the the user about what's happening
closing_placeholder = QtWidgets.QWidget(self)
closing_placeholder.setSizePolicy(QtWidgets.QSizePolicy.Expanding,
QtWidgets.QSizePolicy.Expanding)
closing_placeholder.hide()
# Main layout
layout = QtWidgets.QVBoxLayout(self)
layout.addWidget(header, 0)
layout.addWidget(body, 3)
layout.addWidget(closing_placeholder, 1)
layout.addWidget(comment_box, 0)
layout.addWidget(footer, 0)
layout.setContentsMargins(0, 0, 0, 0)
layout.setSpacing(0)
"""Animation
___
/ \
| | ___
\___/ / \
___ | |
/ \ \___/
| |
\___/
"""
# Display info
info_effect = QtWidgets.QGraphicsOpacityEffect(info)
info.setGraphicsEffect(info_effect)
timeline = QtCore.QSequentialAnimationGroup()
on = QtCore.QPropertyAnimation(info_effect, b"opacity")
on.setDuration(0)
on.setStartValue(0)
on.setEndValue(1)
off = QtCore.QPropertyAnimation(info_effect, b"opacity")
off.setDuration(0)
off.setStartValue(1)
off.setEndValue(0)
fade = QtCore.QPropertyAnimation(info_effect, b"opacity")
fade.setDuration(500)
fade.setStartValue(1.0)
fade.setEndValue(0.0)
timeline.addAnimation(on)
timeline.addPause(50)
timeline.addAnimation(off)
timeline.addPause(50)
timeline.addAnimation(on)
timeline.addPause(2000)
timeline.addAnimation(fade)
info_animation = timeline
"""Setup
Widgets are referred to in CSS via their object-name. We
use the same mechanism internally to refer to objects; so rather
than storing widgets as self.my_widget, it is referred to as:
>>> my_widget = self.findChild(QtWidgets.QWidget, "MyWidget")
This way there is only ever a single method of referring to any widget.
___
| |
/\/ \/\
/ _ \
\ / \ /
| | | |
/ \_/ \
\ /
\/\ /\/
|___|
"""
instance_model = model.Instance()
plugin_model = model.Plugin()
terminal_model = model.Terminal()
filter_model = model.ProxyModel(plugin_model)
artist_view.setModel(instance_model)
left_view.setModel(instance_model)
right_view.setModel(filter_model)
terminal_view.setModel(terminal_model)
instance_combo.setModel(instance_model)
plugin_combo.setModel(plugin_model)
names = {
# Main
"Header": header,
"Body": body,
"Footer": footer,
"Info": info,
# Modals
"Details": details,
# Pages
"Artist": artist_page,
"Overview": overview_page,
"Terminal": terminal_page,
# Tabs
"ArtistTab": artist_tab,
"OverviewTab": overview_tab,
"TerminalTab": terminal_tab,
# Buttons
"Play": play,
"Validate": validate,
"Reset": reset,
"Stop": stop,
# Misc
"CommentBox": comment_box,
"CommentPlaceholder": comment_placeholder,
"ClosingPlaceholder": closing_placeholder,
}
for name, w in names.items():
w.setObjectName(name)
# Enable CSS on plain QWidget objects
for w in (header,
body,
artist_page,
comment_box,
overview_page,
terminal_page,
footer,
play,
validate,
stop,
details,
reset,
closing_placeholder):
w.setAttribute(QtCore.Qt.WA_StyledBackground)
self.data = {
"views": {
"artist": artist_view,
"left": left_view,
"right": right_view,
"terminal": terminal_view,
},
"modals": {
"details": details,
},
"models": {
"instances": instance_model,
"plugins": plugin_model,
"filter": filter_model,
"terminal": terminal_model,
},
"terminal_toggles": {
"record": show_records,
"debug": show_debug,
"info": show_info,
"warning": show_warning,
"error": show_error,
"critical": show_critical
},
"tabs": {
"artist": artist_tab,
"overview": overview_tab,
"terminal": terminal_tab,
"current": "artist"
},
"pages": {
"artist": artist_page,
"overview": overview_page,
"terminal": terminal_page,
},
"buttons": {
"play": play,
"validate": validate,
"stop": stop,
"reset": reset
},
"animation": {
"display_info": info_animation,
},
"state": {
"is_closing": False,
}
}
# Pressing Enter defaults to Play
play.setFocus()
"""Signals
________ ________
|________|-->|________|
|
|
___v____
|________|
"""
artist_tab.toggled.connect(
lambda: self.on_tab_changed("artist"))
overview_tab.toggled.connect(
lambda: self.on_tab_changed("overview"))
terminal_tab.toggled.connect(
lambda: self.on_tab_changed("terminal"))
controller.was_reset.connect(self.on_was_reset)
controller.was_validated.connect(self.on_was_validated)
controller.was_published.connect(self.on_was_published)
controller.was_acted.connect(self.on_was_acted)
controller.was_finished.connect(self.on_finished)
# Discovery happens synchronously during reset, that's
# why it's important that this connection is triggered
# right away.
controller.was_discovered.connect(self.on_was_discovered,
QtCore.Qt.DirectConnection)
# This is called synchronously on each process
controller.was_processed.connect(self.on_was_processed,
QtCore.Qt.DirectConnection)
# NOTE: Listeners to this signal are run in the main thread
controller.about_to_process.connect(self.on_about_to_process,
QtCore.Qt.DirectConnection)
artist_view.toggled.connect(self.on_item_toggled)
left_view.toggled.connect(self.on_item_toggled)
right_view.toggled.connect(self.on_item_toggled)
artist_view.inspected.connect(self.on_item_inspected)
left_view.inspected.connect(self.on_item_inspected)
right_view.inspected.connect(self.on_item_inspected)
terminal_view.inspected.connect(self.on_item_inspected)
reset.clicked.connect(self.on_reset_clicked)
validate.clicked.connect(self.on_validate_clicked)
play.clicked.connect(self.on_play_clicked)
stop.clicked.connect(self.on_stop_clicked)
comment_box.textChanged.connect(self.on_comment_entered)
comment_box.returnPressed.connect(self.on_play_clicked)
right_view.customContextMenuRequested.connect(
self.on_plugin_action_menu_requested)
for box in (show_errors,
show_records,
show_debug,
show_info,
show_warning,
show_error,
show_critical):
box.setChecked(True)
self.data["tabs"][settings.InitialTab].setChecked(True)
# -------------------------------------------------------------------------
#
# Event handlers
#
# -------------------------------------------------------------------------
def on_item_expanded(self, index, state):
if not index.data(model.IsExpandable):
return
if state is None:
state = not index.data(model.Expanded)
# Collapse others
for i in index.model():
index.model().setData(i, False, model.Expanded)
index.model().setData(index, state, model.Expanded)
def on_item_inspected(self, index):
details = self.data["modals"]["details"]
details.move(QtGui.QCursor.pos())
if index.data(model.Type) == "record":
# Compose available data
data = list()
for key, value in index.data(model.Data).items():
if key.startswith("_"):
continue
data.append("%s %s" % ((key + ":").ljust(12), value))
text = "\n".join(data)
details.show({
"icon": awesome["circle"],
"heading": index.data(model.Label).split("\n")[0],
"subheading": "LogRecord (%s)" % index.data(model.LogLevel),
"text": text,
"timestamp": "",
})
elif index.data(model.Type) == "error":
# Compose available data
data = list()
for key, value in index.data(model.Data).items():
if key.startswith("_"):
continue
data.append("%s %s" % ((key + ":").ljust(12), value))
text = "\n".join(data)
details.show({
"icon": awesome["exclamation-triangle"],
"heading": index.data(model.Label).split("\n")[0],
"subheading": "Exception",
"text": text,
"timestamp": "",
})
elif index.data(model.Type) == "plugin":
details.show({
"icon": index.data(model.Icon) or awesome["filter"],
"heading": index.data(model.Label),
"subheading": ", ".join(index.data(model.Families)),
"text": index.data(model.Docstring) or "",
"timestamp": str(index.data(model.Duration) or 0) + " ms",
})
elif index.data(model.Type) == "instance":
details.show({
"icon": index.data(model.Icon) or awesome["file"],
"heading": index.data(model.Label),
"subheading": ", ".join(index.data(model.Families)),
"text": "",
"timestamp": str(index.data(model.Duration) or 0) + " ms",
})
def on_item_toggled(self, index, state=None):
"""An item is requesting to be toggled"""
if not index.data(model.IsIdle):
return self.info("Cannot toggle")
if not index.data(model.IsOptional):
return self.info("This item is mandatory")
if state is None:
state = not index.data(model.IsChecked)
index.model().setData(index, state, model.IsChecked)
# Withdraw option to publish if no instances are toggled
play = self.findChild(QtWidgets.QWidget, "Play")
validate = self.findChild(QtWidgets.QWidget, "Validate")
any_instances = any(index.data(model.IsChecked)
for index in self.data["models"]["instances"])
play.setEnabled(any_instances)
validate.setEnabled(any_instances)
# Emit signals
if index.data(model.Type) == "instance":
instance = self.data["models"]["instances"].items[index.row()]
util.defer(
100, lambda: self.controller.emit_(
signal="instanceToggled",
kwargs={"new_value": state,
"old_value": not state,
"instance": instance}))
if index.data(model.Type) == "plugin":
util.defer(
100, lambda: self.controller.emit_(
signal="pluginToggled",
kwargs={"new_value": state,
"old_value": not state,
"plugin": index.data(model.Object)}))
def on_tab_changed(self, target):
for page in self.data["pages"].values():
page.hide()
page = self.data["pages"][target]
comment_box = self.findChild(QtWidgets.QWidget, "CommentBox")
if target == "terminal":
comment_box.hide()
else:
comment_box.setVisible(comment_box.isEnabled())
page.show()
self.data["tabs"]["current"] = target
def on_validate_clicked(self):
comment_box = self.findChild(QtWidgets.QWidget, "CommentBox")
comment_box.setEnabled(False)
comment_box.hide()
self.validate()
def on_play_clicked(self):
comment_box = self.findChild(QtWidgets.QWidget, "CommentBox")
comment_box.setEnabled(False)
comment_box.hide()
self.publish()
def on_reset_clicked(self):
self.reset()
def on_stop_clicked(self):
self.info("Stopping..")
self.controller.is_running = False
def on_comment_entered(self):
"""The user has typed a comment"""
text_edit = self.findChild(QtWidgets.QWidget, "CommentBox")
comment = text_edit.text()
# Store within context
context = self.controller.context
context.data["comment"] = comment
placeholder = self.findChild(QtWidgets.QLabel, "CommentPlaceholder")
placeholder.setVisible(not comment)
def on_about_to_process(self, plugin, instance):
"""Reflect currently running pair in GUI"""
if instance is not None:
instance_model = self.data["models"]["instances"]
index = instance_model.items.index(instance)
index = instance_model.createIndex(index, 0)
instance_model.setData(index, True, model.IsProcessing)
plugin_model = self.data["models"]["plugins"]
index = plugin_model.items.index(plugin)
index = plugin_model.createIndex(index, 0)
plugin_model.setData(index, True, model.IsProcessing)
self.info("%s %s" % (self.tr("Processing"), index.data(model.Label)))
def on_plugin_action_menu_requested(self, pos):
"""The user right-clicked on a plug-in
__________
| |
| Action 1 |
| Action 2 |
| Action 3 |
| |
|__________|
"""
index = self.data["views"]["right"].indexAt(pos)
actions = index.data(model.Actions)
if not actions:
return
menu = QtWidgets.QMenu(self)
plugins_index = self.data["models"]["filter"].mapToSource(index)
plugin = self.data["models"]["plugins"].items[plugins_index.row()]
print("plugin is: %s" % plugin)
for action in actions:
qaction = QtWidgets.QAction(action.label or action.__name__, self)
qaction.triggered.connect(partial(self.act, plugin, action))
menu.addAction(qaction)
menu.popup(self.data["views"]["right"].viewport().mapToGlobal(pos))
def on_was_discovered(self):
models = self.data["models"]
for Plugin in self.controller.plugins:
models["plugins"].append(Plugin)
def on_was_reset(self):
models = self.data["models"]
self.info(self.tr("Finishing up reset.."))
models["instances"].reset()
for instance in self.controller.context:
models["instances"].append(instance)
buttons = self.data["buttons"]
buttons["play"].show()
buttons["validate"].show()
buttons["reset"].show()
buttons["stop"].hide()
models["instances"].restore_checkstate()
models["plugins"].restore_checkstate()
# Append placeholder comment from Context
# This allows users to inject a comment from elsewhere,
# or to perhaps provide a placeholder comment/template
# for artists to fill in.
comment = self.controller.context.data.get("comment")
comment_box = self.findChild(QtWidgets.QWidget, "CommentBox")
comment_box.setText(comment or None)
comment_box.setEnabled(comment is not None)
# Refresh tab
self.on_tab_changed(self.data["tabs"]["current"])
self.controller.current_error = None
self.on_finished()
def on_was_validated(self):
plugin_model = self.data["models"]["plugins"]
instance_model = self.data["models"]["instances"]
for index in plugin_model:
index.model().setData(index, False, model.IsIdle)
for index in instance_model:
index.model().setData(index, False, model.IsIdle)
buttons = self.data["buttons"]
buttons["reset"].show()
buttons["play"].show()
buttons["stop"].hide()
self.on_finished()
def on_was_published(self):
plugin_model = self.data["models"]["plugins"]
instance_model = self.data["models"]["instances"]
for index in plugin_model:
index.model().setData(index, False, model.IsIdle)
for index in instance_model:
index.model().setData(index, False, model.IsIdle)
buttons = self.data["buttons"]
buttons["reset"].show()
buttons["stop"].hide()
comment_box = self.findChild(QtWidgets.QWidget, "CommentBox")
comment_box.hide()
self.on_finished()
def on_was_processed(self, result):
models = self.data["models"]
for instance in self.controller.context:
if instance.id not in models["instances"].ids:
models["instances"].append(instance)
family = instance.data["family"]
if family:
plugins_filter = self.data["models"]["filter"]
plugins_filter.add_inclusion(role="families", value=family)
families = instance.data.get("families")
if families:
for f in families:
plugins_filter = self.data["models"]["filter"]
plugins_filter.add_inclusion(role="families", value=f)
models["plugins"].update_with_result(result)
models["instances"].update_with_result(result)
models["terminal"].update_with_result(result)
def on_was_acted(self, result):
buttons = self.data["buttons"]
buttons["reset"].show()
buttons["stop"].hide()
# Update action with result
model_ = self.data["models"]["plugins"]
index = model_.items.index(result["plugin"])
index = model_.createIndex(index, 0)
model_.setData(index, not result["success"], model.ActionFailed)
model_.setData(index, False, model.IsProcessing)
models = self.data["models"]
models["terminal"].update_with_result(result)
self.on_finished()
def on_finished(self):
"""Finished signal handler"""
self.controller.is_running = False
error = self.controller.current_error
if error is not None:
self.info(self.tr("Stopped due to error(s), see Terminal."))
else:
self.info(self.tr("Finished successfully!"))
# -------------------------------------------------------------------------
#
# Functions
#
# -------------------------------------------------------------------------
def reset(self):
"""Prepare GUI for reset"""
self.info(self.tr("About to reset.."))
models = self.data["models"]
models["instances"].store_checkstate()
models["plugins"].store_checkstate()
# Reset current ids to secure no previous instances get mixed in.
models["instances"].ids = []
for m in models.values():
m.reset()
for b in self.data["buttons"].values():
b.hide()
comment_box = self.findChild(QtWidgets.QWidget, "CommentBox")
comment_box.hide()
util.defer(500, self.controller.reset)
def validate(self):
self.info(self.tr("Preparing validate.."))
for button in self.data["buttons"].values():
button.hide()
self.data["buttons"]["stop"].show()
util.defer(5, self.controller.validate)
def publish(self):
self.info(self.tr("Preparing publish.."))
for button in self.data["buttons"].values():
button.hide()
self.data["buttons"]["stop"].show()
util.defer(5, self.controller.publish)
def act(self, plugin, action):
self.info("%s %s.." % (self.tr("Preparing"), action))
for button in self.data["buttons"].values():
button.hide()
self.data["buttons"]["stop"].show()
self.controller.is_running = True
# Cause view to update, but it won't visually
# happen until Qt is given time to idle..
model_ = self.data["models"]["plugins"]
index = model_.items.index(plugin)
index = model_.createIndex(index, 0)
for key, value in {model.ActionIdle: False,
model.ActionFailed: False,
model.IsProcessing: True}.items():
model_.setData(index, value, key)
# Give Qt time to draw
util.defer(100, lambda: self.controller.act(plugin, action))
self.info(self.tr("Action prepared."))
def reject(self):
"""Handle ESC key"""
if self.controller.is_running:
self.info(self.tr("Stopping.."))
self.controller.is_running = False
# -------------------------------------------------------------------------
#
# Feedback
#
# -------------------------------------------------------------------------
def info(self, message):
"""Print user-facing information
Arguments:
message (str): Text message for the user
"""
info = self.findChild(QtWidgets.QLabel, "Info")
info.setText(message)
# Include message in terminal
self.data["models"]["terminal"].append({
"label": message,
"type": "info"
})
animation = self.data["animation"]["display_info"]
animation.stop()
animation.start()
# TODO(marcus): Should this be configurable? Do we want
# the shell to fill up with these messages?
util.u_print(message)
def warning(self, message):
"""Block processing and print warning until user hits "Continue"
Arguments:
message (str): Message to display
"""
# TODO(marcus): Implement this.
self.info(message)
def heads_up(self, title, message, command=None):
"""Provide a front-and-center message with optional command
Arguments:
title (str): Bold and short message
message (str): Extended message
command (optional, callable): Function is provided as a button
"""
# TODO(marcus): Implement this.
self.info(message)
|
pyblish/pyblish-lite | pyblish_lite/window.py | Window.reject | python | def reject(self):
if self.controller.is_running:
self.info(self.tr("Stopping.."))
self.controller.is_running = False | Handle ESC key | train | https://github.com/pyblish/pyblish-lite/blob/9172b81c7ae19a36e99c89dd16e102201992dc20/pyblish_lite/window.py#L1004-L1009 | null | class Window(QtWidgets.QDialog):
def __init__(self, controller, parent=None):
super(Window, self).__init__(parent)
icon = QtGui.QIcon(util.get_asset("img", "logo-extrasmall.png"))
self.setWindowFlags(self.windowFlags() |
QtCore.Qt.WindowTitleHint |
QtCore.Qt.WindowMaximizeButtonHint |
QtCore.Qt.WindowMinimizeButtonHint |
QtCore.Qt.WindowCloseButtonHint)
self.setWindowIcon(icon)
self.setAttribute(QtCore.Qt.WA_DeleteOnClose)
self.controller = controller
"""General layout
__________________ _____________________
| | | | | |
| Header | --> | Tab | Tab | Tab |
|__________________| |_______|_______|_____|
| | _____________________
| | | |
| | | |
| Body | | |
| | --> | Page |
| | | |
| | |_____________________|
|__________________| _____________________
| | | | |
| Footer | | Status | Buttons |
|__________________| |___________|_________|
"""
header = QtWidgets.QWidget()
artist_tab = QtWidgets.QRadioButton()
overview_tab = QtWidgets.QRadioButton()
terminal_tab = QtWidgets.QRadioButton()
spacer = QtWidgets.QWidget()
layout = QtWidgets.QHBoxLayout(header)
layout.addWidget(artist_tab, 0)
layout.addWidget(overview_tab, 0)
layout.addWidget(terminal_tab, 0)
layout.addWidget(spacer, 1) # Compress items to the left
layout.setContentsMargins(0, 0, 0, 0)
layout.setSpacing(0)
"""Artist Page
__________________
| |
| | ------------ |
| | ----- |
| |
| | -------- |
| | ------- |
| |
|__________________|
"""
artist_page = QtWidgets.QWidget()
artist_view = view.Item()
artist_delegate = delegate.Artist()
artist_view.setItemDelegate(artist_delegate)
layout = QtWidgets.QVBoxLayout(artist_page)
layout.addWidget(artist_view)
layout.setContentsMargins(5, 5, 5, 5)
layout.setSpacing(0)
"""Overview Page
___________________
| |
| o ----- o---- |
| o ---- o--- |
| o ---- o---- |
| o ---- o------ |
| |
|__________________|
"""
overview_page = QtWidgets.QWidget()
left_view = view.Item()
right_view = view.Item()
item_delegate = delegate.Item()
left_view.setItemDelegate(item_delegate)
right_view.setItemDelegate(item_delegate)
layout = QtWidgets.QHBoxLayout(overview_page)
layout.addWidget(left_view, 1)
layout.addWidget(right_view, 1)
layout.setContentsMargins(5, 5, 5, 5)
layout.setSpacing(0)
"""Terminal
__________________
| |
| \ |
| \ |
| / |
| / ______ |
| |
|__________________|
"""
terminal_container = QtWidgets.QWidget()
terminal_delegate = delegate.Terminal()
terminal_view = view.LogView()
terminal_view.setItemDelegate(terminal_delegate)
layout = QtWidgets.QVBoxLayout(terminal_container)
layout.addWidget(terminal_view)
layout.setContentsMargins(5, 5, 5, 5)
layout.setSpacing(0)
terminal_footer = QtWidgets.QWidget()
search_box = QtWidgets.QLineEdit()
instance_combo = QtWidgets.QComboBox()
plugin_combo = QtWidgets.QComboBox()
show_errors = QtWidgets.QCheckBox()
show_records = QtWidgets.QCheckBox()
show_debug = QtWidgets.QCheckBox()
show_info = QtWidgets.QCheckBox()
show_warning = QtWidgets.QCheckBox()
show_error = QtWidgets.QCheckBox()
show_critical = QtWidgets.QCheckBox()
layout = QtWidgets.QHBoxLayout(terminal_footer)
for w in (search_box,
instance_combo,
plugin_combo,
show_errors,
show_records,
show_debug,
show_info,
show_warning,
show_error,
show_critical):
layout.addWidget(w)
layout.setContentsMargins(0, 0, 0, 0)
layout.setSpacing(3)
terminal_page = QtWidgets.QWidget()
layout = QtWidgets.QVBoxLayout(terminal_page)
layout.addWidget(terminal_container)
# layout.addWidget(terminal_footer) # TODO
layout.setContentsMargins(0, 0, 0, 0)
layout.setSpacing(0)
# Add some room between window borders and contents
body = QtWidgets.QWidget()
layout = QtWidgets.QHBoxLayout(body)
layout.setContentsMargins(5, 5, 5, 0)
layout.addWidget(artist_page)
layout.addWidget(overview_page)
layout.addWidget(terminal_page)
"""Comment Box
____________________________
|> My comment |
| |
|____________________________|
"""
comment_box = QtWidgets.QLineEdit()
comment_placeholder = QtWidgets.QLabel(
self.tr("Comment.."), comment_box)
comment_placeholder.move(2, 2)
comment_box.setEnabled(False)
comment_box.hide()
"""Details View
____________________________
| |
| An Item 23 ms |
| - family |
| |
|----------------------------|
| |
| Docstring |
|____________________________|
"""
details = view.Details(self)
"""Footer
______________________
| ___ ___ |
| | o || > ||
| |___||___||
|______________________|
"""
footer = QtWidgets.QWidget()
info = QtWidgets.QLabel()
spacer = QtWidgets.QWidget()
reset = QtWidgets.QPushButton(awesome["refresh"])
validate = QtWidgets.QPushButton(awesome["flask"])
play = QtWidgets.QPushButton(awesome["play"])
stop = QtWidgets.QPushButton(awesome["stop"])
layout = QtWidgets.QHBoxLayout(footer)
layout.setContentsMargins(5, 5, 5, 5)
layout.addWidget(info, 0)
layout.addWidget(spacer, 1)
layout.addWidget(reset, 0)
layout.addWidget(validate, 0)
layout.addWidget(play, 0)
layout.addWidget(stop, 0)
# Placeholder for when GUI is closing
# TODO(marcus): Fade to black and the the user about what's happening
closing_placeholder = QtWidgets.QWidget(self)
closing_placeholder.setSizePolicy(QtWidgets.QSizePolicy.Expanding,
QtWidgets.QSizePolicy.Expanding)
closing_placeholder.hide()
# Main layout
layout = QtWidgets.QVBoxLayout(self)
layout.addWidget(header, 0)
layout.addWidget(body, 3)
layout.addWidget(closing_placeholder, 1)
layout.addWidget(comment_box, 0)
layout.addWidget(footer, 0)
layout.setContentsMargins(0, 0, 0, 0)
layout.setSpacing(0)
"""Animation
___
/ \
| | ___
\___/ / \
___ | |
/ \ \___/
| |
\___/
"""
# Display info
info_effect = QtWidgets.QGraphicsOpacityEffect(info)
info.setGraphicsEffect(info_effect)
timeline = QtCore.QSequentialAnimationGroup()
on = QtCore.QPropertyAnimation(info_effect, b"opacity")
on.setDuration(0)
on.setStartValue(0)
on.setEndValue(1)
off = QtCore.QPropertyAnimation(info_effect, b"opacity")
off.setDuration(0)
off.setStartValue(1)
off.setEndValue(0)
fade = QtCore.QPropertyAnimation(info_effect, b"opacity")
fade.setDuration(500)
fade.setStartValue(1.0)
fade.setEndValue(0.0)
timeline.addAnimation(on)
timeline.addPause(50)
timeline.addAnimation(off)
timeline.addPause(50)
timeline.addAnimation(on)
timeline.addPause(2000)
timeline.addAnimation(fade)
info_animation = timeline
"""Setup
Widgets are referred to in CSS via their object-name. We
use the same mechanism internally to refer to objects; so rather
than storing widgets as self.my_widget, it is referred to as:
>>> my_widget = self.findChild(QtWidgets.QWidget, "MyWidget")
This way there is only ever a single method of referring to any widget.
___
| |
/\/ \/\
/ _ \
\ / \ /
| | | |
/ \_/ \
\ /
\/\ /\/
|___|
"""
instance_model = model.Instance()
plugin_model = model.Plugin()
terminal_model = model.Terminal()
filter_model = model.ProxyModel(plugin_model)
artist_view.setModel(instance_model)
left_view.setModel(instance_model)
right_view.setModel(filter_model)
terminal_view.setModel(terminal_model)
instance_combo.setModel(instance_model)
plugin_combo.setModel(plugin_model)
names = {
# Main
"Header": header,
"Body": body,
"Footer": footer,
"Info": info,
# Modals
"Details": details,
# Pages
"Artist": artist_page,
"Overview": overview_page,
"Terminal": terminal_page,
# Tabs
"ArtistTab": artist_tab,
"OverviewTab": overview_tab,
"TerminalTab": terminal_tab,
# Buttons
"Play": play,
"Validate": validate,
"Reset": reset,
"Stop": stop,
# Misc
"CommentBox": comment_box,
"CommentPlaceholder": comment_placeholder,
"ClosingPlaceholder": closing_placeholder,
}
for name, w in names.items():
w.setObjectName(name)
# Enable CSS on plain QWidget objects
for w in (header,
body,
artist_page,
comment_box,
overview_page,
terminal_page,
footer,
play,
validate,
stop,
details,
reset,
closing_placeholder):
w.setAttribute(QtCore.Qt.WA_StyledBackground)
self.data = {
"views": {
"artist": artist_view,
"left": left_view,
"right": right_view,
"terminal": terminal_view,
},
"modals": {
"details": details,
},
"models": {
"instances": instance_model,
"plugins": plugin_model,
"filter": filter_model,
"terminal": terminal_model,
},
"terminal_toggles": {
"record": show_records,
"debug": show_debug,
"info": show_info,
"warning": show_warning,
"error": show_error,
"critical": show_critical
},
"tabs": {
"artist": artist_tab,
"overview": overview_tab,
"terminal": terminal_tab,
"current": "artist"
},
"pages": {
"artist": artist_page,
"overview": overview_page,
"terminal": terminal_page,
},
"buttons": {
"play": play,
"validate": validate,
"stop": stop,
"reset": reset
},
"animation": {
"display_info": info_animation,
},
"state": {
"is_closing": False,
}
}
# Pressing Enter defaults to Play
play.setFocus()
"""Signals
________ ________
|________|-->|________|
|
|
___v____
|________|
"""
artist_tab.toggled.connect(
lambda: self.on_tab_changed("artist"))
overview_tab.toggled.connect(
lambda: self.on_tab_changed("overview"))
terminal_tab.toggled.connect(
lambda: self.on_tab_changed("terminal"))
controller.was_reset.connect(self.on_was_reset)
controller.was_validated.connect(self.on_was_validated)
controller.was_published.connect(self.on_was_published)
controller.was_acted.connect(self.on_was_acted)
controller.was_finished.connect(self.on_finished)
# Discovery happens synchronously during reset, that's
# why it's important that this connection is triggered
# right away.
controller.was_discovered.connect(self.on_was_discovered,
QtCore.Qt.DirectConnection)
# This is called synchronously on each process
controller.was_processed.connect(self.on_was_processed,
QtCore.Qt.DirectConnection)
# NOTE: Listeners to this signal are run in the main thread
controller.about_to_process.connect(self.on_about_to_process,
QtCore.Qt.DirectConnection)
artist_view.toggled.connect(self.on_item_toggled)
left_view.toggled.connect(self.on_item_toggled)
right_view.toggled.connect(self.on_item_toggled)
artist_view.inspected.connect(self.on_item_inspected)
left_view.inspected.connect(self.on_item_inspected)
right_view.inspected.connect(self.on_item_inspected)
terminal_view.inspected.connect(self.on_item_inspected)
reset.clicked.connect(self.on_reset_clicked)
validate.clicked.connect(self.on_validate_clicked)
play.clicked.connect(self.on_play_clicked)
stop.clicked.connect(self.on_stop_clicked)
comment_box.textChanged.connect(self.on_comment_entered)
comment_box.returnPressed.connect(self.on_play_clicked)
right_view.customContextMenuRequested.connect(
self.on_plugin_action_menu_requested)
for box in (show_errors,
show_records,
show_debug,
show_info,
show_warning,
show_error,
show_critical):
box.setChecked(True)
self.data["tabs"][settings.InitialTab].setChecked(True)
# -------------------------------------------------------------------------
#
# Event handlers
#
# -------------------------------------------------------------------------
def on_item_expanded(self, index, state):
if not index.data(model.IsExpandable):
return
if state is None:
state = not index.data(model.Expanded)
# Collapse others
for i in index.model():
index.model().setData(i, False, model.Expanded)
index.model().setData(index, state, model.Expanded)
def on_item_inspected(self, index):
details = self.data["modals"]["details"]
details.move(QtGui.QCursor.pos())
if index.data(model.Type) == "record":
# Compose available data
data = list()
for key, value in index.data(model.Data).items():
if key.startswith("_"):
continue
data.append("%s %s" % ((key + ":").ljust(12), value))
text = "\n".join(data)
details.show({
"icon": awesome["circle"],
"heading": index.data(model.Label).split("\n")[0],
"subheading": "LogRecord (%s)" % index.data(model.LogLevel),
"text": text,
"timestamp": "",
})
elif index.data(model.Type) == "error":
# Compose available data
data = list()
for key, value in index.data(model.Data).items():
if key.startswith("_"):
continue
data.append("%s %s" % ((key + ":").ljust(12), value))
text = "\n".join(data)
details.show({
"icon": awesome["exclamation-triangle"],
"heading": index.data(model.Label).split("\n")[0],
"subheading": "Exception",
"text": text,
"timestamp": "",
})
elif index.data(model.Type) == "plugin":
details.show({
"icon": index.data(model.Icon) or awesome["filter"],
"heading": index.data(model.Label),
"subheading": ", ".join(index.data(model.Families)),
"text": index.data(model.Docstring) or "",
"timestamp": str(index.data(model.Duration) or 0) + " ms",
})
elif index.data(model.Type) == "instance":
details.show({
"icon": index.data(model.Icon) or awesome["file"],
"heading": index.data(model.Label),
"subheading": ", ".join(index.data(model.Families)),
"text": "",
"timestamp": str(index.data(model.Duration) or 0) + " ms",
})
def on_item_toggled(self, index, state=None):
"""An item is requesting to be toggled"""
if not index.data(model.IsIdle):
return self.info("Cannot toggle")
if not index.data(model.IsOptional):
return self.info("This item is mandatory")
if state is None:
state = not index.data(model.IsChecked)
index.model().setData(index, state, model.IsChecked)
# Withdraw option to publish if no instances are toggled
play = self.findChild(QtWidgets.QWidget, "Play")
validate = self.findChild(QtWidgets.QWidget, "Validate")
any_instances = any(index.data(model.IsChecked)
for index in self.data["models"]["instances"])
play.setEnabled(any_instances)
validate.setEnabled(any_instances)
# Emit signals
if index.data(model.Type) == "instance":
instance = self.data["models"]["instances"].items[index.row()]
util.defer(
100, lambda: self.controller.emit_(
signal="instanceToggled",
kwargs={"new_value": state,
"old_value": not state,
"instance": instance}))
if index.data(model.Type) == "plugin":
util.defer(
100, lambda: self.controller.emit_(
signal="pluginToggled",
kwargs={"new_value": state,
"old_value": not state,
"plugin": index.data(model.Object)}))
def on_tab_changed(self, target):
for page in self.data["pages"].values():
page.hide()
page = self.data["pages"][target]
comment_box = self.findChild(QtWidgets.QWidget, "CommentBox")
if target == "terminal":
comment_box.hide()
else:
comment_box.setVisible(comment_box.isEnabled())
page.show()
self.data["tabs"]["current"] = target
def on_validate_clicked(self):
comment_box = self.findChild(QtWidgets.QWidget, "CommentBox")
comment_box.setEnabled(False)
comment_box.hide()
self.validate()
def on_play_clicked(self):
comment_box = self.findChild(QtWidgets.QWidget, "CommentBox")
comment_box.setEnabled(False)
comment_box.hide()
self.publish()
def on_reset_clicked(self):
self.reset()
def on_stop_clicked(self):
self.info("Stopping..")
self.controller.is_running = False
def on_comment_entered(self):
"""The user has typed a comment"""
text_edit = self.findChild(QtWidgets.QWidget, "CommentBox")
comment = text_edit.text()
# Store within context
context = self.controller.context
context.data["comment"] = comment
placeholder = self.findChild(QtWidgets.QLabel, "CommentPlaceholder")
placeholder.setVisible(not comment)
def on_about_to_process(self, plugin, instance):
"""Reflect currently running pair in GUI"""
if instance is not None:
instance_model = self.data["models"]["instances"]
index = instance_model.items.index(instance)
index = instance_model.createIndex(index, 0)
instance_model.setData(index, True, model.IsProcessing)
plugin_model = self.data["models"]["plugins"]
index = plugin_model.items.index(plugin)
index = plugin_model.createIndex(index, 0)
plugin_model.setData(index, True, model.IsProcessing)
self.info("%s %s" % (self.tr("Processing"), index.data(model.Label)))
def on_plugin_action_menu_requested(self, pos):
"""The user right-clicked on a plug-in
__________
| |
| Action 1 |
| Action 2 |
| Action 3 |
| |
|__________|
"""
index = self.data["views"]["right"].indexAt(pos)
actions = index.data(model.Actions)
if not actions:
return
menu = QtWidgets.QMenu(self)
plugins_index = self.data["models"]["filter"].mapToSource(index)
plugin = self.data["models"]["plugins"].items[plugins_index.row()]
print("plugin is: %s" % plugin)
for action in actions:
qaction = QtWidgets.QAction(action.label or action.__name__, self)
qaction.triggered.connect(partial(self.act, plugin, action))
menu.addAction(qaction)
menu.popup(self.data["views"]["right"].viewport().mapToGlobal(pos))
def on_was_discovered(self):
models = self.data["models"]
for Plugin in self.controller.plugins:
models["plugins"].append(Plugin)
def on_was_reset(self):
models = self.data["models"]
self.info(self.tr("Finishing up reset.."))
models["instances"].reset()
for instance in self.controller.context:
models["instances"].append(instance)
buttons = self.data["buttons"]
buttons["play"].show()
buttons["validate"].show()
buttons["reset"].show()
buttons["stop"].hide()
models["instances"].restore_checkstate()
models["plugins"].restore_checkstate()
# Append placeholder comment from Context
# This allows users to inject a comment from elsewhere,
# or to perhaps provide a placeholder comment/template
# for artists to fill in.
comment = self.controller.context.data.get("comment")
comment_box = self.findChild(QtWidgets.QWidget, "CommentBox")
comment_box.setText(comment or None)
comment_box.setEnabled(comment is not None)
# Refresh tab
self.on_tab_changed(self.data["tabs"]["current"])
self.controller.current_error = None
self.on_finished()
def on_was_validated(self):
plugin_model = self.data["models"]["plugins"]
instance_model = self.data["models"]["instances"]
for index in plugin_model:
index.model().setData(index, False, model.IsIdle)
for index in instance_model:
index.model().setData(index, False, model.IsIdle)
buttons = self.data["buttons"]
buttons["reset"].show()
buttons["play"].show()
buttons["stop"].hide()
self.on_finished()
def on_was_published(self):
plugin_model = self.data["models"]["plugins"]
instance_model = self.data["models"]["instances"]
for index in plugin_model:
index.model().setData(index, False, model.IsIdle)
for index in instance_model:
index.model().setData(index, False, model.IsIdle)
buttons = self.data["buttons"]
buttons["reset"].show()
buttons["stop"].hide()
comment_box = self.findChild(QtWidgets.QWidget, "CommentBox")
comment_box.hide()
self.on_finished()
def on_was_processed(self, result):
models = self.data["models"]
for instance in self.controller.context:
if instance.id not in models["instances"].ids:
models["instances"].append(instance)
family = instance.data["family"]
if family:
plugins_filter = self.data["models"]["filter"]
plugins_filter.add_inclusion(role="families", value=family)
families = instance.data.get("families")
if families:
for f in families:
plugins_filter = self.data["models"]["filter"]
plugins_filter.add_inclusion(role="families", value=f)
models["plugins"].update_with_result(result)
models["instances"].update_with_result(result)
models["terminal"].update_with_result(result)
def on_was_acted(self, result):
buttons = self.data["buttons"]
buttons["reset"].show()
buttons["stop"].hide()
# Update action with result
model_ = self.data["models"]["plugins"]
index = model_.items.index(result["plugin"])
index = model_.createIndex(index, 0)
model_.setData(index, not result["success"], model.ActionFailed)
model_.setData(index, False, model.IsProcessing)
models = self.data["models"]
models["terminal"].update_with_result(result)
self.on_finished()
def on_finished(self):
"""Finished signal handler"""
self.controller.is_running = False
error = self.controller.current_error
if error is not None:
self.info(self.tr("Stopped due to error(s), see Terminal."))
else:
self.info(self.tr("Finished successfully!"))
# -------------------------------------------------------------------------
#
# Functions
#
# -------------------------------------------------------------------------
def reset(self):
"""Prepare GUI for reset"""
self.info(self.tr("About to reset.."))
models = self.data["models"]
models["instances"].store_checkstate()
models["plugins"].store_checkstate()
# Reset current ids to secure no previous instances get mixed in.
models["instances"].ids = []
for m in models.values():
m.reset()
for b in self.data["buttons"].values():
b.hide()
comment_box = self.findChild(QtWidgets.QWidget, "CommentBox")
comment_box.hide()
util.defer(500, self.controller.reset)
def validate(self):
self.info(self.tr("Preparing validate.."))
for button in self.data["buttons"].values():
button.hide()
self.data["buttons"]["stop"].show()
util.defer(5, self.controller.validate)
def publish(self):
self.info(self.tr("Preparing publish.."))
for button in self.data["buttons"].values():
button.hide()
self.data["buttons"]["stop"].show()
util.defer(5, self.controller.publish)
def act(self, plugin, action):
self.info("%s %s.." % (self.tr("Preparing"), action))
for button in self.data["buttons"].values():
button.hide()
self.data["buttons"]["stop"].show()
self.controller.is_running = True
# Cause view to update, but it won't visually
# happen until Qt is given time to idle..
model_ = self.data["models"]["plugins"]
index = model_.items.index(plugin)
index = model_.createIndex(index, 0)
for key, value in {model.ActionIdle: False,
model.ActionFailed: False,
model.IsProcessing: True}.items():
model_.setData(index, value, key)
# Give Qt time to draw
util.defer(100, lambda: self.controller.act(plugin, action))
self.info(self.tr("Action prepared."))
def closeEvent(self, event):
"""Perform post-flight checks before closing
Make sure processing of any kind is wrapped up before closing
"""
# Make it snappy, but take care to clean it all up.
# TODO(marcus): Enable GUI to return on problem, such
# as asking whether or not the user really wants to quit
# given there are things currently running.
self.hide()
if self.data["state"]["is_closing"]:
# Explicitly clear potentially referenced data
self.info(self.tr("Cleaning up models.."))
for v in self.data["views"].values():
v.model().deleteLater()
v.setModel(None)
self.info(self.tr("Cleaning up terminal.."))
for item in self.data["models"]["terminal"].items:
del(item)
self.info(self.tr("Cleaning up controller.."))
self.controller.cleanup()
self.info(self.tr("All clean!"))
self.info(self.tr("Good bye"))
return super(Window, self).closeEvent(event)
self.info(self.tr("Closing.."))
def on_problem():
self.heads_up("Warning", "Had trouble closing down. "
"Please tell someone and try again.")
self.show()
if self.controller.is_running:
self.info(self.tr("..as soon as processing is finished.."))
self.controller.is_running = False
self.finished.connect(self.close)
util.defer(2000, on_problem)
return event.ignore()
self.data["state"]["is_closing"] = True
util.defer(200, self.close)
return event.ignore()
# -------------------------------------------------------------------------
#
# Feedback
#
# -------------------------------------------------------------------------
def info(self, message):
"""Print user-facing information
Arguments:
message (str): Text message for the user
"""
info = self.findChild(QtWidgets.QLabel, "Info")
info.setText(message)
# Include message in terminal
self.data["models"]["terminal"].append({
"label": message,
"type": "info"
})
animation = self.data["animation"]["display_info"]
animation.stop()
animation.start()
# TODO(marcus): Should this be configurable? Do we want
# the shell to fill up with these messages?
util.u_print(message)
def warning(self, message):
"""Block processing and print warning until user hits "Continue"
Arguments:
message (str): Message to display
"""
# TODO(marcus): Implement this.
self.info(message)
def heads_up(self, title, message, command=None):
"""Provide a front-and-center message with optional command
Arguments:
title (str): Bold and short message
message (str): Extended message
command (optional, callable): Function is provided as a button
"""
# TODO(marcus): Implement this.
self.info(message)
|
pyblish/pyblish-lite | pyblish_lite/window.py | Window.info | python | def info(self, message):
info = self.findChild(QtWidgets.QLabel, "Info")
info.setText(message)
# Include message in terminal
self.data["models"]["terminal"].append({
"label": message,
"type": "info"
})
animation = self.data["animation"]["display_info"]
animation.stop()
animation.start()
# TODO(marcus): Should this be configurable? Do we want
# the shell to fill up with these messages?
util.u_print(message) | Print user-facing information
Arguments:
message (str): Text message for the user | train | https://github.com/pyblish/pyblish-lite/blob/9172b81c7ae19a36e99c89dd16e102201992dc20/pyblish_lite/window.py#L1017-L1040 | [
"def u_print(msg, **kwargs):\n \"\"\"`print` with encoded unicode.\n\n `print` unicode may cause UnicodeEncodeError\n or non-readable result when `PYTHONIOENCODING` is not set.\n this will fix it.\n\n Arguments:\n msg (unicode): Message to print.\n **kwargs: Keyword argument for `print`... | class Window(QtWidgets.QDialog):
def __init__(self, controller, parent=None):
super(Window, self).__init__(parent)
icon = QtGui.QIcon(util.get_asset("img", "logo-extrasmall.png"))
self.setWindowFlags(self.windowFlags() |
QtCore.Qt.WindowTitleHint |
QtCore.Qt.WindowMaximizeButtonHint |
QtCore.Qt.WindowMinimizeButtonHint |
QtCore.Qt.WindowCloseButtonHint)
self.setWindowIcon(icon)
self.setAttribute(QtCore.Qt.WA_DeleteOnClose)
self.controller = controller
"""General layout
__________________ _____________________
| | | | | |
| Header | --> | Tab | Tab | Tab |
|__________________| |_______|_______|_____|
| | _____________________
| | | |
| | | |
| Body | | |
| | --> | Page |
| | | |
| | |_____________________|
|__________________| _____________________
| | | | |
| Footer | | Status | Buttons |
|__________________| |___________|_________|
"""
header = QtWidgets.QWidget()
artist_tab = QtWidgets.QRadioButton()
overview_tab = QtWidgets.QRadioButton()
terminal_tab = QtWidgets.QRadioButton()
spacer = QtWidgets.QWidget()
layout = QtWidgets.QHBoxLayout(header)
layout.addWidget(artist_tab, 0)
layout.addWidget(overview_tab, 0)
layout.addWidget(terminal_tab, 0)
layout.addWidget(spacer, 1) # Compress items to the left
layout.setContentsMargins(0, 0, 0, 0)
layout.setSpacing(0)
"""Artist Page
__________________
| |
| | ------------ |
| | ----- |
| |
| | -------- |
| | ------- |
| |
|__________________|
"""
artist_page = QtWidgets.QWidget()
artist_view = view.Item()
artist_delegate = delegate.Artist()
artist_view.setItemDelegate(artist_delegate)
layout = QtWidgets.QVBoxLayout(artist_page)
layout.addWidget(artist_view)
layout.setContentsMargins(5, 5, 5, 5)
layout.setSpacing(0)
"""Overview Page
___________________
| |
| o ----- o---- |
| o ---- o--- |
| o ---- o---- |
| o ---- o------ |
| |
|__________________|
"""
overview_page = QtWidgets.QWidget()
left_view = view.Item()
right_view = view.Item()
item_delegate = delegate.Item()
left_view.setItemDelegate(item_delegate)
right_view.setItemDelegate(item_delegate)
layout = QtWidgets.QHBoxLayout(overview_page)
layout.addWidget(left_view, 1)
layout.addWidget(right_view, 1)
layout.setContentsMargins(5, 5, 5, 5)
layout.setSpacing(0)
"""Terminal
__________________
| |
| \ |
| \ |
| / |
| / ______ |
| |
|__________________|
"""
terminal_container = QtWidgets.QWidget()
terminal_delegate = delegate.Terminal()
terminal_view = view.LogView()
terminal_view.setItemDelegate(terminal_delegate)
layout = QtWidgets.QVBoxLayout(terminal_container)
layout.addWidget(terminal_view)
layout.setContentsMargins(5, 5, 5, 5)
layout.setSpacing(0)
terminal_footer = QtWidgets.QWidget()
search_box = QtWidgets.QLineEdit()
instance_combo = QtWidgets.QComboBox()
plugin_combo = QtWidgets.QComboBox()
show_errors = QtWidgets.QCheckBox()
show_records = QtWidgets.QCheckBox()
show_debug = QtWidgets.QCheckBox()
show_info = QtWidgets.QCheckBox()
show_warning = QtWidgets.QCheckBox()
show_error = QtWidgets.QCheckBox()
show_critical = QtWidgets.QCheckBox()
layout = QtWidgets.QHBoxLayout(terminal_footer)
for w in (search_box,
instance_combo,
plugin_combo,
show_errors,
show_records,
show_debug,
show_info,
show_warning,
show_error,
show_critical):
layout.addWidget(w)
layout.setContentsMargins(0, 0, 0, 0)
layout.setSpacing(3)
terminal_page = QtWidgets.QWidget()
layout = QtWidgets.QVBoxLayout(terminal_page)
layout.addWidget(terminal_container)
# layout.addWidget(terminal_footer) # TODO
layout.setContentsMargins(0, 0, 0, 0)
layout.setSpacing(0)
# Add some room between window borders and contents
body = QtWidgets.QWidget()
layout = QtWidgets.QHBoxLayout(body)
layout.setContentsMargins(5, 5, 5, 0)
layout.addWidget(artist_page)
layout.addWidget(overview_page)
layout.addWidget(terminal_page)
"""Comment Box
____________________________
|> My comment |
| |
|____________________________|
"""
comment_box = QtWidgets.QLineEdit()
comment_placeholder = QtWidgets.QLabel(
self.tr("Comment.."), comment_box)
comment_placeholder.move(2, 2)
comment_box.setEnabled(False)
comment_box.hide()
"""Details View
____________________________
| |
| An Item 23 ms |
| - family |
| |
|----------------------------|
| |
| Docstring |
|____________________________|
"""
details = view.Details(self)
"""Footer
______________________
| ___ ___ |
| | o || > ||
| |___||___||
|______________________|
"""
footer = QtWidgets.QWidget()
info = QtWidgets.QLabel()
spacer = QtWidgets.QWidget()
reset = QtWidgets.QPushButton(awesome["refresh"])
validate = QtWidgets.QPushButton(awesome["flask"])
play = QtWidgets.QPushButton(awesome["play"])
stop = QtWidgets.QPushButton(awesome["stop"])
layout = QtWidgets.QHBoxLayout(footer)
layout.setContentsMargins(5, 5, 5, 5)
layout.addWidget(info, 0)
layout.addWidget(spacer, 1)
layout.addWidget(reset, 0)
layout.addWidget(validate, 0)
layout.addWidget(play, 0)
layout.addWidget(stop, 0)
# Placeholder for when GUI is closing
# TODO(marcus): Fade to black and the the user about what's happening
closing_placeholder = QtWidgets.QWidget(self)
closing_placeholder.setSizePolicy(QtWidgets.QSizePolicy.Expanding,
QtWidgets.QSizePolicy.Expanding)
closing_placeholder.hide()
# Main layout
layout = QtWidgets.QVBoxLayout(self)
layout.addWidget(header, 0)
layout.addWidget(body, 3)
layout.addWidget(closing_placeholder, 1)
layout.addWidget(comment_box, 0)
layout.addWidget(footer, 0)
layout.setContentsMargins(0, 0, 0, 0)
layout.setSpacing(0)
"""Animation
___
/ \
| | ___
\___/ / \
___ | |
/ \ \___/
| |
\___/
"""
# Display info
info_effect = QtWidgets.QGraphicsOpacityEffect(info)
info.setGraphicsEffect(info_effect)
timeline = QtCore.QSequentialAnimationGroup()
on = QtCore.QPropertyAnimation(info_effect, b"opacity")
on.setDuration(0)
on.setStartValue(0)
on.setEndValue(1)
off = QtCore.QPropertyAnimation(info_effect, b"opacity")
off.setDuration(0)
off.setStartValue(1)
off.setEndValue(0)
fade = QtCore.QPropertyAnimation(info_effect, b"opacity")
fade.setDuration(500)
fade.setStartValue(1.0)
fade.setEndValue(0.0)
timeline.addAnimation(on)
timeline.addPause(50)
timeline.addAnimation(off)
timeline.addPause(50)
timeline.addAnimation(on)
timeline.addPause(2000)
timeline.addAnimation(fade)
info_animation = timeline
"""Setup
Widgets are referred to in CSS via their object-name. We
use the same mechanism internally to refer to objects; so rather
than storing widgets as self.my_widget, it is referred to as:
>>> my_widget = self.findChild(QtWidgets.QWidget, "MyWidget")
This way there is only ever a single method of referring to any widget.
___
| |
/\/ \/\
/ _ \
\ / \ /
| | | |
/ \_/ \
\ /
\/\ /\/
|___|
"""
instance_model = model.Instance()
plugin_model = model.Plugin()
terminal_model = model.Terminal()
filter_model = model.ProxyModel(plugin_model)
artist_view.setModel(instance_model)
left_view.setModel(instance_model)
right_view.setModel(filter_model)
terminal_view.setModel(terminal_model)
instance_combo.setModel(instance_model)
plugin_combo.setModel(plugin_model)
names = {
# Main
"Header": header,
"Body": body,
"Footer": footer,
"Info": info,
# Modals
"Details": details,
# Pages
"Artist": artist_page,
"Overview": overview_page,
"Terminal": terminal_page,
# Tabs
"ArtistTab": artist_tab,
"OverviewTab": overview_tab,
"TerminalTab": terminal_tab,
# Buttons
"Play": play,
"Validate": validate,
"Reset": reset,
"Stop": stop,
# Misc
"CommentBox": comment_box,
"CommentPlaceholder": comment_placeholder,
"ClosingPlaceholder": closing_placeholder,
}
for name, w in names.items():
w.setObjectName(name)
# Enable CSS on plain QWidget objects
for w in (header,
body,
artist_page,
comment_box,
overview_page,
terminal_page,
footer,
play,
validate,
stop,
details,
reset,
closing_placeholder):
w.setAttribute(QtCore.Qt.WA_StyledBackground)
self.data = {
"views": {
"artist": artist_view,
"left": left_view,
"right": right_view,
"terminal": terminal_view,
},
"modals": {
"details": details,
},
"models": {
"instances": instance_model,
"plugins": plugin_model,
"filter": filter_model,
"terminal": terminal_model,
},
"terminal_toggles": {
"record": show_records,
"debug": show_debug,
"info": show_info,
"warning": show_warning,
"error": show_error,
"critical": show_critical
},
"tabs": {
"artist": artist_tab,
"overview": overview_tab,
"terminal": terminal_tab,
"current": "artist"
},
"pages": {
"artist": artist_page,
"overview": overview_page,
"terminal": terminal_page,
},
"buttons": {
"play": play,
"validate": validate,
"stop": stop,
"reset": reset
},
"animation": {
"display_info": info_animation,
},
"state": {
"is_closing": False,
}
}
# Pressing Enter defaults to Play
play.setFocus()
"""Signals
________ ________
|________|-->|________|
|
|
___v____
|________|
"""
artist_tab.toggled.connect(
lambda: self.on_tab_changed("artist"))
overview_tab.toggled.connect(
lambda: self.on_tab_changed("overview"))
terminal_tab.toggled.connect(
lambda: self.on_tab_changed("terminal"))
controller.was_reset.connect(self.on_was_reset)
controller.was_validated.connect(self.on_was_validated)
controller.was_published.connect(self.on_was_published)
controller.was_acted.connect(self.on_was_acted)
controller.was_finished.connect(self.on_finished)
# Discovery happens synchronously during reset, that's
# why it's important that this connection is triggered
# right away.
controller.was_discovered.connect(self.on_was_discovered,
QtCore.Qt.DirectConnection)
# This is called synchronously on each process
controller.was_processed.connect(self.on_was_processed,
QtCore.Qt.DirectConnection)
# NOTE: Listeners to this signal are run in the main thread
controller.about_to_process.connect(self.on_about_to_process,
QtCore.Qt.DirectConnection)
artist_view.toggled.connect(self.on_item_toggled)
left_view.toggled.connect(self.on_item_toggled)
right_view.toggled.connect(self.on_item_toggled)
artist_view.inspected.connect(self.on_item_inspected)
left_view.inspected.connect(self.on_item_inspected)
right_view.inspected.connect(self.on_item_inspected)
terminal_view.inspected.connect(self.on_item_inspected)
reset.clicked.connect(self.on_reset_clicked)
validate.clicked.connect(self.on_validate_clicked)
play.clicked.connect(self.on_play_clicked)
stop.clicked.connect(self.on_stop_clicked)
comment_box.textChanged.connect(self.on_comment_entered)
comment_box.returnPressed.connect(self.on_play_clicked)
right_view.customContextMenuRequested.connect(
self.on_plugin_action_menu_requested)
for box in (show_errors,
show_records,
show_debug,
show_info,
show_warning,
show_error,
show_critical):
box.setChecked(True)
self.data["tabs"][settings.InitialTab].setChecked(True)
# -------------------------------------------------------------------------
#
# Event handlers
#
# -------------------------------------------------------------------------
def on_item_expanded(self, index, state):
if not index.data(model.IsExpandable):
return
if state is None:
state = not index.data(model.Expanded)
# Collapse others
for i in index.model():
index.model().setData(i, False, model.Expanded)
index.model().setData(index, state, model.Expanded)
def on_item_inspected(self, index):
details = self.data["modals"]["details"]
details.move(QtGui.QCursor.pos())
if index.data(model.Type) == "record":
# Compose available data
data = list()
for key, value in index.data(model.Data).items():
if key.startswith("_"):
continue
data.append("%s %s" % ((key + ":").ljust(12), value))
text = "\n".join(data)
details.show({
"icon": awesome["circle"],
"heading": index.data(model.Label).split("\n")[0],
"subheading": "LogRecord (%s)" % index.data(model.LogLevel),
"text": text,
"timestamp": "",
})
elif index.data(model.Type) == "error":
# Compose available data
data = list()
for key, value in index.data(model.Data).items():
if key.startswith("_"):
continue
data.append("%s %s" % ((key + ":").ljust(12), value))
text = "\n".join(data)
details.show({
"icon": awesome["exclamation-triangle"],
"heading": index.data(model.Label).split("\n")[0],
"subheading": "Exception",
"text": text,
"timestamp": "",
})
elif index.data(model.Type) == "plugin":
details.show({
"icon": index.data(model.Icon) or awesome["filter"],
"heading": index.data(model.Label),
"subheading": ", ".join(index.data(model.Families)),
"text": index.data(model.Docstring) or "",
"timestamp": str(index.data(model.Duration) or 0) + " ms",
})
elif index.data(model.Type) == "instance":
details.show({
"icon": index.data(model.Icon) or awesome["file"],
"heading": index.data(model.Label),
"subheading": ", ".join(index.data(model.Families)),
"text": "",
"timestamp": str(index.data(model.Duration) or 0) + " ms",
})
def on_item_toggled(self, index, state=None):
"""An item is requesting to be toggled"""
if not index.data(model.IsIdle):
return self.info("Cannot toggle")
if not index.data(model.IsOptional):
return self.info("This item is mandatory")
if state is None:
state = not index.data(model.IsChecked)
index.model().setData(index, state, model.IsChecked)
# Withdraw option to publish if no instances are toggled
play = self.findChild(QtWidgets.QWidget, "Play")
validate = self.findChild(QtWidgets.QWidget, "Validate")
any_instances = any(index.data(model.IsChecked)
for index in self.data["models"]["instances"])
play.setEnabled(any_instances)
validate.setEnabled(any_instances)
# Emit signals
if index.data(model.Type) == "instance":
instance = self.data["models"]["instances"].items[index.row()]
util.defer(
100, lambda: self.controller.emit_(
signal="instanceToggled",
kwargs={"new_value": state,
"old_value": not state,
"instance": instance}))
if index.data(model.Type) == "plugin":
util.defer(
100, lambda: self.controller.emit_(
signal="pluginToggled",
kwargs={"new_value": state,
"old_value": not state,
"plugin": index.data(model.Object)}))
def on_tab_changed(self, target):
for page in self.data["pages"].values():
page.hide()
page = self.data["pages"][target]
comment_box = self.findChild(QtWidgets.QWidget, "CommentBox")
if target == "terminal":
comment_box.hide()
else:
comment_box.setVisible(comment_box.isEnabled())
page.show()
self.data["tabs"]["current"] = target
def on_validate_clicked(self):
comment_box = self.findChild(QtWidgets.QWidget, "CommentBox")
comment_box.setEnabled(False)
comment_box.hide()
self.validate()
def on_play_clicked(self):
comment_box = self.findChild(QtWidgets.QWidget, "CommentBox")
comment_box.setEnabled(False)
comment_box.hide()
self.publish()
def on_reset_clicked(self):
self.reset()
def on_stop_clicked(self):
self.info("Stopping..")
self.controller.is_running = False
def on_comment_entered(self):
"""The user has typed a comment"""
text_edit = self.findChild(QtWidgets.QWidget, "CommentBox")
comment = text_edit.text()
# Store within context
context = self.controller.context
context.data["comment"] = comment
placeholder = self.findChild(QtWidgets.QLabel, "CommentPlaceholder")
placeholder.setVisible(not comment)
def on_about_to_process(self, plugin, instance):
"""Reflect currently running pair in GUI"""
if instance is not None:
instance_model = self.data["models"]["instances"]
index = instance_model.items.index(instance)
index = instance_model.createIndex(index, 0)
instance_model.setData(index, True, model.IsProcessing)
plugin_model = self.data["models"]["plugins"]
index = plugin_model.items.index(plugin)
index = plugin_model.createIndex(index, 0)
plugin_model.setData(index, True, model.IsProcessing)
self.info("%s %s" % (self.tr("Processing"), index.data(model.Label)))
def on_plugin_action_menu_requested(self, pos):
"""The user right-clicked on a plug-in
__________
| |
| Action 1 |
| Action 2 |
| Action 3 |
| |
|__________|
"""
index = self.data["views"]["right"].indexAt(pos)
actions = index.data(model.Actions)
if not actions:
return
menu = QtWidgets.QMenu(self)
plugins_index = self.data["models"]["filter"].mapToSource(index)
plugin = self.data["models"]["plugins"].items[plugins_index.row()]
print("plugin is: %s" % plugin)
for action in actions:
qaction = QtWidgets.QAction(action.label or action.__name__, self)
qaction.triggered.connect(partial(self.act, plugin, action))
menu.addAction(qaction)
menu.popup(self.data["views"]["right"].viewport().mapToGlobal(pos))
def on_was_discovered(self):
models = self.data["models"]
for Plugin in self.controller.plugins:
models["plugins"].append(Plugin)
def on_was_reset(self):
models = self.data["models"]
self.info(self.tr("Finishing up reset.."))
models["instances"].reset()
for instance in self.controller.context:
models["instances"].append(instance)
buttons = self.data["buttons"]
buttons["play"].show()
buttons["validate"].show()
buttons["reset"].show()
buttons["stop"].hide()
models["instances"].restore_checkstate()
models["plugins"].restore_checkstate()
# Append placeholder comment from Context
# This allows users to inject a comment from elsewhere,
# or to perhaps provide a placeholder comment/template
# for artists to fill in.
comment = self.controller.context.data.get("comment")
comment_box = self.findChild(QtWidgets.QWidget, "CommentBox")
comment_box.setText(comment or None)
comment_box.setEnabled(comment is not None)
# Refresh tab
self.on_tab_changed(self.data["tabs"]["current"])
self.controller.current_error = None
self.on_finished()
def on_was_validated(self):
plugin_model = self.data["models"]["plugins"]
instance_model = self.data["models"]["instances"]
for index in plugin_model:
index.model().setData(index, False, model.IsIdle)
for index in instance_model:
index.model().setData(index, False, model.IsIdle)
buttons = self.data["buttons"]
buttons["reset"].show()
buttons["play"].show()
buttons["stop"].hide()
self.on_finished()
def on_was_published(self):
plugin_model = self.data["models"]["plugins"]
instance_model = self.data["models"]["instances"]
for index in plugin_model:
index.model().setData(index, False, model.IsIdle)
for index in instance_model:
index.model().setData(index, False, model.IsIdle)
buttons = self.data["buttons"]
buttons["reset"].show()
buttons["stop"].hide()
comment_box = self.findChild(QtWidgets.QWidget, "CommentBox")
comment_box.hide()
self.on_finished()
def on_was_processed(self, result):
models = self.data["models"]
for instance in self.controller.context:
if instance.id not in models["instances"].ids:
models["instances"].append(instance)
family = instance.data["family"]
if family:
plugins_filter = self.data["models"]["filter"]
plugins_filter.add_inclusion(role="families", value=family)
families = instance.data.get("families")
if families:
for f in families:
plugins_filter = self.data["models"]["filter"]
plugins_filter.add_inclusion(role="families", value=f)
models["plugins"].update_with_result(result)
models["instances"].update_with_result(result)
models["terminal"].update_with_result(result)
def on_was_acted(self, result):
buttons = self.data["buttons"]
buttons["reset"].show()
buttons["stop"].hide()
# Update action with result
model_ = self.data["models"]["plugins"]
index = model_.items.index(result["plugin"])
index = model_.createIndex(index, 0)
model_.setData(index, not result["success"], model.ActionFailed)
model_.setData(index, False, model.IsProcessing)
models = self.data["models"]
models["terminal"].update_with_result(result)
self.on_finished()
def on_finished(self):
"""Finished signal handler"""
self.controller.is_running = False
error = self.controller.current_error
if error is not None:
self.info(self.tr("Stopped due to error(s), see Terminal."))
else:
self.info(self.tr("Finished successfully!"))
# -------------------------------------------------------------------------
#
# Functions
#
# -------------------------------------------------------------------------
def reset(self):
"""Prepare GUI for reset"""
self.info(self.tr("About to reset.."))
models = self.data["models"]
models["instances"].store_checkstate()
models["plugins"].store_checkstate()
# Reset current ids to secure no previous instances get mixed in.
models["instances"].ids = []
for m in models.values():
m.reset()
for b in self.data["buttons"].values():
b.hide()
comment_box = self.findChild(QtWidgets.QWidget, "CommentBox")
comment_box.hide()
util.defer(500, self.controller.reset)
def validate(self):
self.info(self.tr("Preparing validate.."))
for button in self.data["buttons"].values():
button.hide()
self.data["buttons"]["stop"].show()
util.defer(5, self.controller.validate)
def publish(self):
self.info(self.tr("Preparing publish.."))
for button in self.data["buttons"].values():
button.hide()
self.data["buttons"]["stop"].show()
util.defer(5, self.controller.publish)
def act(self, plugin, action):
self.info("%s %s.." % (self.tr("Preparing"), action))
for button in self.data["buttons"].values():
button.hide()
self.data["buttons"]["stop"].show()
self.controller.is_running = True
# Cause view to update, but it won't visually
# happen until Qt is given time to idle..
model_ = self.data["models"]["plugins"]
index = model_.items.index(plugin)
index = model_.createIndex(index, 0)
for key, value in {model.ActionIdle: False,
model.ActionFailed: False,
model.IsProcessing: True}.items():
model_.setData(index, value, key)
# Give Qt time to draw
util.defer(100, lambda: self.controller.act(plugin, action))
self.info(self.tr("Action prepared."))
def closeEvent(self, event):
"""Perform post-flight checks before closing
Make sure processing of any kind is wrapped up before closing
"""
# Make it snappy, but take care to clean it all up.
# TODO(marcus): Enable GUI to return on problem, such
# as asking whether or not the user really wants to quit
# given there are things currently running.
self.hide()
if self.data["state"]["is_closing"]:
# Explicitly clear potentially referenced data
self.info(self.tr("Cleaning up models.."))
for v in self.data["views"].values():
v.model().deleteLater()
v.setModel(None)
self.info(self.tr("Cleaning up terminal.."))
for item in self.data["models"]["terminal"].items:
del(item)
self.info(self.tr("Cleaning up controller.."))
self.controller.cleanup()
self.info(self.tr("All clean!"))
self.info(self.tr("Good bye"))
return super(Window, self).closeEvent(event)
self.info(self.tr("Closing.."))
def on_problem():
self.heads_up("Warning", "Had trouble closing down. "
"Please tell someone and try again.")
self.show()
if self.controller.is_running:
self.info(self.tr("..as soon as processing is finished.."))
self.controller.is_running = False
self.finished.connect(self.close)
util.defer(2000, on_problem)
return event.ignore()
self.data["state"]["is_closing"] = True
util.defer(200, self.close)
return event.ignore()
def reject(self):
"""Handle ESC key"""
if self.controller.is_running:
self.info(self.tr("Stopping.."))
self.controller.is_running = False
# -------------------------------------------------------------------------
#
# Feedback
#
# -------------------------------------------------------------------------
def warning(self, message):
"""Block processing and print warning until user hits "Continue"
Arguments:
message (str): Message to display
"""
# TODO(marcus): Implement this.
self.info(message)
def heads_up(self, title, message, command=None):
"""Provide a front-and-center message with optional command
Arguments:
title (str): Bold and short message
message (str): Extended message
command (optional, callable): Function is provided as a button
"""
# TODO(marcus): Implement this.
self.info(message)
|
pyblish/pyblish-lite | pyblish_lite/view.py | LogView.rowsInserted | python | def rowsInserted(self, parent, start, end):
super(LogView, self).rowsInserted(parent, start, end)
# IMPORTANT: This must be done *after* the superclass to get
# an accurate value of the delegate's height.
self.scrollToBottom() | Automatically scroll to bottom on each new item added
Arguments:
parent (QtCore.QModelIndex): The model itself, since this is a list
start (int): Start index of item
end (int): End index of item | train | https://github.com/pyblish/pyblish-lite/blob/9172b81c7ae19a36e99c89dd16e102201992dc20/pyblish_lite/view.py#L90-L104 | null | class LogView(QtWidgets.QListView):
# An item is requesting details
inspected = QtCore.Signal("QModelIndex")
def __init__(self, parent=None):
super(LogView, self).__init__(parent)
self.horizontalScrollBar().hide()
self.viewport().setAttribute(QtCore.Qt.WA_Hover, True)
self.setContextMenuPolicy(QtCore.Qt.CustomContextMenu)
self.setSelectionMode(QtWidgets.QListView.ExtendedSelection)
self.setVerticalScrollMode(QtWidgets.QListView.ScrollPerPixel)
def mousePressEvent(self, event):
if event.button() == QtCore.Qt.MidButton:
index = self.indexAt(event.pos())
self.inspected.emit(index) if index.isValid() else None
return super(LogView, self).mousePressEvent(event)
|
pyblish/pyblish-lite | pyblish_lite/control.py | Controller.reset | python | def reset(self):
self.context = pyblish.api.Context()
self.plugins = pyblish.api.discover()
self.was_discovered.emit()
self.pair_generator = None
self.current_pair = (None, None)
self.current_error = None
self.processing = {
"nextOrder": None,
"ordersWithError": set()
}
self._load()
self._run(until=pyblish.api.CollectorOrder,
on_finished=self.was_reset.emit) | Discover plug-ins and run collection | train | https://github.com/pyblish/pyblish-lite/blob/9172b81c7ae19a36e99c89dd16e102201992dc20/pyblish_lite/control.py#L61-L79 | null | class Controller(QtCore.QObject):
# Emitted when the GUI is about to start processing;
# e.g. resetting, validating or publishing.
about_to_process = QtCore.Signal(object, object)
# Emitted for each process
was_processed = QtCore.Signal(dict)
was_discovered = QtCore.Signal()
was_reset = QtCore.Signal()
was_validated = QtCore.Signal()
was_published = QtCore.Signal()
was_acted = QtCore.Signal(dict)
# Emitted when processing has finished
was_finished = QtCore.Signal()
def __init__(self, parent=None):
super(Controller, self).__init__(parent)
self.context = list()
self.plugins = list()
# Data internal to the GUI itself
self.is_running = False
# Transient state used during publishing.
self.pair_generator = None # Active producer of pairs
self.current_pair = (None, None) # Active pair
self.current_error = None
# This is used to track whether or not to continue
# processing when, for example, validation has failed.
self.processing = {
"nextOrder": None,
"ordersWithError": set()
}
def validate(self):
self._run(until=pyblish.api.ValidatorOrder,
on_finished=self.on_validated)
def publish(self):
self._run(on_finished=self.on_published)
def on_validated(self):
pyblish.api.emit("validated", context=self.context)
self.was_validated.emit()
def on_published(self):
pyblish.api.emit("published", context=self.context)
self.was_published.emit()
def act(self, plugin, action):
context = self.context
def on_next():
result = pyblish.plugin.process(plugin, context, None, action.id)
self.was_acted.emit(result)
util.defer(100, on_next)
def emit_(self, signal, kwargs):
pyblish.api.emit(signal, **kwargs)
def _load(self):
"""Initiate new generator and load first pair"""
self.is_running = True
self.pair_generator = self._iterator(self.plugins,
self.context)
self.current_pair = next(self.pair_generator, (None, None))
self.current_error = None
self.is_running = False
def _process(self, plugin, instance=None):
"""Produce `result` from `plugin` and `instance`
:func:`process` shares state with :func:`_iterator` such that
an instance/plugin pair can be fetched and processed in isolation.
Arguments:
plugin (pyblish.api.Plugin): Produce result using plug-in
instance (optional, pyblish.api.Instance): Process this instance,
if no instance is provided, context is processed.
"""
self.processing["nextOrder"] = plugin.order
try:
result = pyblish.plugin.process(plugin, self.context, instance)
except Exception as e:
raise Exception("Unknown error: %s" % e)
else:
# Make note of the order at which the
# potential error error occured.
has_error = result["error"] is not None
if has_error:
self.processing["ordersWithError"].add(plugin.order)
return result
def _run(self, until=float("inf"), on_finished=lambda: None):
"""Process current pair and store next pair for next process
Arguments:
until (pyblish.api.Order, optional): Keep fetching next()
until this order, default value is infinity.
on_finished (callable, optional): What to do when finishing,
defaults to doing nothing.
"""
def on_next():
if self.current_pair == (None, None):
return util.defer(100, on_finished_)
# The magic number 0.5 is the range between
# the various CVEI processing stages;
# e.g.
# - Collection is 0 +- 0.5 (-0.5 - 0.5)
# - Validation is 1 +- 0.5 (0.5 - 1.5)
#
# TODO(marcus): Make this less magical
#
order = self.current_pair[0].order
if order > (until + 0.5):
return util.defer(100, on_finished_)
self.about_to_process.emit(*self.current_pair)
util.defer(10, on_process)
def on_process():
try:
result = self._process(*self.current_pair)
if result["error"] is not None:
self.current_error = result["error"]
self.was_processed.emit(result)
except Exception as e:
stack = traceback.format_exc(e)
return util.defer(
500, lambda: on_unexpected_error(error=stack))
# Now that processing has completed, and context potentially
# modified with new instances, produce the next pair.
#
# IMPORTANT: This *must* be done *after* processing of
# the current pair, otherwise data generated at that point
# will *not* be included.
try:
self.current_pair = next(self.pair_generator)
except StopIteration:
# All pairs were processed successfully!
self.current_pair = (None, None)
return util.defer(500, on_finished_)
except Exception as e:
# This is a bug
stack = traceback.format_exc(e)
self.current_pair = (None, None)
return util.defer(
500, lambda: on_unexpected_error(error=stack))
util.defer(10, on_next)
def on_unexpected_error(error):
util.u_print(u"An unexpected error occurred:\n %s" % error)
return util.defer(500, on_finished_)
def on_finished_():
on_finished()
self.was_finished.emit()
self.is_running = True
util.defer(10, on_next)
def _iterator(self, plugins, context):
"""Yield next plug-in and instance to process.
Arguments:
plugins (list): Plug-ins to process
context (pyblish.api.Context): Context to process
"""
test = pyblish.logic.registered_test()
for plug, instance in pyblish.logic.Iterator(plugins, context):
if not plug.active:
continue
if instance is not None and instance.data.get("publish") is False:
continue
self.processing["nextOrder"] = plug.order
if not self.is_running:
raise StopIteration("Stopped")
if test(**self.processing):
raise StopIteration("Stopped due to %s" % test(
**self.processing))
yield plug, instance
def cleanup(self):
"""Forcefully delete objects from memory
In an ideal world, this shouldn't be necessary. Garbage
collection guarantees that anything without reference
is automatically removed.
However, because this application is designed to be run
multiple times from the same interpreter process, extra
case must be taken to ensure there are no memory leaks.
Explicitly deleting objects shines a light on where objects
may still be referenced in the form of an error. No errors
means this was uneccesary, but that's ok.
"""
for instance in self.context:
del(instance)
for plugin in self.plugins:
del(plugin)
|
pyblish/pyblish-lite | pyblish_lite/control.py | Controller._load | python | def _load(self):
self.is_running = True
self.pair_generator = self._iterator(self.plugins,
self.context)
self.current_pair = next(self.pair_generator, (None, None))
self.current_error = None
self.is_running = False | Initiate new generator and load first pair | train | https://github.com/pyblish/pyblish-lite/blob/9172b81c7ae19a36e99c89dd16e102201992dc20/pyblish_lite/control.py#L108-L115 | null | class Controller(QtCore.QObject):
# Emitted when the GUI is about to start processing;
# e.g. resetting, validating or publishing.
about_to_process = QtCore.Signal(object, object)
# Emitted for each process
was_processed = QtCore.Signal(dict)
was_discovered = QtCore.Signal()
was_reset = QtCore.Signal()
was_validated = QtCore.Signal()
was_published = QtCore.Signal()
was_acted = QtCore.Signal(dict)
# Emitted when processing has finished
was_finished = QtCore.Signal()
def __init__(self, parent=None):
super(Controller, self).__init__(parent)
self.context = list()
self.plugins = list()
# Data internal to the GUI itself
self.is_running = False
# Transient state used during publishing.
self.pair_generator = None # Active producer of pairs
self.current_pair = (None, None) # Active pair
self.current_error = None
# This is used to track whether or not to continue
# processing when, for example, validation has failed.
self.processing = {
"nextOrder": None,
"ordersWithError": set()
}
def reset(self):
"""Discover plug-ins and run collection"""
self.context = pyblish.api.Context()
self.plugins = pyblish.api.discover()
self.was_discovered.emit()
self.pair_generator = None
self.current_pair = (None, None)
self.current_error = None
self.processing = {
"nextOrder": None,
"ordersWithError": set()
}
self._load()
self._run(until=pyblish.api.CollectorOrder,
on_finished=self.was_reset.emit)
def validate(self):
self._run(until=pyblish.api.ValidatorOrder,
on_finished=self.on_validated)
def publish(self):
self._run(on_finished=self.on_published)
def on_validated(self):
pyblish.api.emit("validated", context=self.context)
self.was_validated.emit()
def on_published(self):
pyblish.api.emit("published", context=self.context)
self.was_published.emit()
def act(self, plugin, action):
context = self.context
def on_next():
result = pyblish.plugin.process(plugin, context, None, action.id)
self.was_acted.emit(result)
util.defer(100, on_next)
def emit_(self, signal, kwargs):
pyblish.api.emit(signal, **kwargs)
def _process(self, plugin, instance=None):
"""Produce `result` from `plugin` and `instance`
:func:`process` shares state with :func:`_iterator` such that
an instance/plugin pair can be fetched and processed in isolation.
Arguments:
plugin (pyblish.api.Plugin): Produce result using plug-in
instance (optional, pyblish.api.Instance): Process this instance,
if no instance is provided, context is processed.
"""
self.processing["nextOrder"] = plugin.order
try:
result = pyblish.plugin.process(plugin, self.context, instance)
except Exception as e:
raise Exception("Unknown error: %s" % e)
else:
# Make note of the order at which the
# potential error error occured.
has_error = result["error"] is not None
if has_error:
self.processing["ordersWithError"].add(plugin.order)
return result
def _run(self, until=float("inf"), on_finished=lambda: None):
"""Process current pair and store next pair for next process
Arguments:
until (pyblish.api.Order, optional): Keep fetching next()
until this order, default value is infinity.
on_finished (callable, optional): What to do when finishing,
defaults to doing nothing.
"""
def on_next():
if self.current_pair == (None, None):
return util.defer(100, on_finished_)
# The magic number 0.5 is the range between
# the various CVEI processing stages;
# e.g.
# - Collection is 0 +- 0.5 (-0.5 - 0.5)
# - Validation is 1 +- 0.5 (0.5 - 1.5)
#
# TODO(marcus): Make this less magical
#
order = self.current_pair[0].order
if order > (until + 0.5):
return util.defer(100, on_finished_)
self.about_to_process.emit(*self.current_pair)
util.defer(10, on_process)
def on_process():
try:
result = self._process(*self.current_pair)
if result["error"] is not None:
self.current_error = result["error"]
self.was_processed.emit(result)
except Exception as e:
stack = traceback.format_exc(e)
return util.defer(
500, lambda: on_unexpected_error(error=stack))
# Now that processing has completed, and context potentially
# modified with new instances, produce the next pair.
#
# IMPORTANT: This *must* be done *after* processing of
# the current pair, otherwise data generated at that point
# will *not* be included.
try:
self.current_pair = next(self.pair_generator)
except StopIteration:
# All pairs were processed successfully!
self.current_pair = (None, None)
return util.defer(500, on_finished_)
except Exception as e:
# This is a bug
stack = traceback.format_exc(e)
self.current_pair = (None, None)
return util.defer(
500, lambda: on_unexpected_error(error=stack))
util.defer(10, on_next)
def on_unexpected_error(error):
util.u_print(u"An unexpected error occurred:\n %s" % error)
return util.defer(500, on_finished_)
def on_finished_():
on_finished()
self.was_finished.emit()
self.is_running = True
util.defer(10, on_next)
def _iterator(self, plugins, context):
"""Yield next plug-in and instance to process.
Arguments:
plugins (list): Plug-ins to process
context (pyblish.api.Context): Context to process
"""
test = pyblish.logic.registered_test()
for plug, instance in pyblish.logic.Iterator(plugins, context):
if not plug.active:
continue
if instance is not None and instance.data.get("publish") is False:
continue
self.processing["nextOrder"] = plug.order
if not self.is_running:
raise StopIteration("Stopped")
if test(**self.processing):
raise StopIteration("Stopped due to %s" % test(
**self.processing))
yield plug, instance
def cleanup(self):
"""Forcefully delete objects from memory
In an ideal world, this shouldn't be necessary. Garbage
collection guarantees that anything without reference
is automatically removed.
However, because this application is designed to be run
multiple times from the same interpreter process, extra
case must be taken to ensure there are no memory leaks.
Explicitly deleting objects shines a light on where objects
may still be referenced in the form of an error. No errors
means this was uneccesary, but that's ok.
"""
for instance in self.context:
del(instance)
for plugin in self.plugins:
del(plugin)
|
pyblish/pyblish-lite | pyblish_lite/control.py | Controller._process | python | def _process(self, plugin, instance=None):
self.processing["nextOrder"] = plugin.order
try:
result = pyblish.plugin.process(plugin, self.context, instance)
except Exception as e:
raise Exception("Unknown error: %s" % e)
else:
# Make note of the order at which the
# potential error error occured.
has_error = result["error"] is not None
if has_error:
self.processing["ordersWithError"].add(plugin.order)
return result | Produce `result` from `plugin` and `instance`
:func:`process` shares state with :func:`_iterator` such that
an instance/plugin pair can be fetched and processed in isolation.
Arguments:
plugin (pyblish.api.Plugin): Produce result using plug-in
instance (optional, pyblish.api.Instance): Process this instance,
if no instance is provided, context is processed. | train | https://github.com/pyblish/pyblish-lite/blob/9172b81c7ae19a36e99c89dd16e102201992dc20/pyblish_lite/control.py#L117-L145 | null | class Controller(QtCore.QObject):
# Emitted when the GUI is about to start processing;
# e.g. resetting, validating or publishing.
about_to_process = QtCore.Signal(object, object)
# Emitted for each process
was_processed = QtCore.Signal(dict)
was_discovered = QtCore.Signal()
was_reset = QtCore.Signal()
was_validated = QtCore.Signal()
was_published = QtCore.Signal()
was_acted = QtCore.Signal(dict)
# Emitted when processing has finished
was_finished = QtCore.Signal()
def __init__(self, parent=None):
super(Controller, self).__init__(parent)
self.context = list()
self.plugins = list()
# Data internal to the GUI itself
self.is_running = False
# Transient state used during publishing.
self.pair_generator = None # Active producer of pairs
self.current_pair = (None, None) # Active pair
self.current_error = None
# This is used to track whether or not to continue
# processing when, for example, validation has failed.
self.processing = {
"nextOrder": None,
"ordersWithError": set()
}
def reset(self):
"""Discover plug-ins and run collection"""
self.context = pyblish.api.Context()
self.plugins = pyblish.api.discover()
self.was_discovered.emit()
self.pair_generator = None
self.current_pair = (None, None)
self.current_error = None
self.processing = {
"nextOrder": None,
"ordersWithError": set()
}
self._load()
self._run(until=pyblish.api.CollectorOrder,
on_finished=self.was_reset.emit)
def validate(self):
self._run(until=pyblish.api.ValidatorOrder,
on_finished=self.on_validated)
def publish(self):
self._run(on_finished=self.on_published)
def on_validated(self):
pyblish.api.emit("validated", context=self.context)
self.was_validated.emit()
def on_published(self):
pyblish.api.emit("published", context=self.context)
self.was_published.emit()
def act(self, plugin, action):
context = self.context
def on_next():
result = pyblish.plugin.process(plugin, context, None, action.id)
self.was_acted.emit(result)
util.defer(100, on_next)
def emit_(self, signal, kwargs):
pyblish.api.emit(signal, **kwargs)
def _load(self):
"""Initiate new generator and load first pair"""
self.is_running = True
self.pair_generator = self._iterator(self.plugins,
self.context)
self.current_pair = next(self.pair_generator, (None, None))
self.current_error = None
self.is_running = False
def _run(self, until=float("inf"), on_finished=lambda: None):
"""Process current pair and store next pair for next process
Arguments:
until (pyblish.api.Order, optional): Keep fetching next()
until this order, default value is infinity.
on_finished (callable, optional): What to do when finishing,
defaults to doing nothing.
"""
def on_next():
if self.current_pair == (None, None):
return util.defer(100, on_finished_)
# The magic number 0.5 is the range between
# the various CVEI processing stages;
# e.g.
# - Collection is 0 +- 0.5 (-0.5 - 0.5)
# - Validation is 1 +- 0.5 (0.5 - 1.5)
#
# TODO(marcus): Make this less magical
#
order = self.current_pair[0].order
if order > (until + 0.5):
return util.defer(100, on_finished_)
self.about_to_process.emit(*self.current_pair)
util.defer(10, on_process)
def on_process():
try:
result = self._process(*self.current_pair)
if result["error"] is not None:
self.current_error = result["error"]
self.was_processed.emit(result)
except Exception as e:
stack = traceback.format_exc(e)
return util.defer(
500, lambda: on_unexpected_error(error=stack))
# Now that processing has completed, and context potentially
# modified with new instances, produce the next pair.
#
# IMPORTANT: This *must* be done *after* processing of
# the current pair, otherwise data generated at that point
# will *not* be included.
try:
self.current_pair = next(self.pair_generator)
except StopIteration:
# All pairs were processed successfully!
self.current_pair = (None, None)
return util.defer(500, on_finished_)
except Exception as e:
# This is a bug
stack = traceback.format_exc(e)
self.current_pair = (None, None)
return util.defer(
500, lambda: on_unexpected_error(error=stack))
util.defer(10, on_next)
def on_unexpected_error(error):
util.u_print(u"An unexpected error occurred:\n %s" % error)
return util.defer(500, on_finished_)
def on_finished_():
on_finished()
self.was_finished.emit()
self.is_running = True
util.defer(10, on_next)
def _iterator(self, plugins, context):
"""Yield next plug-in and instance to process.
Arguments:
plugins (list): Plug-ins to process
context (pyblish.api.Context): Context to process
"""
test = pyblish.logic.registered_test()
for plug, instance in pyblish.logic.Iterator(plugins, context):
if not plug.active:
continue
if instance is not None and instance.data.get("publish") is False:
continue
self.processing["nextOrder"] = plug.order
if not self.is_running:
raise StopIteration("Stopped")
if test(**self.processing):
raise StopIteration("Stopped due to %s" % test(
**self.processing))
yield plug, instance
def cleanup(self):
"""Forcefully delete objects from memory
In an ideal world, this shouldn't be necessary. Garbage
collection guarantees that anything without reference
is automatically removed.
However, because this application is designed to be run
multiple times from the same interpreter process, extra
case must be taken to ensure there are no memory leaks.
Explicitly deleting objects shines a light on where objects
may still be referenced in the form of an error. No errors
means this was uneccesary, but that's ok.
"""
for instance in self.context:
del(instance)
for plugin in self.plugins:
del(plugin)
|
pyblish/pyblish-lite | pyblish_lite/control.py | Controller._run | python | def _run(self, until=float("inf"), on_finished=lambda: None):
def on_next():
if self.current_pair == (None, None):
return util.defer(100, on_finished_)
# The magic number 0.5 is the range between
# the various CVEI processing stages;
# e.g.
# - Collection is 0 +- 0.5 (-0.5 - 0.5)
# - Validation is 1 +- 0.5 (0.5 - 1.5)
#
# TODO(marcus): Make this less magical
#
order = self.current_pair[0].order
if order > (until + 0.5):
return util.defer(100, on_finished_)
self.about_to_process.emit(*self.current_pair)
util.defer(10, on_process)
def on_process():
try:
result = self._process(*self.current_pair)
if result["error"] is not None:
self.current_error = result["error"]
self.was_processed.emit(result)
except Exception as e:
stack = traceback.format_exc(e)
return util.defer(
500, lambda: on_unexpected_error(error=stack))
# Now that processing has completed, and context potentially
# modified with new instances, produce the next pair.
#
# IMPORTANT: This *must* be done *after* processing of
# the current pair, otherwise data generated at that point
# will *not* be included.
try:
self.current_pair = next(self.pair_generator)
except StopIteration:
# All pairs were processed successfully!
self.current_pair = (None, None)
return util.defer(500, on_finished_)
except Exception as e:
# This is a bug
stack = traceback.format_exc(e)
self.current_pair = (None, None)
return util.defer(
500, lambda: on_unexpected_error(error=stack))
util.defer(10, on_next)
def on_unexpected_error(error):
util.u_print(u"An unexpected error occurred:\n %s" % error)
return util.defer(500, on_finished_)
def on_finished_():
on_finished()
self.was_finished.emit()
self.is_running = True
util.defer(10, on_next) | Process current pair and store next pair for next process
Arguments:
until (pyblish.api.Order, optional): Keep fetching next()
until this order, default value is infinity.
on_finished (callable, optional): What to do when finishing,
defaults to doing nothing. | train | https://github.com/pyblish/pyblish-lite/blob/9172b81c7ae19a36e99c89dd16e102201992dc20/pyblish_lite/control.py#L147-L224 | null | class Controller(QtCore.QObject):
# Emitted when the GUI is about to start processing;
# e.g. resetting, validating or publishing.
about_to_process = QtCore.Signal(object, object)
# Emitted for each process
was_processed = QtCore.Signal(dict)
was_discovered = QtCore.Signal()
was_reset = QtCore.Signal()
was_validated = QtCore.Signal()
was_published = QtCore.Signal()
was_acted = QtCore.Signal(dict)
# Emitted when processing has finished
was_finished = QtCore.Signal()
def __init__(self, parent=None):
super(Controller, self).__init__(parent)
self.context = list()
self.plugins = list()
# Data internal to the GUI itself
self.is_running = False
# Transient state used during publishing.
self.pair_generator = None # Active producer of pairs
self.current_pair = (None, None) # Active pair
self.current_error = None
# This is used to track whether or not to continue
# processing when, for example, validation has failed.
self.processing = {
"nextOrder": None,
"ordersWithError": set()
}
def reset(self):
"""Discover plug-ins and run collection"""
self.context = pyblish.api.Context()
self.plugins = pyblish.api.discover()
self.was_discovered.emit()
self.pair_generator = None
self.current_pair = (None, None)
self.current_error = None
self.processing = {
"nextOrder": None,
"ordersWithError": set()
}
self._load()
self._run(until=pyblish.api.CollectorOrder,
on_finished=self.was_reset.emit)
def validate(self):
self._run(until=pyblish.api.ValidatorOrder,
on_finished=self.on_validated)
def publish(self):
self._run(on_finished=self.on_published)
def on_validated(self):
pyblish.api.emit("validated", context=self.context)
self.was_validated.emit()
def on_published(self):
pyblish.api.emit("published", context=self.context)
self.was_published.emit()
def act(self, plugin, action):
context = self.context
def on_next():
result = pyblish.plugin.process(plugin, context, None, action.id)
self.was_acted.emit(result)
util.defer(100, on_next)
def emit_(self, signal, kwargs):
pyblish.api.emit(signal, **kwargs)
def _load(self):
"""Initiate new generator and load first pair"""
self.is_running = True
self.pair_generator = self._iterator(self.plugins,
self.context)
self.current_pair = next(self.pair_generator, (None, None))
self.current_error = None
self.is_running = False
def _process(self, plugin, instance=None):
"""Produce `result` from `plugin` and `instance`
:func:`process` shares state with :func:`_iterator` such that
an instance/plugin pair can be fetched and processed in isolation.
Arguments:
plugin (pyblish.api.Plugin): Produce result using plug-in
instance (optional, pyblish.api.Instance): Process this instance,
if no instance is provided, context is processed.
"""
self.processing["nextOrder"] = plugin.order
try:
result = pyblish.plugin.process(plugin, self.context, instance)
except Exception as e:
raise Exception("Unknown error: %s" % e)
else:
# Make note of the order at which the
# potential error error occured.
has_error = result["error"] is not None
if has_error:
self.processing["ordersWithError"].add(plugin.order)
return result
def _iterator(self, plugins, context):
"""Yield next plug-in and instance to process.
Arguments:
plugins (list): Plug-ins to process
context (pyblish.api.Context): Context to process
"""
test = pyblish.logic.registered_test()
for plug, instance in pyblish.logic.Iterator(plugins, context):
if not plug.active:
continue
if instance is not None and instance.data.get("publish") is False:
continue
self.processing["nextOrder"] = plug.order
if not self.is_running:
raise StopIteration("Stopped")
if test(**self.processing):
raise StopIteration("Stopped due to %s" % test(
**self.processing))
yield plug, instance
def cleanup(self):
"""Forcefully delete objects from memory
In an ideal world, this shouldn't be necessary. Garbage
collection guarantees that anything without reference
is automatically removed.
However, because this application is designed to be run
multiple times from the same interpreter process, extra
case must be taken to ensure there are no memory leaks.
Explicitly deleting objects shines a light on where objects
may still be referenced in the form of an error. No errors
means this was uneccesary, but that's ok.
"""
for instance in self.context:
del(instance)
for plugin in self.plugins:
del(plugin)
|
pyblish/pyblish-lite | pyblish_lite/control.py | Controller._iterator | python | def _iterator(self, plugins, context):
test = pyblish.logic.registered_test()
for plug, instance in pyblish.logic.Iterator(plugins, context):
if not plug.active:
continue
if instance is not None and instance.data.get("publish") is False:
continue
self.processing["nextOrder"] = plug.order
if not self.is_running:
raise StopIteration("Stopped")
if test(**self.processing):
raise StopIteration("Stopped due to %s" % test(
**self.processing))
yield plug, instance | Yield next plug-in and instance to process.
Arguments:
plugins (list): Plug-ins to process
context (pyblish.api.Context): Context to process | train | https://github.com/pyblish/pyblish-lite/blob/9172b81c7ae19a36e99c89dd16e102201992dc20/pyblish_lite/control.py#L226-L253 | null | class Controller(QtCore.QObject):
# Emitted when the GUI is about to start processing;
# e.g. resetting, validating or publishing.
about_to_process = QtCore.Signal(object, object)
# Emitted for each process
was_processed = QtCore.Signal(dict)
was_discovered = QtCore.Signal()
was_reset = QtCore.Signal()
was_validated = QtCore.Signal()
was_published = QtCore.Signal()
was_acted = QtCore.Signal(dict)
# Emitted when processing has finished
was_finished = QtCore.Signal()
def __init__(self, parent=None):
super(Controller, self).__init__(parent)
self.context = list()
self.plugins = list()
# Data internal to the GUI itself
self.is_running = False
# Transient state used during publishing.
self.pair_generator = None # Active producer of pairs
self.current_pair = (None, None) # Active pair
self.current_error = None
# This is used to track whether or not to continue
# processing when, for example, validation has failed.
self.processing = {
"nextOrder": None,
"ordersWithError": set()
}
def reset(self):
"""Discover plug-ins and run collection"""
self.context = pyblish.api.Context()
self.plugins = pyblish.api.discover()
self.was_discovered.emit()
self.pair_generator = None
self.current_pair = (None, None)
self.current_error = None
self.processing = {
"nextOrder": None,
"ordersWithError": set()
}
self._load()
self._run(until=pyblish.api.CollectorOrder,
on_finished=self.was_reset.emit)
def validate(self):
self._run(until=pyblish.api.ValidatorOrder,
on_finished=self.on_validated)
def publish(self):
self._run(on_finished=self.on_published)
def on_validated(self):
pyblish.api.emit("validated", context=self.context)
self.was_validated.emit()
def on_published(self):
pyblish.api.emit("published", context=self.context)
self.was_published.emit()
def act(self, plugin, action):
context = self.context
def on_next():
result = pyblish.plugin.process(plugin, context, None, action.id)
self.was_acted.emit(result)
util.defer(100, on_next)
def emit_(self, signal, kwargs):
pyblish.api.emit(signal, **kwargs)
def _load(self):
"""Initiate new generator and load first pair"""
self.is_running = True
self.pair_generator = self._iterator(self.plugins,
self.context)
self.current_pair = next(self.pair_generator, (None, None))
self.current_error = None
self.is_running = False
def _process(self, plugin, instance=None):
"""Produce `result` from `plugin` and `instance`
:func:`process` shares state with :func:`_iterator` such that
an instance/plugin pair can be fetched and processed in isolation.
Arguments:
plugin (pyblish.api.Plugin): Produce result using plug-in
instance (optional, pyblish.api.Instance): Process this instance,
if no instance is provided, context is processed.
"""
self.processing["nextOrder"] = plugin.order
try:
result = pyblish.plugin.process(plugin, self.context, instance)
except Exception as e:
raise Exception("Unknown error: %s" % e)
else:
# Make note of the order at which the
# potential error error occured.
has_error = result["error"] is not None
if has_error:
self.processing["ordersWithError"].add(plugin.order)
return result
def _run(self, until=float("inf"), on_finished=lambda: None):
"""Process current pair and store next pair for next process
Arguments:
until (pyblish.api.Order, optional): Keep fetching next()
until this order, default value is infinity.
on_finished (callable, optional): What to do when finishing,
defaults to doing nothing.
"""
def on_next():
if self.current_pair == (None, None):
return util.defer(100, on_finished_)
# The magic number 0.5 is the range between
# the various CVEI processing stages;
# e.g.
# - Collection is 0 +- 0.5 (-0.5 - 0.5)
# - Validation is 1 +- 0.5 (0.5 - 1.5)
#
# TODO(marcus): Make this less magical
#
order = self.current_pair[0].order
if order > (until + 0.5):
return util.defer(100, on_finished_)
self.about_to_process.emit(*self.current_pair)
util.defer(10, on_process)
def on_process():
try:
result = self._process(*self.current_pair)
if result["error"] is not None:
self.current_error = result["error"]
self.was_processed.emit(result)
except Exception as e:
stack = traceback.format_exc(e)
return util.defer(
500, lambda: on_unexpected_error(error=stack))
# Now that processing has completed, and context potentially
# modified with new instances, produce the next pair.
#
# IMPORTANT: This *must* be done *after* processing of
# the current pair, otherwise data generated at that point
# will *not* be included.
try:
self.current_pair = next(self.pair_generator)
except StopIteration:
# All pairs were processed successfully!
self.current_pair = (None, None)
return util.defer(500, on_finished_)
except Exception as e:
# This is a bug
stack = traceback.format_exc(e)
self.current_pair = (None, None)
return util.defer(
500, lambda: on_unexpected_error(error=stack))
util.defer(10, on_next)
def on_unexpected_error(error):
util.u_print(u"An unexpected error occurred:\n %s" % error)
return util.defer(500, on_finished_)
def on_finished_():
on_finished()
self.was_finished.emit()
self.is_running = True
util.defer(10, on_next)
def cleanup(self):
"""Forcefully delete objects from memory
In an ideal world, this shouldn't be necessary. Garbage
collection guarantees that anything without reference
is automatically removed.
However, because this application is designed to be run
multiple times from the same interpreter process, extra
case must be taken to ensure there are no memory leaks.
Explicitly deleting objects shines a light on where objects
may still be referenced in the form of an error. No errors
means this was uneccesary, but that's ok.
"""
for instance in self.context:
del(instance)
for plugin in self.plugins:
del(plugin)
|
pyblish/pyblish-lite | pyblish_lite/control.py | Controller.cleanup | python | def cleanup(self):
for instance in self.context:
del(instance)
for plugin in self.plugins:
del(plugin) | Forcefully delete objects from memory
In an ideal world, this shouldn't be necessary. Garbage
collection guarantees that anything without reference
is automatically removed.
However, because this application is designed to be run
multiple times from the same interpreter process, extra
case must be taken to ensure there are no memory leaks.
Explicitly deleting objects shines a light on where objects
may still be referenced in the form of an error. No errors
means this was uneccesary, but that's ok. | train | https://github.com/pyblish/pyblish-lite/blob/9172b81c7ae19a36e99c89dd16e102201992dc20/pyblish_lite/control.py#L255-L276 | null | class Controller(QtCore.QObject):
# Emitted when the GUI is about to start processing;
# e.g. resetting, validating or publishing.
about_to_process = QtCore.Signal(object, object)
# Emitted for each process
was_processed = QtCore.Signal(dict)
was_discovered = QtCore.Signal()
was_reset = QtCore.Signal()
was_validated = QtCore.Signal()
was_published = QtCore.Signal()
was_acted = QtCore.Signal(dict)
# Emitted when processing has finished
was_finished = QtCore.Signal()
def __init__(self, parent=None):
super(Controller, self).__init__(parent)
self.context = list()
self.plugins = list()
# Data internal to the GUI itself
self.is_running = False
# Transient state used during publishing.
self.pair_generator = None # Active producer of pairs
self.current_pair = (None, None) # Active pair
self.current_error = None
# This is used to track whether or not to continue
# processing when, for example, validation has failed.
self.processing = {
"nextOrder": None,
"ordersWithError": set()
}
def reset(self):
"""Discover plug-ins and run collection"""
self.context = pyblish.api.Context()
self.plugins = pyblish.api.discover()
self.was_discovered.emit()
self.pair_generator = None
self.current_pair = (None, None)
self.current_error = None
self.processing = {
"nextOrder": None,
"ordersWithError": set()
}
self._load()
self._run(until=pyblish.api.CollectorOrder,
on_finished=self.was_reset.emit)
def validate(self):
self._run(until=pyblish.api.ValidatorOrder,
on_finished=self.on_validated)
def publish(self):
self._run(on_finished=self.on_published)
def on_validated(self):
pyblish.api.emit("validated", context=self.context)
self.was_validated.emit()
def on_published(self):
pyblish.api.emit("published", context=self.context)
self.was_published.emit()
def act(self, plugin, action):
context = self.context
def on_next():
result = pyblish.plugin.process(plugin, context, None, action.id)
self.was_acted.emit(result)
util.defer(100, on_next)
def emit_(self, signal, kwargs):
pyblish.api.emit(signal, **kwargs)
def _load(self):
"""Initiate new generator and load first pair"""
self.is_running = True
self.pair_generator = self._iterator(self.plugins,
self.context)
self.current_pair = next(self.pair_generator, (None, None))
self.current_error = None
self.is_running = False
def _process(self, plugin, instance=None):
"""Produce `result` from `plugin` and `instance`
:func:`process` shares state with :func:`_iterator` such that
an instance/plugin pair can be fetched and processed in isolation.
Arguments:
plugin (pyblish.api.Plugin): Produce result using plug-in
instance (optional, pyblish.api.Instance): Process this instance,
if no instance is provided, context is processed.
"""
self.processing["nextOrder"] = plugin.order
try:
result = pyblish.plugin.process(plugin, self.context, instance)
except Exception as e:
raise Exception("Unknown error: %s" % e)
else:
# Make note of the order at which the
# potential error error occured.
has_error = result["error"] is not None
if has_error:
self.processing["ordersWithError"].add(plugin.order)
return result
def _run(self, until=float("inf"), on_finished=lambda: None):
    """Process current pair and store next pair for next process

    Arguments:
        until (pyblish.api.Order, optional): Keep fetching next()
            until this order, default value is infinity.
        on_finished (callable, optional): What to do when finishing,
            defaults to doing nothing.

    """

    def on_next():
        # A (None, None) pair means the pair generator was exhausted:
        # there is nothing left to process.
        if self.current_pair == (None, None):
            return util.defer(100, on_finished_)

        # The magic number 0.5 is the range between
        # the various CVEI processing stages;
        # e.g.
        # - Collection is 0 +- 0.5 (-0.5 - 0.5)
        # - Validation is 1 +- 0.5 (0.5 - 1.5)
        #
        # TODO(marcus): Make this less magical
        #
        order = self.current_pair[0].order
        if order > (until + 0.5):
            return util.defer(100, on_finished_)

        self.about_to_process.emit(*self.current_pair)
        util.defer(10, on_process)

    def on_process():
        try:
            result = self._process(*self.current_pair)

            if result["error"] is not None:
                self.current_error = result["error"]

            self.was_processed.emit(result)

        except Exception as e:
            # NOTE(review): traceback.format_exc takes a `limit`
            # argument, not an exception instance; passing `e` only
            # works by accident on Python 2 -- confirm intent.
            stack = traceback.format_exc(e)
            return util.defer(
                500, lambda: on_unexpected_error(error=stack))

        # Now that processing has completed, and context potentially
        # modified with new instances, produce the next pair.
        #
        # IMPORTANT: This *must* be done *after* processing of
        # the current pair, otherwise data generated at that point
        # will *not* be included.
        try:
            self.current_pair = next(self.pair_generator)

        except StopIteration:
            # All pairs were processed successfully!
            self.current_pair = (None, None)
            return util.defer(500, on_finished_)

        except Exception as e:
            # This is a bug
            stack = traceback.format_exc(e)
            self.current_pair = (None, None)
            return util.defer(
                500, lambda: on_unexpected_error(error=stack))

        util.defer(10, on_next)

    def on_unexpected_error(error):
        # Best-effort reporting; the run still terminates via
        # on_finished_.
        util.u_print(u"An unexpected error occurred:\n %s" % error)
        return util.defer(500, on_finished_)

    def on_finished_():
        on_finished()
        self.was_finished.emit()

    self.is_running = True
    util.defer(10, on_next)
def _iterator(self, plugins, context):
    """Yield next plug-in and instance to process.

    Skips deactivated plug-ins and instances explicitly flagged
    ``publish=False``, and stops early when the controller is no
    longer running or the registered test rejects the current state.

    Arguments:
        plugins (list): Plug-ins to process
        context (pyblish.api.Context): Context to process

    """
    test = pyblish.logic.registered_test()

    for plug, instance in pyblish.logic.Iterator(plugins, context):
        if not plug.active:
            continue

        if instance is not None and instance.data.get("publish") is False:
            continue

        self.processing["nextOrder"] = plug.order

        # PEP 479 (Python 3.7+): raising StopIteration inside a
        # generator is converted to RuntimeError, so terminate the
        # generator with a plain `return` instead.
        if not self.is_running:
            return

        if test(**self.processing):
            return

        yield plug, instance
def _pyside():
    """Initialise PySide"""

    import PySide as module
    extras = ["QtUiTools"]
    try:
        try:
            # Before merge of PySide and shiboken
            import shiboken
        except ImportError:
            # After merge of PySide and shiboken, May 2017
            from PySide import shiboken
        extras.append("shiboken")
    except ImportError:
        pass

    _setup(module, extras)
    Qt.__binding_version__ = module.__version__

    if hasattr(Qt, "_shiboken"):
        Qt.QtCompat.wrapInstance = _wrapinstance
        Qt.QtCompat.getCppPointer = _getcpppointer

    if hasattr(Qt, "_QtUiTools"):
        Qt.QtCompat.loadUi = _loadUi

    if hasattr(Qt, "_QtGui"):
        # PySide has no QtWidgets; alias it to QtGui.
        setattr(Qt, "QtWidgets", _new_module("QtWidgets"))
        setattr(Qt, "_QtWidgets", Qt._QtGui)
        if hasattr(Qt._QtGui, "QX11Info"):
            setattr(Qt, "QtX11Extras", _new_module("QtX11Extras"))
            Qt.QtX11Extras.QX11Info = Qt._QtGui.QX11Info

        Qt.QtCompat.setSectionResizeMode = \
            Qt._QtGui.QHeaderView.setResizeMode

    if hasattr(Qt, "_QtCore"):
        Qt.__qt_version__ = Qt._QtCore.qVersion()

    _reassign_misplaced_members("PySide")
    _build_compatibility_members("PySide")


"""Minimal Python 2 & 3 shim around all Qt bindings

DOCUMENTATION
    Qt.py was born in the film and visual effects industry to address
    the growing need for the development of software capable of running
    with more than one flavour of the Qt bindings for Python - PySide,
    PySide2, PyQt4 and PyQt5.

    1. Build for one, run with all
    2. Explicit is better than implicit
    3. Support co-existence

    Default resolution order:
        - PySide2
        - PyQt5
        - PySide
        - PyQt4

    Usage:
        >> import sys
        >> from Qt import QtWidgets
        >> app = QtWidgets.QApplication(sys.argv)
        >> button = QtWidgets.QPushButton("Hello World")
        >> button.show()
        >> app.exec_()

    All members of PySide2 are mapped from other bindings, should they exist.
    If no equivalent member exist, it is excluded from Qt.py and inaccessible.
    The idea is to highlight members that exist across all supported binding,
    and guarantee that code that runs on one binding runs on all others.

    For more details, visit https://github.com/mottosso/Qt.py

LICENSE
    See end of file for license (MIT, BSD) information.

"""
import os
import sys
import types
import shutil
__version__ = "1.2.0.b2"
# Enable support for `from Qt import *`
__all__ = []
# Flags from environment variables
QT_VERBOSE = bool(os.getenv("QT_VERBOSE"))
QT_PREFERRED_BINDING = os.getenv("QT_PREFERRED_BINDING", "")
QT_SIP_API_HINT = os.getenv("QT_SIP_API_HINT")
# Reference to Qt.py
Qt = sys.modules[__name__]
Qt.QtCompat = types.ModuleType("QtCompat")
try:
long
except NameError:
# Python 3 compatibility
long = int
"""Common members of all bindings
This is where each member of Qt.py is explicitly defined.
It is based on a "lowest common denominator" of all bindings;
including members found in each of the 4 bindings.
The "_common_members" dictionary is generated using the
build_membership.sh script.
"""
_common_members = {
"QtCore": [
"QAbstractAnimation",
"QAbstractEventDispatcher",
"QAbstractItemModel",
"QAbstractListModel",
"QAbstractState",
"QAbstractTableModel",
"QAbstractTransition",
"QAnimationGroup",
"QBasicTimer",
"QBitArray",
"QBuffer",
"QByteArray",
"QByteArrayMatcher",
"QChildEvent",
"QCoreApplication",
"QCryptographicHash",
"QDataStream",
"QDate",
"QDateTime",
"QDir",
"QDirIterator",
"QDynamicPropertyChangeEvent",
"QEasingCurve",
"QElapsedTimer",
"QEvent",
"QEventLoop",
"QEventTransition",
"QFile",
"QFileInfo",
"QFileSystemWatcher",
"QFinalState",
"QGenericArgument",
"QGenericReturnArgument",
"QHistoryState",
"QItemSelectionRange",
"QIODevice",
"QLibraryInfo",
"QLine",
"QLineF",
"QLocale",
"QMargins",
"QMetaClassInfo",
"QMetaEnum",
"QMetaMethod",
"QMetaObject",
"QMetaProperty",
"QMimeData",
"QModelIndex",
"QMutex",
"QMutexLocker",
"QObject",
"QParallelAnimationGroup",
"QPauseAnimation",
"QPersistentModelIndex",
"QPluginLoader",
"QPoint",
"QPointF",
"QProcess",
"QProcessEnvironment",
"QPropertyAnimation",
"QReadLocker",
"QReadWriteLock",
"QRect",
"QRectF",
"QRegExp",
"QResource",
"QRunnable",
"QSemaphore",
"QSequentialAnimationGroup",
"QSettings",
"QSignalMapper",
"QSignalTransition",
"QSize",
"QSizeF",
"QSocketNotifier",
"QState",
"QStateMachine",
"QSysInfo",
"QSystemSemaphore",
"QT_TRANSLATE_NOOP",
"QT_TR_NOOP",
"QT_TR_NOOP_UTF8",
"QTemporaryFile",
"QTextBoundaryFinder",
"QTextCodec",
"QTextDecoder",
"QTextEncoder",
"QTextStream",
"QTextStreamManipulator",
"QThread",
"QThreadPool",
"QTime",
"QTimeLine",
"QTimer",
"QTimerEvent",
"QTranslator",
"QUrl",
"QVariantAnimation",
"QWaitCondition",
"QWriteLocker",
"QXmlStreamAttribute",
"QXmlStreamAttributes",
"QXmlStreamEntityDeclaration",
"QXmlStreamEntityResolver",
"QXmlStreamNamespaceDeclaration",
"QXmlStreamNotationDeclaration",
"QXmlStreamReader",
"QXmlStreamWriter",
"Qt",
"QtCriticalMsg",
"QtDebugMsg",
"QtFatalMsg",
"QtMsgType",
"QtSystemMsg",
"QtWarningMsg",
"qAbs",
"qAddPostRoutine",
"qChecksum",
"qCritical",
"qDebug",
"qFatal",
"qFuzzyCompare",
"qIsFinite",
"qIsInf",
"qIsNaN",
"qIsNull",
"qRegisterResourceData",
"qUnregisterResourceData",
"qVersion",
"qWarning",
"qrand",
"qsrand"
],
"QtGui": [
"QAbstractTextDocumentLayout",
"QActionEvent",
"QBitmap",
"QBrush",
"QClipboard",
"QCloseEvent",
"QColor",
"QConicalGradient",
"QContextMenuEvent",
"QCursor",
"QDesktopServices",
"QDoubleValidator",
"QDrag",
"QDragEnterEvent",
"QDragLeaveEvent",
"QDragMoveEvent",
"QDropEvent",
"QFileOpenEvent",
"QFocusEvent",
"QFont",
"QFontDatabase",
"QFontInfo",
"QFontMetrics",
"QFontMetricsF",
"QGradient",
"QHelpEvent",
"QHideEvent",
"QHoverEvent",
"QIcon",
"QIconDragEvent",
"QIconEngine",
"QImage",
"QImageIOHandler",
"QImageReader",
"QImageWriter",
"QInputEvent",
"QInputMethodEvent",
"QIntValidator",
"QKeyEvent",
"QKeySequence",
"QLinearGradient",
"QMatrix2x2",
"QMatrix2x3",
"QMatrix2x4",
"QMatrix3x2",
"QMatrix3x3",
"QMatrix3x4",
"QMatrix4x2",
"QMatrix4x3",
"QMatrix4x4",
"QMouseEvent",
"QMoveEvent",
"QMovie",
"QPaintDevice",
"QPaintEngine",
"QPaintEngineState",
"QPaintEvent",
"QPainter",
"QPainterPath",
"QPainterPathStroker",
"QPalette",
"QPen",
"QPicture",
"QPictureIO",
"QPixmap",
"QPixmapCache",
"QPolygon",
"QPolygonF",
"QQuaternion",
"QRadialGradient",
"QRegExpValidator",
"QRegion",
"QResizeEvent",
"QSessionManager",
"QShortcutEvent",
"QShowEvent",
"QStandardItem",
"QStandardItemModel",
"QStatusTipEvent",
"QSyntaxHighlighter",
"QTabletEvent",
"QTextBlock",
"QTextBlockFormat",
"QTextBlockGroup",
"QTextBlockUserData",
"QTextCharFormat",
"QTextCursor",
"QTextDocument",
"QTextDocumentFragment",
"QTextFormat",
"QTextFragment",
"QTextFrame",
"QTextFrameFormat",
"QTextImageFormat",
"QTextInlineObject",
"QTextItem",
"QTextLayout",
"QTextLength",
"QTextLine",
"QTextList",
"QTextListFormat",
"QTextObject",
"QTextObjectInterface",
"QTextOption",
"QTextTable",
"QTextTableCell",
"QTextTableCellFormat",
"QTextTableFormat",
"QTouchEvent",
"QTransform",
"QValidator",
"QVector2D",
"QVector3D",
"QVector4D",
"QWhatsThisClickedEvent",
"QWheelEvent",
"QWindowStateChangeEvent",
"qAlpha",
"qBlue",
"qGray",
"qGreen",
"qIsGray",
"qRed",
"qRgb",
"qRgba"
],
"QtHelp": [
"QHelpContentItem",
"QHelpContentModel",
"QHelpContentWidget",
"QHelpEngine",
"QHelpEngineCore",
"QHelpIndexModel",
"QHelpIndexWidget",
"QHelpSearchEngine",
"QHelpSearchQuery",
"QHelpSearchQueryWidget",
"QHelpSearchResultWidget"
],
"QtMultimedia": [
"QAbstractVideoBuffer",
"QAbstractVideoSurface",
"QAudio",
"QAudioDeviceInfo",
"QAudioFormat",
"QAudioInput",
"QAudioOutput",
"QVideoFrame",
"QVideoSurfaceFormat"
],
"QtNetwork": [
"QAbstractNetworkCache",
"QAbstractSocket",
"QAuthenticator",
"QHostAddress",
"QHostInfo",
"QLocalServer",
"QLocalSocket",
"QNetworkAccessManager",
"QNetworkAddressEntry",
"QNetworkCacheMetaData",
"QNetworkConfiguration",
"QNetworkConfigurationManager",
"QNetworkCookie",
"QNetworkCookieJar",
"QNetworkDiskCache",
"QNetworkInterface",
"QNetworkProxy",
"QNetworkProxyFactory",
"QNetworkProxyQuery",
"QNetworkReply",
"QNetworkRequest",
"QNetworkSession",
"QSsl",
"QTcpServer",
"QTcpSocket",
"QUdpSocket"
],
"QtOpenGL": [
"QGL",
"QGLContext",
"QGLFormat",
"QGLWidget"
],
"QtPrintSupport": [
"QAbstractPrintDialog",
"QPageSetupDialog",
"QPrintDialog",
"QPrintEngine",
"QPrintPreviewDialog",
"QPrintPreviewWidget",
"QPrinter",
"QPrinterInfo"
],
"QtSql": [
"QSql",
"QSqlDatabase",
"QSqlDriver",
"QSqlDriverCreatorBase",
"QSqlError",
"QSqlField",
"QSqlIndex",
"QSqlQuery",
"QSqlQueryModel",
"QSqlRecord",
"QSqlRelation",
"QSqlRelationalDelegate",
"QSqlRelationalTableModel",
"QSqlResult",
"QSqlTableModel"
],
"QtSvg": [
"QGraphicsSvgItem",
"QSvgGenerator",
"QSvgRenderer",
"QSvgWidget"
],
"QtTest": [
"QTest"
],
"QtWidgets": [
"QAbstractButton",
"QAbstractGraphicsShapeItem",
"QAbstractItemDelegate",
"QAbstractItemView",
"QAbstractScrollArea",
"QAbstractSlider",
"QAbstractSpinBox",
"QAction",
"QActionGroup",
"QApplication",
"QBoxLayout",
"QButtonGroup",
"QCalendarWidget",
"QCheckBox",
"QColorDialog",
"QColumnView",
"QComboBox",
"QCommandLinkButton",
"QCommonStyle",
"QCompleter",
"QDataWidgetMapper",
"QDateEdit",
"QDateTimeEdit",
"QDesktopWidget",
"QDial",
"QDialog",
"QDialogButtonBox",
"QDirModel",
"QDockWidget",
"QDoubleSpinBox",
"QErrorMessage",
"QFileDialog",
"QFileIconProvider",
"QFileSystemModel",
"QFocusFrame",
"QFontComboBox",
"QFontDialog",
"QFormLayout",
"QFrame",
"QGesture",
"QGestureEvent",
"QGestureRecognizer",
"QGraphicsAnchor",
"QGraphicsAnchorLayout",
"QGraphicsBlurEffect",
"QGraphicsColorizeEffect",
"QGraphicsDropShadowEffect",
"QGraphicsEffect",
"QGraphicsEllipseItem",
"QGraphicsGridLayout",
"QGraphicsItem",
"QGraphicsItemGroup",
"QGraphicsLayout",
"QGraphicsLayoutItem",
"QGraphicsLineItem",
"QGraphicsLinearLayout",
"QGraphicsObject",
"QGraphicsOpacityEffect",
"QGraphicsPathItem",
"QGraphicsPixmapItem",
"QGraphicsPolygonItem",
"QGraphicsProxyWidget",
"QGraphicsRectItem",
"QGraphicsRotation",
"QGraphicsScale",
"QGraphicsScene",
"QGraphicsSceneContextMenuEvent",
"QGraphicsSceneDragDropEvent",
"QGraphicsSceneEvent",
"QGraphicsSceneHelpEvent",
"QGraphicsSceneHoverEvent",
"QGraphicsSceneMouseEvent",
"QGraphicsSceneMoveEvent",
"QGraphicsSceneResizeEvent",
"QGraphicsSceneWheelEvent",
"QGraphicsSimpleTextItem",
"QGraphicsTextItem",
"QGraphicsTransform",
"QGraphicsView",
"QGraphicsWidget",
"QGridLayout",
"QGroupBox",
"QHBoxLayout",
"QHeaderView",
"QInputDialog",
"QItemDelegate",
"QItemEditorCreatorBase",
"QItemEditorFactory",
"QKeyEventTransition",
"QLCDNumber",
"QLabel",
"QLayout",
"QLayoutItem",
"QLineEdit",
"QListView",
"QListWidget",
"QListWidgetItem",
"QMainWindow",
"QMdiArea",
"QMdiSubWindow",
"QMenu",
"QMenuBar",
"QMessageBox",
"QMouseEventTransition",
"QPanGesture",
"QPinchGesture",
"QPlainTextDocumentLayout",
"QPlainTextEdit",
"QProgressBar",
"QProgressDialog",
"QPushButton",
"QRadioButton",
"QRubberBand",
"QScrollArea",
"QScrollBar",
"QShortcut",
"QSizeGrip",
"QSizePolicy",
"QSlider",
"QSpacerItem",
"QSpinBox",
"QSplashScreen",
"QSplitter",
"QSplitterHandle",
"QStackedLayout",
"QStackedWidget",
"QStatusBar",
"QStyle",
"QStyleFactory",
"QStyleHintReturn",
"QStyleHintReturnMask",
"QStyleHintReturnVariant",
"QStyleOption",
"QStyleOptionButton",
"QStyleOptionComboBox",
"QStyleOptionComplex",
"QStyleOptionDockWidget",
"QStyleOptionFocusRect",
"QStyleOptionFrame",
"QStyleOptionGraphicsItem",
"QStyleOptionGroupBox",
"QStyleOptionHeader",
"QStyleOptionMenuItem",
"QStyleOptionProgressBar",
"QStyleOptionRubberBand",
"QStyleOptionSizeGrip",
"QStyleOptionSlider",
"QStyleOptionSpinBox",
"QStyleOptionTab",
"QStyleOptionTabBarBase",
"QStyleOptionTabWidgetFrame",
"QStyleOptionTitleBar",
"QStyleOptionToolBar",
"QStyleOptionToolBox",
"QStyleOptionToolButton",
"QStyleOptionViewItem",
"QStylePainter",
"QStyledItemDelegate",
"QSwipeGesture",
"QSystemTrayIcon",
"QTabBar",
"QTabWidget",
"QTableView",
"QTableWidget",
"QTableWidgetItem",
"QTableWidgetSelectionRange",
"QTapAndHoldGesture",
"QTapGesture",
"QTextBrowser",
"QTextEdit",
"QTimeEdit",
"QToolBar",
"QToolBox",
"QToolButton",
"QToolTip",
"QTreeView",
"QTreeWidget",
"QTreeWidgetItem",
"QTreeWidgetItemIterator",
"QUndoCommand",
"QUndoGroup",
"QUndoStack",
"QUndoView",
"QVBoxLayout",
"QWhatsThis",
"QWidget",
"QWidgetAction",
"QWidgetItem",
"QWizard",
"QWizardPage"
],
"QtX11Extras": [
"QX11Info"
],
"QtXml": [
"QDomAttr",
"QDomCDATASection",
"QDomCharacterData",
"QDomComment",
"QDomDocument",
"QDomDocumentFragment",
"QDomDocumentType",
"QDomElement",
"QDomEntity",
"QDomEntityReference",
"QDomImplementation",
"QDomNamedNodeMap",
"QDomNode",
"QDomNodeList",
"QDomNotation",
"QDomProcessingInstruction",
"QDomText",
"QXmlAttributes",
"QXmlContentHandler",
"QXmlDTDHandler",
"QXmlDeclHandler",
"QXmlDefaultHandler",
"QXmlEntityResolver",
"QXmlErrorHandler",
"QXmlInputSource",
"QXmlLexicalHandler",
"QXmlLocator",
"QXmlNamespaceSupport",
"QXmlParseException",
"QXmlReader",
"QXmlSimpleReader"
],
"QtXmlPatterns": [
"QAbstractMessageHandler",
"QAbstractUriResolver",
"QAbstractXmlNodeModel",
"QAbstractXmlReceiver",
"QSourceLocation",
"QXmlFormatter",
"QXmlItem",
"QXmlName",
"QXmlNamePool",
"QXmlNodeModelIndex",
"QXmlQuery",
"QXmlResultItems",
"QXmlSchema",
"QXmlSchemaValidator",
"QXmlSerializer"
]
}
def _qInstallMessageHandler(handler):
    """Install a message handler that works in all bindings

    Args:
        handler: A function that takes 3 arguments, or None
    """
    def _bridge(*args):
        # Qt4 bindings invoke handlers with (msgType, msg); Qt5
        # bindings invoke them with (msgType, logContext, msg).
        # Normalize to the 3-argument Qt5 form.
        if len(args) == 2:
            msg_type, msg = args
            log_context = None
        elif len(args) == 3:
            msg_type, log_context, msg = args
        else:
            raise TypeError(
                "handler expected 2 or 3 arguments, got {0}".format(len(args)))

        # Some bindings deliver a bytestring under Python 3; decoding
        # yields a unicode object on both Python 2 and 3.
        msg = msg.decode() if isinstance(msg, bytes) else msg

        handler(msg_type, log_context, msg)

    # Passing None through uninstalls the handler, so only wrap a
    # real callable.
    wrapped = _bridge if handler else handler

    if Qt.IsPySide or Qt.IsPyQt4:
        return Qt._QtCore.qInstallMsgHandler(wrapped)
    elif Qt.IsPySide2 or Qt.IsPyQt5:
        return Qt._QtCore.qInstallMessageHandler(wrapped)
def _getcpppointer(object):
    # Ask whichever pointer-introspection module the active binding
    # installed: shiboken2, then shiboken, then sip.
    if hasattr(Qt, "_shiboken2"):
        return Qt._shiboken2.getCppPointer(object)[0]

    if hasattr(Qt, "_shiboken"):
        return Qt._shiboken.getCppPointer(object)[0]

    if hasattr(Qt, "_sip"):
        return Qt._sip.unwrapinstance(object)

    raise AttributeError("'module' has no attribute 'getCppPointer'")
def _wrapinstance(ptr, base=None):
    """Enable implicit cast of pointer to most suitable class

    This behaviour is available in sip per default.

    Based on http://nathanhorne.com/pyqtpyside-wrap-instance

    Usage:
        This mechanism kicks in under these circumstances.
        1. Qt.py is using PySide 1 or 2.
        2. A `base` argument is not provided.

        See :func:`QtCompat.wrapInstance()`

    Arguments:
        ptr (long): Pointer to QObject in memory
        base (QObject, optional): Base class to wrap with. Defaults to
            QObject, which should handle anything.

    """
    assert isinstance(ptr, long), "Argument 'ptr' must be of type <long>"
    assert (base is None) or issubclass(base, Qt.QtCore.QObject), (
        "Argument 'base' must be of type <QObject>")

    # Select the wrap function supplied by the active binding.
    if Qt.IsPyQt4 or Qt.IsPyQt5:
        wrap = Qt._sip.wrapinstance
    elif Qt.IsPySide2:
        wrap = Qt._shiboken2.wrapInstance
    elif Qt.IsPySide:
        wrap = Qt._shiboken.wrapInstance
    else:
        raise AttributeError("'module' has no attribute 'wrapInstance'")

    if base is None:
        # Probe the object's meta-object to pick the most specific
        # wrapper class available, falling back to QObject.
        probe = wrap(long(ptr), Qt.QtCore.QObject)
        meta = probe.metaObject()
        cls_name = meta.className()
        parent_name = meta.superClass().className()

        if hasattr(Qt.QtWidgets, cls_name):
            base = getattr(Qt.QtWidgets, cls_name)
        elif hasattr(Qt.QtWidgets, parent_name):
            base = getattr(Qt.QtWidgets, parent_name)
        else:
            base = Qt.QtCore.QObject

    return wrap(long(ptr), base)
def _translate(context, sourceText, *args):
    """Dispatch to the active binding's QCoreApplication.translate.

    Qt4-style bindings pass (disambiguation, encoding, n) in `args`;
    Qt5-style bindings pass (disambiguation, n).
    """
    if len(args) == 2:
        disambiguation, n = args
        encoding = None
    elif len(args) == 3:
        disambiguation, encoding, n = args
    else:
        raise TypeError(
            "Expected 4 or 5 arguments, got {0}.".format(len(args)+2))

    if not hasattr(Qt.QtCore, "QCoreApplication"):
        raise NotImplementedError(
            "Missing QCoreApplication implementation for {binding}".format(
                binding=Qt.__binding__,
            )
        )
    app = getattr(Qt.QtCore, "QCoreApplication")

    if Qt.__binding__ in ("PySide2", "PyQt5"):
        # Qt5 signature: no encoding argument.
        return app.translate(context, sourceText, disambiguation, n)

    return app.translate(
        context,
        sourceText,
        disambiguation,
        encoding or app.CodecForTr,
        n,
    )
def _loadUi(uifile, baseinstance=None):
    """Dynamically load a user interface from the given `uifile`

    This function calls `uic.loadUi` if using PyQt bindings,
    else it implements a comparable binding for PySide.

    Documentation:
        http://pyqt.sourceforge.net/Docs/PyQt5/designer.html#PyQt5.uic.loadUi

    Arguments:
        uifile (str): Absolute path to Qt Designer file.
        baseinstance (QWidget): Instantiated QWidget or subclass thereof

    Return:
        baseinstance if `baseinstance` is not `None`. Otherwise
        return the newly created instance of the user interface.

    """
    if hasattr(Qt, "_uic"):
        # PyQt4/PyQt5 ship a native `uic.loadUi`; delegate to it.
        return Qt._uic.loadUi(uifile, baseinstance)

    elif hasattr(Qt, "_QtUiTools"):
        # Implement `PyQt5.uic.loadUi` for PySide(2)

        class _UiLoader(Qt._QtUiTools.QUiLoader):
            """Create the user interface in a base instance.

            Unlike `Qt._QtUiTools.QUiLoader` itself this class does not
            create a new instance of the top-level widget, but creates the
            user interface in an existing instance of the top-level class
            if needed.

            This mimics the behaviour of `PyQt5.uic.loadUi`.

            """

            def __init__(self, baseinstance):
                super(_UiLoader, self).__init__(baseinstance)
                self.baseinstance = baseinstance

            def load(self, uifile, *args, **kwargs):
                from xml.etree.ElementTree import ElementTree

                # For whatever reason, if this doesn't happen then
                # reading an invalid or non-existing .ui file throws
                # a RuntimeError.
                etree = ElementTree()
                etree.parse(uifile)

                widget = Qt._QtUiTools.QUiLoader.load(
                    self, uifile, *args, **kwargs)

                # Workaround for PySide 1.0.9, see issue #208
                widget.parentWidget()

                return widget

            def createWidget(self, class_name, parent=None, name=""):
                """Called for each widget defined in ui file

                Overridden here to populate `baseinstance` instead.

                """
                if parent is None and self.baseinstance:
                    # Supposed to create the top-level widget,
                    # return the base instance instead
                    return self.baseinstance

                # For some reason, Line is not in the list of available
                # widgets, but works fine, so we have to special case it
                # here.
                if class_name in self.availableWidgets() + ["Line"]:
                    # Create a new widget for child widgets
                    widget = Qt._QtUiTools.QUiLoader.createWidget(self,
                                                                  class_name,
                                                                  parent,
                                                                  name)
                else:
                    raise Exception("Custom widget '%s' not supported"
                                    % class_name)

                if self.baseinstance:
                    # Set an attribute for the new child widget on the base
                    # instance, just like PyQt5.uic.loadUi does.
                    setattr(self.baseinstance, name, widget)

                return widget

        widget = _UiLoader(baseinstance).load(uifile)
        # Auto-connect on_<object>_<signal> slots, as uic.loadUi does.
        Qt.QtCore.QMetaObject.connectSlotsByName(widget)

        return widget

    else:
        raise NotImplementedError("No implementation available for loadUi")
"""Misplaced members
These members from the original submodule are misplaced relative PySide2
"""
_misplaced_members = {
"PySide2": {
"QtGui.QStringListModel": "QtCore.QStringListModel",
"QtCore.Property": "QtCore.Property",
"QtCore.Signal": "QtCore.Signal",
"QtCore.Slot": "QtCore.Slot",
"QtCore.QAbstractProxyModel": "QtCore.QAbstractProxyModel",
"QtCore.QSortFilterProxyModel": "QtCore.QSortFilterProxyModel",
"QtCore.QItemSelection": "QtCore.QItemSelection",
"QtCore.QItemSelectionModel": "QtCore.QItemSelectionModel",
"QtCore.QItemSelectionRange": "QtCore.QItemSelectionRange",
"QtUiTools.QUiLoader": ["QtCompat.loadUi", _loadUi],
"shiboken2.wrapInstance": ["QtCompat.wrapInstance", _wrapinstance],
"shiboken2.getCppPointer": ["QtCompat.getCppPointer", _getcpppointer],
"QtWidgets.qApp": "QtWidgets.QApplication.instance()",
"QtCore.QCoreApplication.translate": [
"QtCompat.translate", _translate
],
"QtWidgets.QApplication.translate": [
"QtCompat.translate", _translate
],
"QtCore.qInstallMessageHandler": [
"QtCompat.qInstallMessageHandler", _qInstallMessageHandler
],
},
"PyQt5": {
"QtCore.pyqtProperty": "QtCore.Property",
"QtCore.pyqtSignal": "QtCore.Signal",
"QtCore.pyqtSlot": "QtCore.Slot",
"QtCore.QAbstractProxyModel": "QtCore.QAbstractProxyModel",
"QtCore.QSortFilterProxyModel": "QtCore.QSortFilterProxyModel",
"QtCore.QStringListModel": "QtCore.QStringListModel",
"QtCore.QItemSelection": "QtCore.QItemSelection",
"QtCore.QItemSelectionModel": "QtCore.QItemSelectionModel",
"QtCore.QItemSelectionRange": "QtCore.QItemSelectionRange",
"uic.loadUi": ["QtCompat.loadUi", _loadUi],
"sip.wrapinstance": ["QtCompat.wrapInstance", _wrapinstance],
"sip.unwrapinstance": ["QtCompat.getCppPointer", _getcpppointer],
"QtWidgets.qApp": "QtWidgets.QApplication.instance()",
"QtCore.QCoreApplication.translate": [
"QtCompat.translate", _translate
],
"QtWidgets.QApplication.translate": [
"QtCompat.translate", _translate
],
"QtCore.qInstallMessageHandler": [
"QtCompat.qInstallMessageHandler", _qInstallMessageHandler
],
},
"PySide": {
"QtGui.QAbstractProxyModel": "QtCore.QAbstractProxyModel",
"QtGui.QSortFilterProxyModel": "QtCore.QSortFilterProxyModel",
"QtGui.QStringListModel": "QtCore.QStringListModel",
"QtGui.QItemSelection": "QtCore.QItemSelection",
"QtGui.QItemSelectionModel": "QtCore.QItemSelectionModel",
"QtCore.Property": "QtCore.Property",
"QtCore.Signal": "QtCore.Signal",
"QtCore.Slot": "QtCore.Slot",
"QtGui.QItemSelectionRange": "QtCore.QItemSelectionRange",
"QtGui.QAbstractPrintDialog": "QtPrintSupport.QAbstractPrintDialog",
"QtGui.QPageSetupDialog": "QtPrintSupport.QPageSetupDialog",
"QtGui.QPrintDialog": "QtPrintSupport.QPrintDialog",
"QtGui.QPrintEngine": "QtPrintSupport.QPrintEngine",
"QtGui.QPrintPreviewDialog": "QtPrintSupport.QPrintPreviewDialog",
"QtGui.QPrintPreviewWidget": "QtPrintSupport.QPrintPreviewWidget",
"QtGui.QPrinter": "QtPrintSupport.QPrinter",
"QtGui.QPrinterInfo": "QtPrintSupport.QPrinterInfo",
"QtUiTools.QUiLoader": ["QtCompat.loadUi", _loadUi],
"shiboken.wrapInstance": ["QtCompat.wrapInstance", _wrapinstance],
"shiboken.unwrapInstance": ["QtCompat.getCppPointer", _getcpppointer],
"QtGui.qApp": "QtWidgets.QApplication.instance()",
"QtCore.QCoreApplication.translate": [
"QtCompat.translate", _translate
],
"QtGui.QApplication.translate": [
"QtCompat.translate", _translate
],
"QtCore.qInstallMsgHandler": [
"QtCompat.qInstallMessageHandler", _qInstallMessageHandler
],
},
"PyQt4": {
"QtGui.QAbstractProxyModel": "QtCore.QAbstractProxyModel",
"QtGui.QSortFilterProxyModel": "QtCore.QSortFilterProxyModel",
"QtGui.QItemSelection": "QtCore.QItemSelection",
"QtGui.QStringListModel": "QtCore.QStringListModel",
"QtGui.QItemSelectionModel": "QtCore.QItemSelectionModel",
"QtCore.pyqtProperty": "QtCore.Property",
"QtCore.pyqtSignal": "QtCore.Signal",
"QtCore.pyqtSlot": "QtCore.Slot",
"QtGui.QItemSelectionRange": "QtCore.QItemSelectionRange",
"QtGui.QAbstractPrintDialog": "QtPrintSupport.QAbstractPrintDialog",
"QtGui.QPageSetupDialog": "QtPrintSupport.QPageSetupDialog",
"QtGui.QPrintDialog": "QtPrintSupport.QPrintDialog",
"QtGui.QPrintEngine": "QtPrintSupport.QPrintEngine",
"QtGui.QPrintPreviewDialog": "QtPrintSupport.QPrintPreviewDialog",
"QtGui.QPrintPreviewWidget": "QtPrintSupport.QPrintPreviewWidget",
"QtGui.QPrinter": "QtPrintSupport.QPrinter",
"QtGui.QPrinterInfo": "QtPrintSupport.QPrinterInfo",
# "QtCore.pyqtSignature": "QtCore.Slot",
"uic.loadUi": ["QtCompat.loadUi", _loadUi],
"sip.wrapinstance": ["QtCompat.wrapInstance", _wrapinstance],
"sip.unwrapinstance": ["QtCompat.getCppPointer", _getcpppointer],
"QtCore.QString": "str",
"QtGui.qApp": "QtWidgets.QApplication.instance()",
"QtCore.QCoreApplication.translate": [
"QtCompat.translate", _translate
],
"QtGui.QApplication.translate": [
"QtCompat.translate", _translate
],
"QtCore.qInstallMsgHandler": [
"QtCompat.qInstallMessageHandler", _qInstallMessageHandler
],
}
}
""" Compatibility Members
This dictionary is used to build Qt.QtCompat objects that provide a consistent
interface for obsolete members, and differences in binding return values.
{
"binding": {
"classname": {
"targetname": "binding_namespace",
}
}
}
"""
_compatibility_members = {
"PySide2": {
"QWidget": {
"grab": "QtWidgets.QWidget.grab",
},
"QHeaderView": {
"sectionsClickable": "QtWidgets.QHeaderView.sectionsClickable",
"setSectionsClickable":
"QtWidgets.QHeaderView.setSectionsClickable",
"sectionResizeMode": "QtWidgets.QHeaderView.sectionResizeMode",
"setSectionResizeMode":
"QtWidgets.QHeaderView.setSectionResizeMode",
"sectionsMovable": "QtWidgets.QHeaderView.sectionsMovable",
"setSectionsMovable": "QtWidgets.QHeaderView.setSectionsMovable",
},
"QFileDialog": {
"getOpenFileName": "QtWidgets.QFileDialog.getOpenFileName",
"getOpenFileNames": "QtWidgets.QFileDialog.getOpenFileNames",
"getSaveFileName": "QtWidgets.QFileDialog.getSaveFileName",
},
},
"PyQt5": {
"QWidget": {
"grab": "QtWidgets.QWidget.grab",
},
"QHeaderView": {
"sectionsClickable": "QtWidgets.QHeaderView.sectionsClickable",
"setSectionsClickable":
"QtWidgets.QHeaderView.setSectionsClickable",
"sectionResizeMode": "QtWidgets.QHeaderView.sectionResizeMode",
"setSectionResizeMode":
"QtWidgets.QHeaderView.setSectionResizeMode",
"sectionsMovable": "QtWidgets.QHeaderView.sectionsMovable",
"setSectionsMovable": "QtWidgets.QHeaderView.setSectionsMovable",
},
"QFileDialog": {
"getOpenFileName": "QtWidgets.QFileDialog.getOpenFileName",
"getOpenFileNames": "QtWidgets.QFileDialog.getOpenFileNames",
"getSaveFileName": "QtWidgets.QFileDialog.getSaveFileName",
},
},
"PySide": {
"QWidget": {
"grab": "QtWidgets.QPixmap.grabWidget",
},
"QHeaderView": {
"sectionsClickable": "QtWidgets.QHeaderView.isClickable",
"setSectionsClickable": "QtWidgets.QHeaderView.setClickable",
"sectionResizeMode": "QtWidgets.QHeaderView.resizeMode",
"setSectionResizeMode": "QtWidgets.QHeaderView.setResizeMode",
"sectionsMovable": "QtWidgets.QHeaderView.isMovable",
"setSectionsMovable": "QtWidgets.QHeaderView.setMovable",
},
"QFileDialog": {
"getOpenFileName": "QtWidgets.QFileDialog.getOpenFileName",
"getOpenFileNames": "QtWidgets.QFileDialog.getOpenFileNames",
"getSaveFileName": "QtWidgets.QFileDialog.getSaveFileName",
},
},
"PyQt4": {
"QWidget": {
"grab": "QtWidgets.QPixmap.grabWidget",
},
"QHeaderView": {
"sectionsClickable": "QtWidgets.QHeaderView.isClickable",
"setSectionsClickable": "QtWidgets.QHeaderView.setClickable",
"sectionResizeMode": "QtWidgets.QHeaderView.resizeMode",
"setSectionResizeMode": "QtWidgets.QHeaderView.setResizeMode",
"sectionsMovable": "QtWidgets.QHeaderView.isMovable",
"setSectionsMovable": "QtWidgets.QHeaderView.setMovable",
},
"QFileDialog": {
"getOpenFileName": "QtWidgets.QFileDialog.getOpenFileName",
"getOpenFileNames": "QtWidgets.QFileDialog.getOpenFileNames",
"getSaveFileName": "QtWidgets.QFileDialog.getSaveFileName",
},
},
}
def _apply_site_config():
    """Apply optional site-wide customisation via a `QtSiteConfig` module.

    When no such module is importable, Qt.py is built unmodified.
    """
    try:
        import QtSiteConfig
    except ImportError:
        # No QtSiteConfig module found; nothing to customise.
        return

    # Every hook is optional -- call only the ones the site defines.
    if hasattr(QtSiteConfig, 'update_members'):
        QtSiteConfig.update_members(_common_members)
    if hasattr(QtSiteConfig, 'update_misplaced_members'):
        QtSiteConfig.update_misplaced_members(members=_misplaced_members)
    if hasattr(QtSiteConfig, 'update_compatibility_members'):
        QtSiteConfig.update_compatibility_members(
            members=_compatibility_members)
def _new_module(name):
    """Return a fresh, empty module namespaced under this package."""
    return types.ModuleType("{0}.{1}".format(__name__, name))
def _import_sub_module(module, name):
    """import_sub_module will mimic the function of importlib.import_module"""
    # __import__ returns the top-level package, not the leaf module,
    # so walk down each dotted component of `name` to reach it.
    target = __import__(module.__name__ + "." + name)
    for part in name.split("."):
        target = getattr(target, part)
    return target
def _setup(module, extras):
    """Install common submodules"""
    Qt.__binding__ = module.__name__

    def _resolve(name):
        # Prefer a submodule of the binding; fall back to a top-level
        # module for extras such as sip and shiboken that may not be
        # children of the binding.
        try:
            return _import_sub_module(module, name)
        except ImportError:
            try:
                return __import__(name)
            except ImportError:
                return None

    for name in list(_common_members) + extras:
        submodule = _resolve(name)
        if submodule is None:
            continue

        # Store reference to original binding.
        setattr(Qt, "_" + name, submodule)

        if name not in extras:
            # Don't re-export speciality modules such as uic or
            # QtUiTools as public Qt.py submodules.
            setattr(Qt, name, _new_module(name))
def _reassign_misplaced_members(binding):
    """Apply misplaced members from `binding` to Qt.py

    Arguments:
        binding (dict): Misplaced members

    """
    for src, dst in _misplaced_members[binding].items():
        dst_value = None

        # Source is "<module>.<member>[.<member>...]".
        src_parts = src.split(".")
        src_module = src_parts[0]
        src_member = None
        if len(src_parts) > 1:
            src_member = src_parts[1:]

        # A [name, callable] pair supplies the value to install
        # directly, instead of looking it up on the source binding.
        if isinstance(dst, (list, tuple)):
            dst, dst_value = dst

        dst_parts = dst.split(".")
        dst_module = dst_parts[0]
        dst_member = None
        if len(dst_parts) > 1:
            dst_member = dst_parts[1]

        # Get the member we want to store in the namespace.
        if not dst_value:
            try:
                _part = getattr(Qt, "_" + src_module)
                # Walk the remaining attribute chain down to the leaf.
                while src_member:
                    member = src_member.pop(0)
                    _part = getattr(_part, member)
                dst_value = _part
            except AttributeError:
                # If the member we want to store in the namespace does not
                # exist, there is no need to continue. This can happen if a
                # request was made to rename a member that didn't exist, for
                # example if QtWidgets isn't available on the target platform.
                _log("Misplaced member has no source: {}".format(src))
                continue

        try:
            src_object = getattr(Qt, dst_module)
        except AttributeError:
            if dst_module not in _common_members:
                # Only create the Qt parent module if its listed in
                # _common_members. Without this check, if you remove QtCore
                # from _common_members, the default _misplaced_members will
                # add Qt.QtCore so it can add Signal, Slot, etc.
                msg = 'Not creating missing member module "{m}" for "{c}"'
                _log(msg.format(m=dst_module, c=dst_member))
                continue
            # If the dst is valid but the Qt parent module does not exist
            # then go ahead and create a new module to contain the member.
            setattr(Qt, dst_module, _new_module(dst_module))

            src_object = getattr(Qt, dst_module)

            # Enable direct import of the new module
            sys.modules[__name__ + "." + dst_module] = src_object

        if not dst_value:
            # NOTE(review): this fallback passes `src_member` (a list)
            # to getattr, which would raise TypeError if reached; the
            # lookup above either sets dst_value or continues, so this
            # branch looks dead -- confirm before relying on it.
            dst_value = getattr(Qt, "_" + src_module)

            if src_member:
                dst_value = getattr(dst_value, src_member)

        setattr(
            src_object,
            dst_member or dst_module,
            dst_value
        )
def _build_compatibility_members(binding, decorators=None):
    """Apply `binding` to QtCompat

    Builds one class per entry of `_compatibility_members[binding]` and
    installs it on `Qt.QtCompat`, so e.g. `Qt.QtCompat.QFileDialog`
    exposes binding-specific callables under common names.

    Arguments:
        binding (str): Top level binding in _compatibility_members.
        decorators (dict, optional): Provides the ability to decorate the
            original Qt methods when needed by a binding. This can be used
            to change the returned value to a standard value. The key should
            be the classname, the value is a dict where the keys are the
            target method names, and the values are the decorator functions.

    """
    decorators = decorators or dict()

    # Allow optional site-level customization of the compatibility members.
    # This method does not need to be implemented in QtSiteConfig.
    try:
        import QtSiteConfig
    except ImportError:
        pass
    else:
        if hasattr(QtSiteConfig, 'update_compatibility_decorators'):
            QtSiteConfig.update_compatibility_decorators(binding, decorators)

    # Common base so isinstance checks can recognize generated classes.
    _QtCompat = type("QtCompat", (object,), {})

    for classname, bindings in _compatibility_members[binding].items():
        attrs = {}
        # NOTE: the loop variable deliberately re-uses (shadows) the
        # `binding` parameter name; from here on it is the dotted path
        # string of the source member, e.g. "_QtWidgets.QWidget.method".
        for target, binding in bindings.items():
            namespaces = binding.split('.')
            try:
                src_object = getattr(Qt, "_" + namespaces[0])
            except AttributeError as e:
                _log("QtCompat: AttributeError: %s" % e)
                # Skip reassignment of non-existing members.
                # This can happen if a request was made to
                # rename a member that didn't exist, for example
                # if QtWidgets isn't available on the target platform.
                continue

            # Walk down any remaining namespace getting the object assuming
            # that if the first namespace exists the rest will exist.
            for namespace in namespaces[1:]:
                src_object = getattr(src_object, namespace)

            # decorate the Qt method if a decorator was provided.
            if target in decorators.get(classname, []):
                # staticmethod must be called on the decorated method to
                # prevent a TypeError being raised when the decorated method
                # is called.
                src_object = staticmethod(
                    decorators[classname][target](src_object))

            attrs[target] = src_object

        # Create the QtCompat class and install it into the namespace
        compat_class = type(classname, (_QtCompat,), attrs)
        setattr(Qt.QtCompat, classname, compat_class)
def _pyside2():
    """Initialise PySide2

    These functions serve to test the existence of a binding
    along with set it up in such a way that it aligns with
    the final step; adding members from the original binding
    to Qt.py

    """
    import PySide2 as module
    extras = ["QtUiTools"]
    try:
        try:
            # Before merge of PySide and shiboken
            import shiboken2
        except ImportError:
            # After merge of PySide and shiboken, May 2017
            from PySide2 import shiboken2
        extras.append("shiboken2")
    except ImportError:
        # shiboken2 is optional; without it wrapInstance/getCppPointer
        # are simply not installed below.
        pass

    _setup(module, extras)
    Qt.__binding_version__ = module.__version__

    if hasattr(Qt, "_shiboken2"):
        Qt.QtCompat.wrapInstance = _wrapinstance
        Qt.QtCompat.getCppPointer = _getcpppointer

    if hasattr(Qt, "_QtUiTools"):
        Qt.QtCompat.loadUi = _loadUi

    if hasattr(Qt, "_QtCore"):
        Qt.__qt_version__ = Qt._QtCore.qVersion()

    if hasattr(Qt, "_QtWidgets"):
        Qt.QtCompat.setSectionResizeMode = \
            Qt._QtWidgets.QHeaderView.setSectionResizeMode

    _reassign_misplaced_members("PySide2")
    _build_compatibility_members("PySide2")
def _pyqt5():
    """Initialise PyQt5

    Imports the binding, mirrors its submodules onto Qt.py and installs
    the QtCompat helpers that PyQt5 supports.

    """
    import PyQt5 as module

    extras = ["uic"]
    try:
        import sip
    except ImportError:
        sip = None
    else:
        extras.append(sip.__name__)

    _setup(module, extras)

    # Pointer helpers require sip.
    if hasattr(Qt, "_sip"):
        Qt.QtCompat.wrapInstance = _wrapinstance
        Qt.QtCompat.getCppPointer = _getcpppointer

    # .ui loading requires the uic module.
    if hasattr(Qt, "_uic"):
        Qt.QtCompat.loadUi = _loadUi

    if hasattr(Qt, "_QtCore"):
        Qt.__binding_version__ = Qt._QtCore.PYQT_VERSION_STR
        Qt.__qt_version__ = Qt._QtCore.QT_VERSION_STR

    if hasattr(Qt, "_QtWidgets"):
        Qt.QtCompat.setSectionResizeMode = (
            Qt._QtWidgets.QHeaderView.setSectionResizeMode)

    _reassign_misplaced_members("PyQt5")
    _build_compatibility_members('PyQt5')
def _pyqt4():
    """Initialise PyQt4

    Configures the sip API (version 2 unless QT_SIP_API_HINT says
    otherwise), imports the binding, mirrors its submodules onto Qt.py
    and maps PyQt4's QtGui onto the modern QtWidgets layout.

    """
    import sip

    # Validation of environment variable. Prevents an error if
    # the variable is invalid since it's just a hint.
    try:
        hint = int(QT_SIP_API_HINT)
    except TypeError:
        hint = None  # Variable was None, i.e. not set.
    except ValueError:
        # BUGFIX: the original message never substituted %s; include the
        # offending value so the error is actionable.
        raise ImportError("QT_SIP_API_HINT=%s must be a 1 or 2"
                          % QT_SIP_API_HINT)

    for api in ("QString",
                "QVariant",
                "QDate",
                "QDateTime",
                "QTextStream",
                "QTime",
                "QUrl"):
        try:
            sip.setapi(api, hint or 2)
        except AttributeError:
            raise ImportError("PyQt4 < 4.6 isn't supported by Qt.py")
        except ValueError:
            actual = sip.getapi(api)
            if not hint:
                raise ImportError("API version already set to %d" % actual)
            else:
                # Having provided a hint indicates a soft constraint, one
                # that doesn't throw an exception.
                sys.stderr.write(
                    "Warning: API '%s' has already been set to %d.\n"
                    % (api, actual)
                )

    import PyQt4 as module
    # sip was already imported at the top of this function, so it is
    # guaranteed to be available here (the original re-imported it
    # inside a redundant try/except).
    extras = ["uic", sip.__name__]

    _setup(module, extras)

    # Pointer helpers require sip.
    if hasattr(Qt, "_sip"):
        Qt.QtCompat.wrapInstance = _wrapinstance
        Qt.QtCompat.getCppPointer = _getcpppointer

    # .ui loading requires the uic module.
    if hasattr(Qt, "_uic"):
        Qt.QtCompat.loadUi = _loadUi

    if hasattr(Qt, "_QtGui"):
        # PyQt4 keeps widgets in QtGui; expose them as QtWidgets too.
        setattr(Qt, "QtWidgets", _new_module("QtWidgets"))
        setattr(Qt, "_QtWidgets", Qt._QtGui)
        if hasattr(Qt._QtGui, "QX11Info"):
            setattr(Qt, "QtX11Extras", _new_module("QtX11Extras"))
            Qt.QtX11Extras.QX11Info = Qt._QtGui.QX11Info

        Qt.QtCompat.setSectionResizeMode = \
            Qt._QtGui.QHeaderView.setResizeMode

    if hasattr(Qt, "_QtCore"):
        Qt.__binding_version__ = Qt._QtCore.PYQT_VERSION_STR
        Qt.__qt_version__ = Qt._QtCore.QT_VERSION_STR

    _reassign_misplaced_members("PyQt4")

    # QFileDialog QtCompat decorator
    def _standardizeQFileDialog(some_function):
        """Decorator that makes PyQt4 return conform to other bindings"""
        def wrapper(*args, **kwargs):
            ret = (some_function(*args, **kwargs))

            # PyQt4 only returns the selected filename, force it to a
            # standard return of the selected filename, and a empty string
            # for the selected filter
            return ret, ''

        wrapper.__doc__ = some_function.__doc__
        wrapper.__name__ = some_function.__name__

        return wrapper

    decorators = {
        "QFileDialog": {
            "getOpenFileName": _standardizeQFileDialog,
            "getOpenFileNames": _standardizeQFileDialog,
            "getSaveFileName": _standardizeQFileDialog,
        }
    }
    _build_compatibility_members('PyQt4', decorators)
def _none():
    """Internal option (used in installer)

    Installs inert stand-ins for every binding member so that Qt.py can
    be imported without any Qt binding present.

    """
    class Mock(object):
        """Returns None for any attribute accessed on it."""
        def __getattr__(self, attr):
            return None

    Qt.__binding__ = "None"
    Qt.__qt_version__ = "0.0.0"
    Qt.__binding_version__ = "0.0.0"
    Qt.QtCompat.loadUi = lambda uifile, baseinstance=None: None
    Qt.QtCompat.setSectionResizeMode = lambda *args, **kwargs: None

    # Both the public and the private ("_"-prefixed) submodules are
    # replaced with mocks.
    for submodule in _common_members:
        setattr(Qt, submodule, Mock())
        setattr(Qt, "_" + submodule, Mock())
def _log(text):
    """Write `text` plus a newline to stdout when QT_VERBOSE is set."""
    if not QT_VERBOSE:
        return
    sys.stdout.write("%s\n" % text)
def _convert(lines):
"""Convert compiled .ui file from PySide2 to Qt.py
Arguments:
lines (list): Each line of of .ui file
Usage:
>> with open("myui.py") as f:
.. lines = _convert(f.readlines())
"""
def parse(line):
line = line.replace("from PySide2 import", "from Qt import QtCompat,")
line = line.replace("QtWidgets.QApplication.translate",
"QtCompat.translate")
if "QtCore.SIGNAL" in line:
raise NotImplementedError("QtCore.SIGNAL is missing from PyQt5 "
"and so Qt.py does not support it: you "
"should avoid defining signals inside "
"your ui files.")
return line
parsed = list()
for line in lines:
line = parse(line)
parsed.append(line)
return parsed
def _cli(args):
    """Qt.py command-line interface

    Only --convert is implemented; --compile, --stdout and --stdin are
    reserved flags that currently raise NotImplementedError.

    Arguments:
        args (list): Command-line arguments, e.g. sys.argv[1:]

    """
    import argparse
    parser = argparse.ArgumentParser()
    parser.add_argument("--convert",
                        help="Path to compiled Python module, e.g. my_ui.py")
    parser.add_argument("--compile",
                        help="Accept raw .ui file and compile with native "
                             "PySide2 compiler.")
    parser.add_argument("--stdout",
                        help="Write to stdout instead of file",
                        action="store_true")
    parser.add_argument("--stdin",
                        help="Read from stdin instead of file",
                        action="store_true")

    args = parser.parse_args(args)

    if args.stdout:
        raise NotImplementedError("--stdout")

    if args.stdin:
        raise NotImplementedError("--stdin")

    if args.compile:
        raise NotImplementedError("--compile")

    if args.convert:
        sys.stdout.write("#\n"
                         "# WARNING: --convert is an ALPHA feature.\n#\n"
                         "# See https://github.com/mottosso/Qt.py/pull/132\n"
                         "# for details.\n"
                         "#\n")

        #
        # ------> Read
        #
        with open(args.convert) as f:
            lines = _convert(f.readlines())

        # A backup copy of the original is kept alongside it, e.g.
        # "my_ui.py" -> "my_ui_backup.py".
        backup = "%s_backup%s" % os.path.splitext(args.convert)
        sys.stdout.write("Creating \"%s\"..\n" % backup)
        shutil.copy(args.convert, backup)

        #
        # <------ Write
        #
        with open(args.convert, "w") as f:
            f.write("".join(lines))

        sys.stdout.write("Successfully converted \"%s\"\n" % args.convert)
def _install():
    """Try each binding in order until one imports, then mirror its members.

    Called once at module import time. On success, Qt.<submodule> exposes
    the `_common_members` subset of the chosen binding's submodules.

    Raises:
        ImportError: When no binding could be initialised.

    """
    # Default order (customise order and content via QT_PREFERRED_BINDING)
    default_order = ("PySide2", "PyQt5", "PySide", "PyQt4")
    preferred_order = list(
        b for b in QT_PREFERRED_BINDING.split(os.pathsep) if b
    )

    order = preferred_order or default_order

    available = {
        "PySide2": _pyside2,
        "PyQt5": _pyqt5,
        "PySide": _pyside,
        "PyQt4": _pyqt4,
        "None": _none
    }

    _log("Order: '%s'" % "', '".join(order))

    # Allow site-level customization of the available modules.
    _apply_site_config()

    found_binding = False
    for name in order:
        _log("Trying %s" % name)

        try:
            available[name]()
            found_binding = True
            break
        except ImportError as e:
            _log("ImportError: %s" % e)
        except KeyError:
            # Unknown name in QT_PREFERRED_BINDING.
            _log("ImportError: Preferred binding '%s' not found." % name)

    if not found_binding:
        # If no binding was found, throw this error
        raise ImportError("No Qt binding were found.")

    # Install individual members
    for name, members in _common_members.items():
        try:
            their_submodule = getattr(Qt, "_%s" % name)
        except AttributeError:
            continue

        our_submodule = getattr(Qt, name)

        # Enable import *
        __all__.append(name)

        # Enable direct import of submodule,
        # e.g. import Qt.QtCore
        sys.modules[__name__ + "." + name] = our_submodule

        for member in members:
            # Accept that a submodule may miss certain members.
            try:
                their_member = getattr(their_submodule, member)
            except AttributeError:
                _log("'%s.%s' was missing." % (name, member))
                continue

            setattr(our_submodule, member, their_member)

    # Enable direct import of QtCompat
    sys.modules['Qt.QtCompat'] = Qt.QtCompat

    # Backwards compatibility
    if hasattr(Qt.QtCompat, 'loadUi'):
        Qt.QtCompat.load_ui = Qt.QtCompat.loadUi
# Resolve and install a binding at import time.
_install()

# Setup Binding Enum states
Qt.IsPySide2 = Qt.__binding__ == 'PySide2'
Qt.IsPyQt5 = Qt.__binding__ == 'PyQt5'
Qt.IsPySide = Qt.__binding__ == 'PySide'
Qt.IsPyQt4 = Qt.__binding__ == 'PyQt4'

"""Augment QtCompat

QtCompat contains wrappers and added functionality
to the original bindings, such as the CLI interface
and otherwise incompatible members between bindings,
such as `QHeaderView.setSectionResizeMode`.

"""

Qt.QtCompat._cli = _cli
Qt.QtCompat._convert = _convert

# Enable command-line interface
if __name__ == "__main__":
    _cli(sys.argv[1:])
# The MIT License (MIT)
#
# Copyright (c) 2016-2017 Marcus Ottosson
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
# In PySide(2), loadUi does not exist, so we implement it
#
# `_UiLoader` is adapted from the qtpy project, which was further influenced
# by qt-helpers which was released under a 3-clause BSD license which in turn
# is based on a solution at:
#
# - https://gist.github.com/cpbotha/1b42a20c8f3eb9bb7cb8
#
# The License for this code is as follows:
#
# qt-helpers - a common front-end to various Qt modules
#
# Copyright (c) 2015, Chris Beaumont and Thomas Robitaille
#
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the
# distribution.
# * Neither the name of the Glue project nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Which itself was based on the solution at
#
# https://gist.github.com/cpbotha/1b42a20c8f3eb9bb7cb8
#
# which was released under the MIT license:
#
# Copyright (c) 2011 Sebastian Wiesner <lunaryorn@gmail.com>
# Modifications by Charl Botha <cpbotha@vxlabs.com>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files
# (the "Software"),to deal in the Software without restriction,
# including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
pyblish/pyblish-lite | pyblish_lite/vendor/Qt.py | _pyqt5 | python | def _pyqt5():
import PyQt5 as module
extras = ["uic"]
try:
import sip
extras.append(sip.__name__)
except ImportError:
sip = None
_setup(module, extras)
if hasattr(Qt, "_sip"):
Qt.QtCompat.wrapInstance = _wrapinstance
Qt.QtCompat.getCppPointer = _getcpppointer
if hasattr(Qt, "_uic"):
Qt.QtCompat.loadUi = _loadUi
if hasattr(Qt, "_QtCore"):
Qt.__binding_version__ = Qt._QtCore.PYQT_VERSION_STR
Qt.__qt_version__ = Qt._QtCore.QT_VERSION_STR
if hasattr(Qt, "_QtWidgets"):
Qt.QtCompat.setSectionResizeMode = \
Qt._QtWidgets.QHeaderView.setSectionResizeMode
_reassign_misplaced_members("PyQt5")
_build_compatibility_members('PyQt5') | Initialise PyQt5 | train | https://github.com/pyblish/pyblish-lite/blob/9172b81c7ae19a36e99c89dd16e102201992dc20/pyblish_lite/vendor/Qt.py#L1405-L1433 | null | """Minimal Python 2 & 3 shim around all Qt bindings
DOCUMENTATION
Qt.py was born in the film and visual effects industry to address
the growing need for the development of software capable of running
with more than one flavour of the Qt bindings for Python - PySide,
PySide2, PyQt4 and PyQt5.
1. Build for one, run with all
2. Explicit is better than implicit
3. Support co-existence
Default resolution order:
- PySide2
- PyQt5
- PySide
- PyQt4
Usage:
>> import sys
>> from Qt import QtWidgets
>> app = QtWidgets.QApplication(sys.argv)
>> button = QtWidgets.QPushButton("Hello World")
>> button.show()
>> app.exec_()
All members of PySide2 are mapped from other bindings, should they exist.
If no equivalent member exist, it is excluded from Qt.py and inaccessible.
The idea is to highlight members that exist across all supported binding,
and guarantee that code that runs on one binding runs on all others.
For more details, visit https://github.com/mottosso/Qt.py
LICENSE
See end of file for license (MIT, BSD) information.
"""
import os
import sys
import types
import shutil
__version__ = "1.2.0.b2"
# Enable support for `from Qt import *`
__all__ = []
# Flags from environment variables
QT_VERBOSE = bool(os.getenv("QT_VERBOSE"))
QT_PREFERRED_BINDING = os.getenv("QT_PREFERRED_BINDING", "")
QT_SIP_API_HINT = os.getenv("QT_SIP_API_HINT")
# Reference to Qt.py
Qt = sys.modules[__name__]
Qt.QtCompat = types.ModuleType("QtCompat")
try:
long
except NameError:
# Python 3 compatibility
long = int
"""Common members of all bindings
This is where each member of Qt.py is explicitly defined.
It is based on a "lowest common denominator" of all bindings;
including members found in each of the 4 bindings.
The "_common_members" dictionary is generated using the
build_membership.sh script.
"""
_common_members = {
"QtCore": [
"QAbstractAnimation",
"QAbstractEventDispatcher",
"QAbstractItemModel",
"QAbstractListModel",
"QAbstractState",
"QAbstractTableModel",
"QAbstractTransition",
"QAnimationGroup",
"QBasicTimer",
"QBitArray",
"QBuffer",
"QByteArray",
"QByteArrayMatcher",
"QChildEvent",
"QCoreApplication",
"QCryptographicHash",
"QDataStream",
"QDate",
"QDateTime",
"QDir",
"QDirIterator",
"QDynamicPropertyChangeEvent",
"QEasingCurve",
"QElapsedTimer",
"QEvent",
"QEventLoop",
"QEventTransition",
"QFile",
"QFileInfo",
"QFileSystemWatcher",
"QFinalState",
"QGenericArgument",
"QGenericReturnArgument",
"QHistoryState",
"QItemSelectionRange",
"QIODevice",
"QLibraryInfo",
"QLine",
"QLineF",
"QLocale",
"QMargins",
"QMetaClassInfo",
"QMetaEnum",
"QMetaMethod",
"QMetaObject",
"QMetaProperty",
"QMimeData",
"QModelIndex",
"QMutex",
"QMutexLocker",
"QObject",
"QParallelAnimationGroup",
"QPauseAnimation",
"QPersistentModelIndex",
"QPluginLoader",
"QPoint",
"QPointF",
"QProcess",
"QProcessEnvironment",
"QPropertyAnimation",
"QReadLocker",
"QReadWriteLock",
"QRect",
"QRectF",
"QRegExp",
"QResource",
"QRunnable",
"QSemaphore",
"QSequentialAnimationGroup",
"QSettings",
"QSignalMapper",
"QSignalTransition",
"QSize",
"QSizeF",
"QSocketNotifier",
"QState",
"QStateMachine",
"QSysInfo",
"QSystemSemaphore",
"QT_TRANSLATE_NOOP",
"QT_TR_NOOP",
"QT_TR_NOOP_UTF8",
"QTemporaryFile",
"QTextBoundaryFinder",
"QTextCodec",
"QTextDecoder",
"QTextEncoder",
"QTextStream",
"QTextStreamManipulator",
"QThread",
"QThreadPool",
"QTime",
"QTimeLine",
"QTimer",
"QTimerEvent",
"QTranslator",
"QUrl",
"QVariantAnimation",
"QWaitCondition",
"QWriteLocker",
"QXmlStreamAttribute",
"QXmlStreamAttributes",
"QXmlStreamEntityDeclaration",
"QXmlStreamEntityResolver",
"QXmlStreamNamespaceDeclaration",
"QXmlStreamNotationDeclaration",
"QXmlStreamReader",
"QXmlStreamWriter",
"Qt",
"QtCriticalMsg",
"QtDebugMsg",
"QtFatalMsg",
"QtMsgType",
"QtSystemMsg",
"QtWarningMsg",
"qAbs",
"qAddPostRoutine",
"qChecksum",
"qCritical",
"qDebug",
"qFatal",
"qFuzzyCompare",
"qIsFinite",
"qIsInf",
"qIsNaN",
"qIsNull",
"qRegisterResourceData",
"qUnregisterResourceData",
"qVersion",
"qWarning",
"qrand",
"qsrand"
],
"QtGui": [
"QAbstractTextDocumentLayout",
"QActionEvent",
"QBitmap",
"QBrush",
"QClipboard",
"QCloseEvent",
"QColor",
"QConicalGradient",
"QContextMenuEvent",
"QCursor",
"QDesktopServices",
"QDoubleValidator",
"QDrag",
"QDragEnterEvent",
"QDragLeaveEvent",
"QDragMoveEvent",
"QDropEvent",
"QFileOpenEvent",
"QFocusEvent",
"QFont",
"QFontDatabase",
"QFontInfo",
"QFontMetrics",
"QFontMetricsF",
"QGradient",
"QHelpEvent",
"QHideEvent",
"QHoverEvent",
"QIcon",
"QIconDragEvent",
"QIconEngine",
"QImage",
"QImageIOHandler",
"QImageReader",
"QImageWriter",
"QInputEvent",
"QInputMethodEvent",
"QIntValidator",
"QKeyEvent",
"QKeySequence",
"QLinearGradient",
"QMatrix2x2",
"QMatrix2x3",
"QMatrix2x4",
"QMatrix3x2",
"QMatrix3x3",
"QMatrix3x4",
"QMatrix4x2",
"QMatrix4x3",
"QMatrix4x4",
"QMouseEvent",
"QMoveEvent",
"QMovie",
"QPaintDevice",
"QPaintEngine",
"QPaintEngineState",
"QPaintEvent",
"QPainter",
"QPainterPath",
"QPainterPathStroker",
"QPalette",
"QPen",
"QPicture",
"QPictureIO",
"QPixmap",
"QPixmapCache",
"QPolygon",
"QPolygonF",
"QQuaternion",
"QRadialGradient",
"QRegExpValidator",
"QRegion",
"QResizeEvent",
"QSessionManager",
"QShortcutEvent",
"QShowEvent",
"QStandardItem",
"QStandardItemModel",
"QStatusTipEvent",
"QSyntaxHighlighter",
"QTabletEvent",
"QTextBlock",
"QTextBlockFormat",
"QTextBlockGroup",
"QTextBlockUserData",
"QTextCharFormat",
"QTextCursor",
"QTextDocument",
"QTextDocumentFragment",
"QTextFormat",
"QTextFragment",
"QTextFrame",
"QTextFrameFormat",
"QTextImageFormat",
"QTextInlineObject",
"QTextItem",
"QTextLayout",
"QTextLength",
"QTextLine",
"QTextList",
"QTextListFormat",
"QTextObject",
"QTextObjectInterface",
"QTextOption",
"QTextTable",
"QTextTableCell",
"QTextTableCellFormat",
"QTextTableFormat",
"QTouchEvent",
"QTransform",
"QValidator",
"QVector2D",
"QVector3D",
"QVector4D",
"QWhatsThisClickedEvent",
"QWheelEvent",
"QWindowStateChangeEvent",
"qAlpha",
"qBlue",
"qGray",
"qGreen",
"qIsGray",
"qRed",
"qRgb",
"qRgba"
],
"QtHelp": [
"QHelpContentItem",
"QHelpContentModel",
"QHelpContentWidget",
"QHelpEngine",
"QHelpEngineCore",
"QHelpIndexModel",
"QHelpIndexWidget",
"QHelpSearchEngine",
"QHelpSearchQuery",
"QHelpSearchQueryWidget",
"QHelpSearchResultWidget"
],
"QtMultimedia": [
"QAbstractVideoBuffer",
"QAbstractVideoSurface",
"QAudio",
"QAudioDeviceInfo",
"QAudioFormat",
"QAudioInput",
"QAudioOutput",
"QVideoFrame",
"QVideoSurfaceFormat"
],
"QtNetwork": [
"QAbstractNetworkCache",
"QAbstractSocket",
"QAuthenticator",
"QHostAddress",
"QHostInfo",
"QLocalServer",
"QLocalSocket",
"QNetworkAccessManager",
"QNetworkAddressEntry",
"QNetworkCacheMetaData",
"QNetworkConfiguration",
"QNetworkConfigurationManager",
"QNetworkCookie",
"QNetworkCookieJar",
"QNetworkDiskCache",
"QNetworkInterface",
"QNetworkProxy",
"QNetworkProxyFactory",
"QNetworkProxyQuery",
"QNetworkReply",
"QNetworkRequest",
"QNetworkSession",
"QSsl",
"QTcpServer",
"QTcpSocket",
"QUdpSocket"
],
"QtOpenGL": [
"QGL",
"QGLContext",
"QGLFormat",
"QGLWidget"
],
"QtPrintSupport": [
"QAbstractPrintDialog",
"QPageSetupDialog",
"QPrintDialog",
"QPrintEngine",
"QPrintPreviewDialog",
"QPrintPreviewWidget",
"QPrinter",
"QPrinterInfo"
],
"QtSql": [
"QSql",
"QSqlDatabase",
"QSqlDriver",
"QSqlDriverCreatorBase",
"QSqlError",
"QSqlField",
"QSqlIndex",
"QSqlQuery",
"QSqlQueryModel",
"QSqlRecord",
"QSqlRelation",
"QSqlRelationalDelegate",
"QSqlRelationalTableModel",
"QSqlResult",
"QSqlTableModel"
],
"QtSvg": [
"QGraphicsSvgItem",
"QSvgGenerator",
"QSvgRenderer",
"QSvgWidget"
],
"QtTest": [
"QTest"
],
"QtWidgets": [
"QAbstractButton",
"QAbstractGraphicsShapeItem",
"QAbstractItemDelegate",
"QAbstractItemView",
"QAbstractScrollArea",
"QAbstractSlider",
"QAbstractSpinBox",
"QAction",
"QActionGroup",
"QApplication",
"QBoxLayout",
"QButtonGroup",
"QCalendarWidget",
"QCheckBox",
"QColorDialog",
"QColumnView",
"QComboBox",
"QCommandLinkButton",
"QCommonStyle",
"QCompleter",
"QDataWidgetMapper",
"QDateEdit",
"QDateTimeEdit",
"QDesktopWidget",
"QDial",
"QDialog",
"QDialogButtonBox",
"QDirModel",
"QDockWidget",
"QDoubleSpinBox",
"QErrorMessage",
"QFileDialog",
"QFileIconProvider",
"QFileSystemModel",
"QFocusFrame",
"QFontComboBox",
"QFontDialog",
"QFormLayout",
"QFrame",
"QGesture",
"QGestureEvent",
"QGestureRecognizer",
"QGraphicsAnchor",
"QGraphicsAnchorLayout",
"QGraphicsBlurEffect",
"QGraphicsColorizeEffect",
"QGraphicsDropShadowEffect",
"QGraphicsEffect",
"QGraphicsEllipseItem",
"QGraphicsGridLayout",
"QGraphicsItem",
"QGraphicsItemGroup",
"QGraphicsLayout",
"QGraphicsLayoutItem",
"QGraphicsLineItem",
"QGraphicsLinearLayout",
"QGraphicsObject",
"QGraphicsOpacityEffect",
"QGraphicsPathItem",
"QGraphicsPixmapItem",
"QGraphicsPolygonItem",
"QGraphicsProxyWidget",
"QGraphicsRectItem",
"QGraphicsRotation",
"QGraphicsScale",
"QGraphicsScene",
"QGraphicsSceneContextMenuEvent",
"QGraphicsSceneDragDropEvent",
"QGraphicsSceneEvent",
"QGraphicsSceneHelpEvent",
"QGraphicsSceneHoverEvent",
"QGraphicsSceneMouseEvent",
"QGraphicsSceneMoveEvent",
"QGraphicsSceneResizeEvent",
"QGraphicsSceneWheelEvent",
"QGraphicsSimpleTextItem",
"QGraphicsTextItem",
"QGraphicsTransform",
"QGraphicsView",
"QGraphicsWidget",
"QGridLayout",
"QGroupBox",
"QHBoxLayout",
"QHeaderView",
"QInputDialog",
"QItemDelegate",
"QItemEditorCreatorBase",
"QItemEditorFactory",
"QKeyEventTransition",
"QLCDNumber",
"QLabel",
"QLayout",
"QLayoutItem",
"QLineEdit",
"QListView",
"QListWidget",
"QListWidgetItem",
"QMainWindow",
"QMdiArea",
"QMdiSubWindow",
"QMenu",
"QMenuBar",
"QMessageBox",
"QMouseEventTransition",
"QPanGesture",
"QPinchGesture",
"QPlainTextDocumentLayout",
"QPlainTextEdit",
"QProgressBar",
"QProgressDialog",
"QPushButton",
"QRadioButton",
"QRubberBand",
"QScrollArea",
"QScrollBar",
"QShortcut",
"QSizeGrip",
"QSizePolicy",
"QSlider",
"QSpacerItem",
"QSpinBox",
"QSplashScreen",
"QSplitter",
"QSplitterHandle",
"QStackedLayout",
"QStackedWidget",
"QStatusBar",
"QStyle",
"QStyleFactory",
"QStyleHintReturn",
"QStyleHintReturnMask",
"QStyleHintReturnVariant",
"QStyleOption",
"QStyleOptionButton",
"QStyleOptionComboBox",
"QStyleOptionComplex",
"QStyleOptionDockWidget",
"QStyleOptionFocusRect",
"QStyleOptionFrame",
"QStyleOptionGraphicsItem",
"QStyleOptionGroupBox",
"QStyleOptionHeader",
"QStyleOptionMenuItem",
"QStyleOptionProgressBar",
"QStyleOptionRubberBand",
"QStyleOptionSizeGrip",
"QStyleOptionSlider",
"QStyleOptionSpinBox",
"QStyleOptionTab",
"QStyleOptionTabBarBase",
"QStyleOptionTabWidgetFrame",
"QStyleOptionTitleBar",
"QStyleOptionToolBar",
"QStyleOptionToolBox",
"QStyleOptionToolButton",
"QStyleOptionViewItem",
"QStylePainter",
"QStyledItemDelegate",
"QSwipeGesture",
"QSystemTrayIcon",
"QTabBar",
"QTabWidget",
"QTableView",
"QTableWidget",
"QTableWidgetItem",
"QTableWidgetSelectionRange",
"QTapAndHoldGesture",
"QTapGesture",
"QTextBrowser",
"QTextEdit",
"QTimeEdit",
"QToolBar",
"QToolBox",
"QToolButton",
"QToolTip",
"QTreeView",
"QTreeWidget",
"QTreeWidgetItem",
"QTreeWidgetItemIterator",
"QUndoCommand",
"QUndoGroup",
"QUndoStack",
"QUndoView",
"QVBoxLayout",
"QWhatsThis",
"QWidget",
"QWidgetAction",
"QWidgetItem",
"QWizard",
"QWizardPage"
],
"QtX11Extras": [
"QX11Info"
],
"QtXml": [
"QDomAttr",
"QDomCDATASection",
"QDomCharacterData",
"QDomComment",
"QDomDocument",
"QDomDocumentFragment",
"QDomDocumentType",
"QDomElement",
"QDomEntity",
"QDomEntityReference",
"QDomImplementation",
"QDomNamedNodeMap",
"QDomNode",
"QDomNodeList",
"QDomNotation",
"QDomProcessingInstruction",
"QDomText",
"QXmlAttributes",
"QXmlContentHandler",
"QXmlDTDHandler",
"QXmlDeclHandler",
"QXmlDefaultHandler",
"QXmlEntityResolver",
"QXmlErrorHandler",
"QXmlInputSource",
"QXmlLexicalHandler",
"QXmlLocator",
"QXmlNamespaceSupport",
"QXmlParseException",
"QXmlReader",
"QXmlSimpleReader"
],
"QtXmlPatterns": [
"QAbstractMessageHandler",
"QAbstractUriResolver",
"QAbstractXmlNodeModel",
"QAbstractXmlReceiver",
"QSourceLocation",
"QXmlFormatter",
"QXmlItem",
"QXmlName",
"QXmlNamePool",
"QXmlNodeModelIndex",
"QXmlQuery",
"QXmlResultItems",
"QXmlSchema",
"QXmlSchemaValidator",
"QXmlSerializer"
]
}
def _qInstallMessageHandler(handler):
    """Install a message handler that works in all bindings

    Args:
        handler: A function that takes 3 arguments, or None

    """
    def messageOutputHandler(*args):
        # In Qt4 bindings, message handlers are passed 2 arguments
        # In Qt5 bindings, message handlers are passed 3 arguments
        # The first argument is a QtMsgType
        # The last argument is the message to be printed
        # The Middle argument (if passed) is a QMessageLogContext
        if len(args) == 3:
            msgType, logContext, msg = args
        elif len(args) == 2:
            msgType, msg = args
            logContext = None
        else:
            raise TypeError(
                "handler expected 2 or 3 arguments, got {0}".format(len(args)))

        if isinstance(msg, bytes):
            # In python 3, some bindings pass a bytestring, which cannot be
            # used elsewhere. Decoding a python 2 or 3 bytestring object will
            # consistently return a unicode object.
            msg = msg.decode()

        handler(msgType, logContext, msg)

    # Pass None straight through so the handler can be uninstalled.
    passObject = messageOutputHandler if handler else handler
    if Qt.IsPySide or Qt.IsPyQt4:
        return Qt._QtCore.qInstallMsgHandler(passObject)
    elif Qt.IsPySide2 or Qt.IsPyQt5:
        return Qt._QtCore.qInstallMessageHandler(passObject)
def _getcpppointer(object):
    """Return the C++ pointer of `object` via whichever helper is present.

    Checks shiboken2, shiboken, then sip, matching whichever binding was
    installed on Qt.

    """
    if hasattr(Qt, "_shiboken2"):
        return Qt._shiboken2.getCppPointer(object)[0]

    if hasattr(Qt, "_shiboken"):
        return Qt._shiboken.getCppPointer(object)[0]

    if hasattr(Qt, "_sip"):
        return Qt._sip.unwrapinstance(object)

    raise AttributeError("'module' has no attribute 'getCppPointer'")
def _wrapinstance(ptr, base=None):
    """Enable implicit cast of pointer to most suitable class

    This behaviour is available in sip per default.

    Based on http://nathanhorne.com/pyqtpyside-wrap-instance

    Usage:
        This mechanism kicks in under these circumstances.
        1. Qt.py is using PySide 1 or 2.
        2. A `base` argument is not provided.

        See :func:`QtCompat.wrapInstance()`

    Arguments:
        ptr (long): Pointer to QObject in memory
        base (QObject, optional): Base class to wrap with. Defaults to QObject,
            which should handle anything.

    """
    assert isinstance(ptr, long), "Argument 'ptr' must be of type <long>"
    assert (base is None) or issubclass(base, Qt.QtCore.QObject), (
        "Argument 'base' must be of type <QObject>")

    if Qt.IsPyQt4 or Qt.IsPyQt5:
        func = getattr(Qt, "_sip").wrapinstance
    elif Qt.IsPySide2:
        func = getattr(Qt, "_shiboken2").wrapInstance
    elif Qt.IsPySide:
        func = getattr(Qt, "_shiboken").wrapInstance
    else:
        raise AttributeError("'module' has no attribute 'wrapInstance'")

    if base is None:
        # No explicit base given: wrap as plain QObject first, then use
        # the meta-object's class name to find the most specific match in
        # Qt.QtWidgets, falling back to the superclass, then to QObject.
        q_object = func(long(ptr), Qt.QtCore.QObject)
        meta_object = q_object.metaObject()
        class_name = meta_object.className()
        super_class_name = meta_object.superClass().className()

        if hasattr(Qt.QtWidgets, class_name):
            base = getattr(Qt.QtWidgets, class_name)

        elif hasattr(Qt.QtWidgets, super_class_name):
            base = getattr(Qt.QtWidgets, super_class_name)

        else:
            base = Qt.QtCore.QObject

    return func(long(ptr), base)
def _translate(context, sourceText, *args):
    """Translate `sourceText` uniformly across Qt4 and Qt5 bindings.

    Dispatches to QCoreApplication.translate with the argument layout the
    active binding expects.

    """
    # In Qt4 bindings, translate can be passed 2 or 3 arguments
    # In Qt5 bindings, translate can be passed 2 arguments
    # The first argument is disambiguation[str]
    # The last argument is n[int]
    # The middle argument can be encoding[QtCore.QCoreApplication.Encoding]
    if len(args) == 3:
        disambiguation, encoding, n = args
    elif len(args) == 2:
        disambiguation, n = args
        encoding = None
    else:
        raise TypeError(
            "Expected 4 or 5 arguments, got {0}.".format(len(args)+2))

    if hasattr(Qt.QtCore, "QCoreApplication"):
        app = getattr(Qt.QtCore, "QCoreApplication")
    else:
        raise NotImplementedError(
            "Missing QCoreApplication implementation for {binding}".format(
                binding=Qt.__binding__,
            )
        )
    if Qt.__binding__ in ("PySide2", "PyQt5"):
        # Qt5-style signature drops the encoding argument.
        sanitized_args = [context, sourceText, disambiguation, n]
    else:
        sanitized_args = [
            context,
            sourceText,
            disambiguation,
            encoding or app.CodecForTr,
            n
        ]
    return app.translate(*sanitized_args)
def _loadUi(uifile, baseinstance=None):
    """Dynamically load a user interface from the given `uifile`

    This function calls `uic.loadUi` if using PyQt bindings,
    else it implements a comparable binding for PySide.

    Documentation:
        http://pyqt.sourceforge.net/Docs/PyQt5/designer.html#PyQt5.uic.loadUi

    Arguments:
        uifile (str): Absolute path to Qt Designer file.
        baseinstance (QWidget): Instantiated QWidget or subclass thereof

    Return:
        baseinstance if `baseinstance` is not `None`. Otherwise
        return the newly created instance of the user interface.

    """
    if hasattr(Qt, "_uic"):
        # PyQt: delegate to the native implementation.
        return Qt._uic.loadUi(uifile, baseinstance)

    elif hasattr(Qt, "_QtUiTools"):
        # Implement `PyQt5.uic.loadUi` for PySide(2)

        class _UiLoader(Qt._QtUiTools.QUiLoader):
            """Create the user interface in a base instance.

            Unlike `Qt._QtUiTools.QUiLoader` itself this class does not
            create a new instance of the top-level widget, but creates the user
            interface in an existing instance of the top-level class if needed.

            This mimics the behaviour of `PyQt5.uic.loadUi`.

            """

            def __init__(self, baseinstance):
                super(_UiLoader, self).__init__(baseinstance)
                self.baseinstance = baseinstance

            def load(self, uifile, *args, **kwargs):
                from xml.etree.ElementTree import ElementTree

                # For whatever reason, if this doesn't happen then
                # reading an invalid or non-existing .ui file throws
                # a RuntimeError.
                etree = ElementTree()
                etree.parse(uifile)

                widget = Qt._QtUiTools.QUiLoader.load(
                    self, uifile, *args, **kwargs)

                # Workaround for PySide 1.0.9, see issue #208
                widget.parentWidget()

                return widget

            def createWidget(self, class_name, parent=None, name=""):
                """Called for each widget defined in ui file

                Overridden here to populate `baseinstance` instead.

                """
                if parent is None and self.baseinstance:
                    # Supposed to create the top-level widget,
                    # return the base instance instead
                    return self.baseinstance

                # For some reason, Line is not in the list of available
                # widgets, but works fine, so we have to special case it here.
                if class_name in self.availableWidgets() + ["Line"]:
                    # Create a new widget for child widgets
                    widget = Qt._QtUiTools.QUiLoader.createWidget(self,
                                                                  class_name,
                                                                  parent,
                                                                  name)

                else:
                    raise Exception("Custom widget '%s' not supported"
                                    % class_name)

                if self.baseinstance:
                    # Set an attribute for the new child widget on the base
                    # instance, just like PyQt5.uic.loadUi does.
                    setattr(self.baseinstance, name, widget)

                return widget

        widget = _UiLoader(baseinstance).load(uifile)
        Qt.QtCore.QMetaObject.connectSlotsByName(widget)

        return widget

    else:
        raise NotImplementedError("No implementation available for loadUi")
"""Misplaced members
These members from the original submodule are misplaced relative PySide2
"""
_misplaced_members = {
"PySide2": {
"QtGui.QStringListModel": "QtCore.QStringListModel",
"QtCore.Property": "QtCore.Property",
"QtCore.Signal": "QtCore.Signal",
"QtCore.Slot": "QtCore.Slot",
"QtCore.QAbstractProxyModel": "QtCore.QAbstractProxyModel",
"QtCore.QSortFilterProxyModel": "QtCore.QSortFilterProxyModel",
"QtCore.QItemSelection": "QtCore.QItemSelection",
"QtCore.QItemSelectionModel": "QtCore.QItemSelectionModel",
"QtCore.QItemSelectionRange": "QtCore.QItemSelectionRange",
"QtUiTools.QUiLoader": ["QtCompat.loadUi", _loadUi],
"shiboken2.wrapInstance": ["QtCompat.wrapInstance", _wrapinstance],
"shiboken2.getCppPointer": ["QtCompat.getCppPointer", _getcpppointer],
"QtWidgets.qApp": "QtWidgets.QApplication.instance()",
"QtCore.QCoreApplication.translate": [
"QtCompat.translate", _translate
],
"QtWidgets.QApplication.translate": [
"QtCompat.translate", _translate
],
"QtCore.qInstallMessageHandler": [
"QtCompat.qInstallMessageHandler", _qInstallMessageHandler
],
},
"PyQt5": {
"QtCore.pyqtProperty": "QtCore.Property",
"QtCore.pyqtSignal": "QtCore.Signal",
"QtCore.pyqtSlot": "QtCore.Slot",
"QtCore.QAbstractProxyModel": "QtCore.QAbstractProxyModel",
"QtCore.QSortFilterProxyModel": "QtCore.QSortFilterProxyModel",
"QtCore.QStringListModel": "QtCore.QStringListModel",
"QtCore.QItemSelection": "QtCore.QItemSelection",
"QtCore.QItemSelectionModel": "QtCore.QItemSelectionModel",
"QtCore.QItemSelectionRange": "QtCore.QItemSelectionRange",
"uic.loadUi": ["QtCompat.loadUi", _loadUi],
"sip.wrapinstance": ["QtCompat.wrapInstance", _wrapinstance],
"sip.unwrapinstance": ["QtCompat.getCppPointer", _getcpppointer],
"QtWidgets.qApp": "QtWidgets.QApplication.instance()",
"QtCore.QCoreApplication.translate": [
"QtCompat.translate", _translate
],
"QtWidgets.QApplication.translate": [
"QtCompat.translate", _translate
],
"QtCore.qInstallMessageHandler": [
"QtCompat.qInstallMessageHandler", _qInstallMessageHandler
],
},
"PySide": {
"QtGui.QAbstractProxyModel": "QtCore.QAbstractProxyModel",
"QtGui.QSortFilterProxyModel": "QtCore.QSortFilterProxyModel",
"QtGui.QStringListModel": "QtCore.QStringListModel",
"QtGui.QItemSelection": "QtCore.QItemSelection",
"QtGui.QItemSelectionModel": "QtCore.QItemSelectionModel",
"QtCore.Property": "QtCore.Property",
"QtCore.Signal": "QtCore.Signal",
"QtCore.Slot": "QtCore.Slot",
"QtGui.QItemSelectionRange": "QtCore.QItemSelectionRange",
"QtGui.QAbstractPrintDialog": "QtPrintSupport.QAbstractPrintDialog",
"QtGui.QPageSetupDialog": "QtPrintSupport.QPageSetupDialog",
"QtGui.QPrintDialog": "QtPrintSupport.QPrintDialog",
"QtGui.QPrintEngine": "QtPrintSupport.QPrintEngine",
"QtGui.QPrintPreviewDialog": "QtPrintSupport.QPrintPreviewDialog",
"QtGui.QPrintPreviewWidget": "QtPrintSupport.QPrintPreviewWidget",
"QtGui.QPrinter": "QtPrintSupport.QPrinter",
"QtGui.QPrinterInfo": "QtPrintSupport.QPrinterInfo",
"QtUiTools.QUiLoader": ["QtCompat.loadUi", _loadUi],
"shiboken.wrapInstance": ["QtCompat.wrapInstance", _wrapinstance],
"shiboken.unwrapInstance": ["QtCompat.getCppPointer", _getcpppointer],
"QtGui.qApp": "QtWidgets.QApplication.instance()",
"QtCore.QCoreApplication.translate": [
"QtCompat.translate", _translate
],
"QtGui.QApplication.translate": [
"QtCompat.translate", _translate
],
"QtCore.qInstallMsgHandler": [
"QtCompat.qInstallMessageHandler", _qInstallMessageHandler
],
},
"PyQt4": {
"QtGui.QAbstractProxyModel": "QtCore.QAbstractProxyModel",
"QtGui.QSortFilterProxyModel": "QtCore.QSortFilterProxyModel",
"QtGui.QItemSelection": "QtCore.QItemSelection",
"QtGui.QStringListModel": "QtCore.QStringListModel",
"QtGui.QItemSelectionModel": "QtCore.QItemSelectionModel",
"QtCore.pyqtProperty": "QtCore.Property",
"QtCore.pyqtSignal": "QtCore.Signal",
"QtCore.pyqtSlot": "QtCore.Slot",
"QtGui.QItemSelectionRange": "QtCore.QItemSelectionRange",
"QtGui.QAbstractPrintDialog": "QtPrintSupport.QAbstractPrintDialog",
"QtGui.QPageSetupDialog": "QtPrintSupport.QPageSetupDialog",
"QtGui.QPrintDialog": "QtPrintSupport.QPrintDialog",
"QtGui.QPrintEngine": "QtPrintSupport.QPrintEngine",
"QtGui.QPrintPreviewDialog": "QtPrintSupport.QPrintPreviewDialog",
"QtGui.QPrintPreviewWidget": "QtPrintSupport.QPrintPreviewWidget",
"QtGui.QPrinter": "QtPrintSupport.QPrinter",
"QtGui.QPrinterInfo": "QtPrintSupport.QPrinterInfo",
# "QtCore.pyqtSignature": "QtCore.Slot",
"uic.loadUi": ["QtCompat.loadUi", _loadUi],
"sip.wrapinstance": ["QtCompat.wrapInstance", _wrapinstance],
"sip.unwrapinstance": ["QtCompat.getCppPointer", _getcpppointer],
"QtCore.QString": "str",
"QtGui.qApp": "QtWidgets.QApplication.instance()",
"QtCore.QCoreApplication.translate": [
"QtCompat.translate", _translate
],
"QtGui.QApplication.translate": [
"QtCompat.translate", _translate
],
"QtCore.qInstallMsgHandler": [
"QtCompat.qInstallMessageHandler", _qInstallMessageHandler
],
}
}
""" Compatibility Members
This dictionary is used to build Qt.QtCompat objects that provide a consistent
interface for obsolete members, and differences in binding return values.
{
"binding": {
"classname": {
"targetname": "binding_namespace",
}
}
}
"""
_compatibility_members = {
"PySide2": {
"QWidget": {
"grab": "QtWidgets.QWidget.grab",
},
"QHeaderView": {
"sectionsClickable": "QtWidgets.QHeaderView.sectionsClickable",
"setSectionsClickable":
"QtWidgets.QHeaderView.setSectionsClickable",
"sectionResizeMode": "QtWidgets.QHeaderView.sectionResizeMode",
"setSectionResizeMode":
"QtWidgets.QHeaderView.setSectionResizeMode",
"sectionsMovable": "QtWidgets.QHeaderView.sectionsMovable",
"setSectionsMovable": "QtWidgets.QHeaderView.setSectionsMovable",
},
"QFileDialog": {
"getOpenFileName": "QtWidgets.QFileDialog.getOpenFileName",
"getOpenFileNames": "QtWidgets.QFileDialog.getOpenFileNames",
"getSaveFileName": "QtWidgets.QFileDialog.getSaveFileName",
},
},
"PyQt5": {
"QWidget": {
"grab": "QtWidgets.QWidget.grab",
},
"QHeaderView": {
"sectionsClickable": "QtWidgets.QHeaderView.sectionsClickable",
"setSectionsClickable":
"QtWidgets.QHeaderView.setSectionsClickable",
"sectionResizeMode": "QtWidgets.QHeaderView.sectionResizeMode",
"setSectionResizeMode":
"QtWidgets.QHeaderView.setSectionResizeMode",
"sectionsMovable": "QtWidgets.QHeaderView.sectionsMovable",
"setSectionsMovable": "QtWidgets.QHeaderView.setSectionsMovable",
},
"QFileDialog": {
"getOpenFileName": "QtWidgets.QFileDialog.getOpenFileName",
"getOpenFileNames": "QtWidgets.QFileDialog.getOpenFileNames",
"getSaveFileName": "QtWidgets.QFileDialog.getSaveFileName",
},
},
"PySide": {
"QWidget": {
"grab": "QtWidgets.QPixmap.grabWidget",
},
"QHeaderView": {
"sectionsClickable": "QtWidgets.QHeaderView.isClickable",
"setSectionsClickable": "QtWidgets.QHeaderView.setClickable",
"sectionResizeMode": "QtWidgets.QHeaderView.resizeMode",
"setSectionResizeMode": "QtWidgets.QHeaderView.setResizeMode",
"sectionsMovable": "QtWidgets.QHeaderView.isMovable",
"setSectionsMovable": "QtWidgets.QHeaderView.setMovable",
},
"QFileDialog": {
"getOpenFileName": "QtWidgets.QFileDialog.getOpenFileName",
"getOpenFileNames": "QtWidgets.QFileDialog.getOpenFileNames",
"getSaveFileName": "QtWidgets.QFileDialog.getSaveFileName",
},
},
"PyQt4": {
"QWidget": {
"grab": "QtWidgets.QPixmap.grabWidget",
},
"QHeaderView": {
"sectionsClickable": "QtWidgets.QHeaderView.isClickable",
"setSectionsClickable": "QtWidgets.QHeaderView.setClickable",
"sectionResizeMode": "QtWidgets.QHeaderView.resizeMode",
"setSectionResizeMode": "QtWidgets.QHeaderView.setResizeMode",
"sectionsMovable": "QtWidgets.QHeaderView.isMovable",
"setSectionsMovable": "QtWidgets.QHeaderView.setMovable",
},
"QFileDialog": {
"getOpenFileName": "QtWidgets.QFileDialog.getOpenFileName",
"getOpenFileNames": "QtWidgets.QFileDialog.getOpenFileNames",
"getSaveFileName": "QtWidgets.QFileDialog.getSaveFileName",
},
},
}
def _apply_site_config():
    """Let an optional site-level QtSiteConfig module customise the tables.

    Each hook is optional; only the functions the site module actually
    defines are invoked, each receiving the table it may mutate in place.
    """
    try:
        import QtSiteConfig
    except ImportError:
        # No QtSiteConfig module installed; keep the defaults untouched.
        return

    if hasattr(QtSiteConfig, 'update_members'):
        QtSiteConfig.update_members(_common_members)
    if hasattr(QtSiteConfig, 'update_misplaced_members'):
        QtSiteConfig.update_misplaced_members(members=_misplaced_members)
    if hasattr(QtSiteConfig, 'update_compatibility_members'):
        QtSiteConfig.update_compatibility_members(
            members=_compatibility_members)
def _new_module(name):
    """Return a fresh, empty module object namespaced under this package."""
    return types.ModuleType(".".join((__name__, name)))
def _import_sub_module(module, name):
    """Mimic importlib.import_module for submodule `name` of `module`."""
    # __import__ on a dotted path returns the *top-level* package, so we
    # walk the attribute chain down to the requested submodule.
    target = __import__("{0}.{1}".format(module.__name__, name))
    for component in name.split("."):
        target = getattr(target, component)
    return target
def _setup(module, extras):
    """Install common submodules of the chosen binding onto Qt.

    Arguments:
        module: The imported binding package, e.g. PySide2.
        extras (list): Extra module names (e.g. "sip", "QtUiTools") that
            may live outside the binding package.
    """
    Qt.__binding__ = module.__name__

    for name in list(_common_members) + extras:
        try:
            submodule = _import_sub_module(
                module, name)
        except ImportError:
            try:
                # For extra modules like sip and shiboken that may not be
                # children of the binding.
                submodule = __import__(name)
            except ImportError:
                # The submodule simply doesn't exist for this binding.
                continue

        setattr(Qt, "_" + name, submodule)

        if name not in extras:
            # Store reference to original binding,
            # but don't store speciality modules
            # such as uic or QtUiTools
            setattr(Qt, name, _new_module(name))
def _reassign_misplaced_members(binding):
    """Apply misplaced members from `binding` to Qt.py

    Arguments:
        binding (str): Key into _misplaced_members selecting the table of
            {source dotted path: destination} entries to apply.
    """
    for src, dst in _misplaced_members[binding].items():
        dst_value = None

        src_parts = src.split(".")
        src_module = src_parts[0]
        src_member = None
        if len(src_parts) > 1:
            # Remaining path components below the module, as a list.
            src_member = src_parts[1:]

        if isinstance(dst, (list, tuple)):
            # Two-element form: [destination path, replacement callable].
            dst, dst_value = dst

        dst_parts = dst.split(".")
        dst_module = dst_parts[0]
        dst_member = None
        if len(dst_parts) > 1:
            dst_member = dst_parts[1]

        # Get the member we want to store in the namespace.
        if not dst_value:
            try:
                _part = getattr(Qt, "_" + src_module)
                while src_member:
                    member = src_member.pop(0)
                    _part = getattr(_part, member)
                dst_value = _part
            except AttributeError:
                # If the member we want to store in the namespace does not
                # exist, there is no need to continue. This can happen if a
                # request was made to rename a member that didn't exist, for
                # example if QtWidgets isn't available on the target platform.
                _log("Misplaced member has no source: {}".format(src))
                continue

        try:
            src_object = getattr(Qt, dst_module)
        except AttributeError:
            if dst_module not in _common_members:
                # Only create the Qt parent module if its listed in
                # _common_members. Without this check, if you remove QtCore
                # from _common_members, the default _misplaced_members will add
                # Qt.QtCore so it can add Signal, Slot, etc.
                msg = 'Not creating missing member module "{m}" for "{c}"'
                _log(msg.format(m=dst_module, c=dst_member))
                continue
            # If the dst is valid but the Qt parent module does not exist
            # then go ahead and create a new module to contain the member.
            setattr(Qt, dst_module, _new_module(dst_module))

            src_object = getattr(Qt, dst_module)

            # Enable direct import of the new module
            sys.modules[__name__ + "." + dst_module] = src_object

        if not dst_value:
            # NOTE(review): this branch appears unreachable in practice
            # (dst_value is always set above or the loop `continue`s), and
            # `src_member` is a *list* here, so the getattr below would
            # raise TypeError if ever reached — latent dead-code bug.
            dst_value = getattr(Qt, "_" + src_module)
            if src_member:
                dst_value = getattr(dst_value, src_member)

        setattr(
            src_object,
            dst_member or dst_module,
            dst_value
        )
def _build_compatibility_members(binding, decorators=None):
    """Apply `binding` to QtCompat

    Arguments:
        binding (str): Top level binding in _compatibility_members.
        decorators (dict, optional): Provides the ability to decorate the
            original Qt methods when needed by a binding. This can be used
            to change the returned value to a standard value. The key should
            be the classname, the value is a dict where the keys are the
            target method names, and the values are the decorator functions.
    """
    decorators = decorators or dict()

    # Allow optional site-level customization of the compatibility members.
    # This method does not need to be implemented in QtSiteConfig.
    try:
        import QtSiteConfig
    except ImportError:
        pass
    else:
        if hasattr(QtSiteConfig, 'update_compatibility_decorators'):
            QtSiteConfig.update_compatibility_decorators(binding, decorators)

    # Common base so every generated compat class shares a type.
    _QtCompat = type("QtCompat", (object,), {})

    for classname, bindings in _compatibility_members[binding].items():
        attrs = {}
        # Fix: this loop variable used to be named `binding`, shadowing the
        # function parameter above; renamed to `qt_path` for clarity.
        for target, qt_path in bindings.items():
            namespaces = qt_path.split('.')
            try:
                src_object = getattr(Qt, "_" + namespaces[0])
            except AttributeError as e:
                _log("QtCompat: AttributeError: %s" % e)
                # Skip reassignment of non-existing members.
                # This can happen if a request was made to
                # rename a member that didn't exist, for example
                # if QtWidgets isn't available on the target platform.
                continue

            # Walk down any remaining namespace getting the object assuming
            # that if the first namespace exists the rest will exist.
            for namespace in namespaces[1:]:
                src_object = getattr(src_object, namespace)

            # decorate the Qt method if a decorator was provided.
            if target in decorators.get(classname, []):
                # staticmethod must be called on the decorated method to
                # prevent a TypeError being raised when the decorated method
                # is called.
                src_object = staticmethod(
                    decorators[classname][target](src_object))

            attrs[target] = src_object

        # Create the QtCompat class and install it into the namespace
        compat_class = type(classname, (_QtCompat,), attrs)
        setattr(Qt.QtCompat, classname, compat_class)
def _pyside2():
    """Initialise PySide2

    These functions serve to test the existence of a binding
    along with set it up in such a way that it aligns with
    the final step; adding members from the original binding
    to Qt.py
    """
    import PySide2 as module
    extras = ["QtUiTools"]
    try:
        try:
            # Before merge of PySide and shiboken
            import shiboken2
        except ImportError:
            # After merge of PySide and shiboken, May 2017
            from PySide2 import shiboken2
        extras.append("shiboken2")
    except ImportError:
        # shiboken2 is optional; wrapInstance et al. simply won't be set.
        pass

    _setup(module, extras)
    Qt.__binding_version__ = module.__version__

    if hasattr(Qt, "_shiboken2"):
        Qt.QtCompat.wrapInstance = _wrapinstance
        Qt.QtCompat.getCppPointer = _getcpppointer

    if hasattr(Qt, "_QtUiTools"):
        Qt.QtCompat.loadUi = _loadUi

    if hasattr(Qt, "_QtCore"):
        Qt.__qt_version__ = Qt._QtCore.qVersion()

    if hasattr(Qt, "_QtWidgets"):
        Qt.QtCompat.setSectionResizeMode = \
            Qt._QtWidgets.QHeaderView.setSectionResizeMode

    _reassign_misplaced_members("PySide2")
    _build_compatibility_members("PySide2")
def _pyside():
    """Initialise PySide"""
    import PySide as module
    extras = ["QtUiTools"]
    try:
        try:
            # Before merge of PySide and shiboken
            import shiboken
        except ImportError:
            # After merge of PySide and shiboken, May 2017
            from PySide import shiboken
        extras.append("shiboken")
    except ImportError:
        # shiboken is optional; wrapInstance et al. simply won't be set.
        pass

    _setup(module, extras)
    Qt.__binding_version__ = module.__version__

    if hasattr(Qt, "_shiboken"):
        Qt.QtCompat.wrapInstance = _wrapinstance
        Qt.QtCompat.getCppPointer = _getcpppointer

    if hasattr(Qt, "_QtUiTools"):
        Qt.QtCompat.loadUi = _loadUi

    if hasattr(Qt, "_QtGui"):
        # Qt4 has no QtWidgets module; alias QtGui to play that role.
        setattr(Qt, "QtWidgets", _new_module("QtWidgets"))
        setattr(Qt, "_QtWidgets", Qt._QtGui)
        if hasattr(Qt._QtGui, "QX11Info"):
            setattr(Qt, "QtX11Extras", _new_module("QtX11Extras"))
            Qt.QtX11Extras.QX11Info = Qt._QtGui.QX11Info

        Qt.QtCompat.setSectionResizeMode = Qt._QtGui.QHeaderView.setResizeMode

    if hasattr(Qt, "_QtCore"):
        Qt.__qt_version__ = Qt._QtCore.qVersion()

    _reassign_misplaced_members("PySide")
    _build_compatibility_members("PySide")
def _pyqt4():
    """Initialise PyQt4"""
    import sip

    # Validation of environment variable. Prevents an error if
    # the variable is invalid since it's just a hint.
    try:
        hint = int(QT_SIP_API_HINT)
    except TypeError:
        hint = None  # Variable was None, i.e. not set.
    except ValueError:
        # Bug fix: the original raised with a bare "%s" placeholder that
        # was never interpolated; include the offending value.
        raise ImportError("QT_SIP_API_HINT=%s must be a 1 or 2"
                          % QT_SIP_API_HINT)

    for api in ("QString",
                "QVariant",
                "QDate",
                "QDateTime",
                "QTextStream",
                "QTime",
                "QUrl"):
        try:
            sip.setapi(api, hint or 2)
        except AttributeError:
            raise ImportError("PyQt4 < 4.6 isn't supported by Qt.py")
        except ValueError:
            actual = sip.getapi(api)
            if not hint:
                raise ImportError("API version already set to %d" % actual)
            else:
                # Having provided a hint indicates a soft constraint, one
                # that doesn't throw an exception.
                sys.stderr.write(
                    "Warning: API '%s' has already been set to %d.\n"
                    % (api, actual)
                )

    import PyQt4 as module
    extras = ["uic"]
    # `sip` was successfully imported at the top of this function, so it is
    # always available as an extra (the original re-imported it inside a
    # redundant try/except that could never fail at this point).
    extras.append(sip.__name__)

    _setup(module, extras)
    if hasattr(Qt, "_sip"):
        Qt.QtCompat.wrapInstance = _wrapinstance
        Qt.QtCompat.getCppPointer = _getcpppointer

    if hasattr(Qt, "_uic"):
        Qt.QtCompat.loadUi = _loadUi

    if hasattr(Qt, "_QtGui"):
        # Qt4 has no QtWidgets module; alias QtGui to play that role.
        setattr(Qt, "QtWidgets", _new_module("QtWidgets"))
        setattr(Qt, "_QtWidgets", Qt._QtGui)
        if hasattr(Qt._QtGui, "QX11Info"):
            setattr(Qt, "QtX11Extras", _new_module("QtX11Extras"))
            Qt.QtX11Extras.QX11Info = Qt._QtGui.QX11Info

        Qt.QtCompat.setSectionResizeMode = \
            Qt._QtGui.QHeaderView.setResizeMode

    if hasattr(Qt, "_QtCore"):
        Qt.__binding_version__ = Qt._QtCore.PYQT_VERSION_STR
        Qt.__qt_version__ = Qt._QtCore.QT_VERSION_STR

    _reassign_misplaced_members("PyQt4")

    # QFileDialog QtCompat decorator
    def _standardizeQFileDialog(some_function):
        """Decorator that makes PyQt4 return conform to other bindings"""
        def wrapper(*args, **kwargs):
            ret = (some_function(*args, **kwargs))

            # PyQt4 only returns the selected filename, force it to a
            # standard return of the selected filename, and a empty string
            # for the selected filter
            return ret, ''

        wrapper.__doc__ = some_function.__doc__
        wrapper.__name__ = some_function.__name__

        return wrapper

    decorators = {
        "QFileDialog": {
            "getOpenFileName": _standardizeQFileDialog,
            "getOpenFileNames": _standardizeQFileDialog,
            "getSaveFileName": _standardizeQFileDialog,
        }
    }
    _build_compatibility_members('PyQt4', decorators)
def _none():
    """Internal option (used in installer): install stub modules only."""

    class Mock(object):
        """Attribute sink: every lookup resolves to None."""
        def __getattr__(self, attr):
            return None

    Qt.__binding__ = "None"
    Qt.__qt_version__ = "0.0.0"
    Qt.__binding_version__ = "0.0.0"
    Qt.QtCompat.loadUi = lambda uifile, baseinstance=None: None
    Qt.QtCompat.setSectionResizeMode = lambda *args, **kwargs: None

    for submodule in _common_members.keys():
        setattr(Qt, submodule, Mock())
        setattr(Qt, "_" + submodule, Mock())
def _log(text):
    """Emit a diagnostic line on stdout when QT_VERBOSE is enabled."""
    if QT_VERBOSE:
        sys.stdout.write("%s\n" % text)
def _convert(lines):
    """Convert a compiled .ui module from PySide2 imports to Qt.py.

    Arguments:
        lines (list): Lines of the compiled .ui Python module.

    Returns:
        list: The converted lines, in order.

    Usage:
        >> with open("myui.py") as f:
        ..   lines = _convert(f.readlines())
    """
    def _rewrite(line):
        line = line.replace("from PySide2 import", "from Qt import QtCompat,")
        line = line.replace("QtWidgets.QApplication.translate",
                            "QtCompat.translate")
        if "QtCore.SIGNAL" in line:
            raise NotImplementedError("QtCore.SIGNAL is missing from PyQt5 "
                                      "and so Qt.py does not support it: you "
                                      "should avoid defining signals inside "
                                      "your ui files.")
        return line

    return [_rewrite(line) for line in lines]
def _cli(args):
    """Qt.py command-line interface.

    Currently only --convert is implemented; the other flags raise
    NotImplementedError so their names are reserved.
    """
    import argparse
    parser = argparse.ArgumentParser()
    parser.add_argument("--convert",
                        help="Path to compiled Python module, e.g. my_ui.py")
    parser.add_argument("--compile",
                        help="Accept raw .ui file and compile with native "
                             "PySide2 compiler.")
    parser.add_argument("--stdout",
                        help="Write to stdout instead of file",
                        action="store_true")
    parser.add_argument("--stdin",
                        help="Read from stdin instead of file",
                        action="store_true")

    args = parser.parse_args(args)

    if args.stdout:
        raise NotImplementedError("--stdout")

    if args.stdin:
        raise NotImplementedError("--stdin")

    if args.compile:
        raise NotImplementedError("--compile")

    if args.convert:
        sys.stdout.write("#\n"
                         "# WARNING: --convert is an ALPHA feature.\n#\n"
                         "# See https://github.com/mottosso/Qt.py/pull/132\n"
                         "# for details.\n"
                         "#\n")
        #
        # ------> Read
        #
        with open(args.convert) as f:
            lines = _convert(f.readlines())

        # Keep a copy of the original file before overwriting it in place.
        backup = "%s_backup%s" % os.path.splitext(args.convert)
        sys.stdout.write("Creating \"%s\"..\n" % backup)
        shutil.copy(args.convert, backup)
        #
        # <------ Write
        #
        with open(args.convert, "w") as f:
            f.write("".join(lines))

        sys.stdout.write("Successfully converted \"%s\"\n" % args.convert)
def _install():
    """Pick a binding, initialise it, and populate the Qt.* namespace."""
    # Default order (customise order and content via QT_PREFERRED_BINDING)
    default_order = ("PySide2", "PyQt5", "PySide", "PyQt4")
    preferred_order = list(
        b for b in QT_PREFERRED_BINDING.split(os.pathsep) if b
    )

    order = preferred_order or default_order

    available = {
        "PySide2": _pyside2,
        "PyQt5": _pyqt5,
        "PySide": _pyside,
        "PyQt4": _pyqt4,
        "None": _none
    }

    _log("Order: '%s'" % "', '".join(order))

    # Allow site-level customization of the available modules.
    _apply_site_config()

    found_binding = False
    for name in order:
        _log("Trying %s" % name)
        try:
            available[name]()
            found_binding = True
            break
        except ImportError as e:
            # Binding not installed; move on to the next candidate.
            _log("ImportError: %s" % e)
        except KeyError:
            _log("ImportError: Preferred binding '%s' not found." % name)

    if not found_binding:
        # If no binding was found, raise this error
        raise ImportError("No Qt binding were found.")

    # Install individual members
    for name, members in _common_members.items():
        try:
            their_submodule = getattr(Qt, "_%s" % name)
        except AttributeError:
            continue

        our_submodule = getattr(Qt, name)

        # Enable import *
        __all__.append(name)

        # Enable direct import of submodule,
        # e.g. import Qt.QtCore
        sys.modules[__name__ + "." + name] = our_submodule

        for member in members:
            # Accept that a submodule may miss certain members.
            try:
                their_member = getattr(their_submodule, member)
            except AttributeError:
                _log("'%s.%s' was missing." % (name, member))
                continue

            setattr(our_submodule, member, their_member)

    # Enable direct import of QtCompat
    sys.modules['Qt.QtCompat'] = Qt.QtCompat

    # Backwards compatibility
    if hasattr(Qt.QtCompat, 'loadUi'):
        Qt.QtCompat.load_ui = Qt.QtCompat.loadUi
# Run the installer at import time: picks a binding and fills Qt.*.
_install()

# Setup Binding Enum states — convenience booleans for querying the
# active binding without string comparisons.
Qt.IsPySide2 = Qt.__binding__ == 'PySide2'
Qt.IsPyQt5 = Qt.__binding__ == 'PyQt5'
Qt.IsPySide = Qt.__binding__ == 'PySide'
Qt.IsPyQt4 = Qt.__binding__ == 'PyQt4'

"""Augment QtCompat

QtCompat contains wrappers and added functionality
to the original bindings, such as the CLI interface
and otherwise incompatible members between bindings,
such as `QHeaderView.setSectionResizeMode`.

"""

Qt.QtCompat._cli = _cli
Qt.QtCompat._convert = _convert

# Enable command-line interface
if __name__ == "__main__":
    _cli(sys.argv[1:])
# The MIT License (MIT)
#
# Copyright (c) 2016-2017 Marcus Ottosson
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
# In PySide(2), loadUi does not exist, so we implement it
#
# `_UiLoader` is adapted from the qtpy project, which was further influenced
# by qt-helpers which was released under a 3-clause BSD license which in turn
# is based on a solution at:
#
# - https://gist.github.com/cpbotha/1b42a20c8f3eb9bb7cb8
#
# The License for this code is as follows:
#
# qt-helpers - a common front-end to various Qt modules
#
# Copyright (c) 2015, Chris Beaumont and Thomas Robitaille
#
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the
# distribution.
# * Neither the name of the Glue project nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Which itself was based on the solution at
#
# https://gist.github.com/cpbotha/1b42a20c8f3eb9bb7cb8
#
# which was released under the MIT license:
#
# Copyright (c) 2011 Sebastian Wiesner <lunaryorn@gmail.com>
# Modifications by Charl Botha <cpbotha@vxlabs.com>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files
# (the "Software"),to deal in the Software without restriction,
# including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
closeio/quotequail | quotequail/__init__.py | quote | python | def quote(text, limit=1000):
lines = text.split('\n')
found = _internal.find_quote_position(lines, _patterns.MAX_WRAP_LINES, limit)
if found != None:
return [(True, '\n'.join(lines[:found+1])), (False, '\n'.join(lines[found+1:]))]
return [(True, text)] | Takes a plain text message as an argument, returns a list of tuples. The
first argument of the tuple denotes whether the text should be expanded by
default. The second argument is the unmodified corresponding text.
Example: [(True, 'expanded text'), (False, '> Some quoted text')]
Unless the limit param is set to None, the text will automatically be quoted
starting at the line where the limit is reached. | train | https://github.com/closeio/quotequail/blob/8a3960c033d595b25a8bbc2c340be898e3065b5f/quotequail/__init__.py#L12-L31 | [
"def find_quote_position(lines, max_wrap_lines, limit=None):\n \"\"\"\n Returns the (ending) line number of a quoting pattern. If a limit is given\n and the limit is reached, the limit is returned.\n \"\"\"\n\n for n in range(len(lines)):\n end, typ = find_pattern_on_line(lines, n, max_wrap_li... | # -*- coding: utf-8 -*-
# quotequail
# a library that identifies quoted text in email messages
import re
from . import _internal
__all__ = ['quote', 'quote_html', 'unwrap', 'unwrap_html']
def quote_html(html, limit=1000):
    """
    Like quote(), but takes an HTML message as an argument. The limit param
    represents the maximum number of lines to traverse until quoting the rest
    of the markup. Lines are separated by block elements or <br>.

    Returns a list of (expand_by_default, html_fragment) tuples.
    """
    from . import _html

    tree = _html.get_html_tree(html)

    # Fetch one line beyond the limit so we can tell whether it was exceeded.
    start_refs, end_refs, lines = _html.get_line_info(tree, limit+1)

    found = _internal.find_quote_position(lines, 1, limit)

    # Fix: compare to None with identity (`is`), not equality (PEP 8).
    if found is None:
        # No quoting found and we're below limit. We're done.
        return [(True, _html.render_html_tree(tree))]
    else:
        start_tree = _html.slice_tree(tree, start_refs, end_refs,
                                      (0, found+1), html_copy=html)
        end_tree = _html.slice_tree(tree, start_refs, end_refs,
                                    (found+1, None))
        return [
            (True, _html.render_html_tree(start_tree)),
            (False, _html.render_html_tree(end_tree)),
        ]
def unwrap(text):
    """
    If the passed text is the text body of a forwarded message, a reply, or
    contains quoted text, a dictionary with the following keys is returned:

    - type: "reply", "forward" or "quote"
    - text_top: Text at the top of the passed message (if found)
    - text_bottom: Text at the bottom of the passed message (if found)
    - from / to / subject / cc / bcc / reply-to: Corresponding header of the
      forwarded message, if it exists. (if found)
    - text: Unindented text of the wrapped message (if found)

    Otherwise, this function returns None.
    """
    lines = text.split('\n')

    # NOTE(review): `_patterns` is referenced here but not visible in the
    # module's import block above — confirm it is imported at module level.
    result = _internal.unwrap(lines, _patterns.MAX_WRAP_LINES,
            _patterns.MIN_HEADER_LINES,_patterns.MIN_QUOTED_LINES)

    if result:
        typ, top_range, hdrs, main_range, bottom_range, needs_unindent = result

        # Each range is a (start, end) line-number pair or a falsy value;
        # missing sections fall back to the empty string.
        text_top = lines[slice(*top_range)] if top_range else ''
        text = lines[slice(*main_range)] if main_range else ''
        text_bottom = lines[slice(*bottom_range)] if bottom_range else ''

        if needs_unindent:
            text = _internal.unindent_lines(text)

        result = {
            'type': typ,
        }

        # ''.join over the empty-string fallback also yields '' here.
        text = '\n'.join(text).strip()
        text_top = '\n'.join(text_top).strip()
        text_bottom = '\n'.join(text_bottom).strip()

        # Only include non-empty sections in the returned dict.
        if text:
            result['text'] = text
        if text_top:
            result['text_top'] = text_top
        if text_bottom:
            result['text_bottom'] = text_bottom

        if hdrs:
            result.update(hdrs)

        return result
def unwrap_html(html):
    """
    If the passed HTML is the HTML body of a forwarded message, a dictionary
    with the following keys is returned:

    - type: "reply", "forward" or "quote"
    - html_top: HTML at the top of the passed message (if found)
    - html_bottom: HTML at the bottom of the passed message (if found)
    - from / to / subject / cc / bcc / reply-to: Corresponding header of the
      forwarded message, if it exists. (if found)
    - html: HTML of the forwarded message (if found)

    Otherwise, this function returns None.
    """
    # Imported lazily so the lxml dependency is only needed for HTML input.
    from . import _html

    tree = _html.get_html_tree(html)
    start_refs, end_refs, lines = _html.get_line_info(tree)

    # NOTE(review): `_patterns` is referenced but this module only imports
    # `re` and `_internal` at the top — confirm the import exists upstream.
    result = _internal.unwrap(lines, 1, _patterns.MIN_HEADER_LINES, 1)

    if result:
        typ, top_range, hdrs, main_range, bottom_range, needs_unindent = result
        result = {
            'type': typ,
        }

        # Drop leading/trailing blank lines from each range.
        top_range = _html.trim_slice(lines, top_range)
        main_range = _html.trim_slice(lines, main_range)
        bottom_range = _html.trim_slice(lines, bottom_range)

        if top_range:
            # html_copy lets the slice re-render from the unmodified input.
            top_tree = _html.slice_tree(tree, start_refs, end_refs, top_range,
                                        html_copy=html)
            html_top = _html.render_html_tree(top_tree)
            if html_top:
                result['html_top'] = html_top

        if bottom_range:
            bottom_tree = _html.slice_tree(tree, start_refs, end_refs,
                                           bottom_range, html_copy=html)
            html_bottom = _html.render_html_tree(bottom_tree)
            if html_bottom:
                result['html_bottom'] = html_bottom

        if main_range:
            main_tree = _html.slice_tree(tree, start_refs, end_refs, main_range)
            if needs_unindent:
                # Remove one level of blockquote nesting in place.
                _html.unindent_tree(main_tree)
            html = _html.render_html_tree(main_tree)
            if html:
                result['html'] = html

        if hdrs:
            result.update(hdrs)

        return result
|
closeio/quotequail | quotequail/__init__.py | quote_html | python | def quote_html(html, limit=1000):
from . import _html
tree = _html.get_html_tree(html)
start_refs, end_refs, lines = _html.get_line_info(tree, limit+1)
found = _internal.find_quote_position(lines, 1, limit)
if found == None:
# No quoting found and we're below limit. We're done.
return [(True, _html.render_html_tree(tree))]
else:
start_tree = _html.slice_tree(tree, start_refs, end_refs,
(0, found+1), html_copy=html)
end_tree = _html.slice_tree(tree, start_refs, end_refs,
(found+1, None))
return [
(True, _html.render_html_tree(start_tree)),
(False, _html.render_html_tree(end_tree)),
] | Like quote(), but takes an HTML message as an argument. The limit param
represents the maximum number of lines to traverse until quoting the rest
of the markup. Lines are separated by block elements or <br>. | train | https://github.com/closeio/quotequail/blob/8a3960c033d595b25a8bbc2c340be898e3065b5f/quotequail/__init__.py#L33-L59 | [
"def find_quote_position(lines, max_wrap_lines, limit=None):\n \"\"\"\n Returns the (ending) line number of a quoting pattern. If a limit is given\n and the limit is reached, the limit is returned.\n \"\"\"\n\n for n in range(len(lines)):\n end, typ = find_pattern_on_line(lines, n, max_wrap_li... | # -*- coding: utf-8 -*-
# quotequail
# a library that identifies quoted text in email messages
import re
from . import _internal
__all__ = ['quote', 'quote_html', 'unwrap', 'unwrap_html']
def quote(text, limit=1000):
    """
    Takes a plain text message as an argument, returns a list of tuples. The
    first argument of the tuple denotes whether the text should be expanded by
    default. The second argument is the unmodified corresponding text.

    Example: [(True, 'expanded text'), (False, '> Some quoted text')]

    Unless the limit param is set to None, the text will automatically be
    quoted starting at the line where the limit is reached.
    """
    lines = text.split('\n')

    # Line index at which the quoted section starts (or at which the limit
    # was hit); None if neither happened.
    # NOTE(review): `_patterns` is referenced but this module only imports
    # `re` and `_internal` above — confirm the import exists upstream.
    found = _internal.find_quote_position(lines, _patterns.MAX_WRAP_LINES,
                                          limit)

    # PEP 8: compare against None with `is not`, never `!=`.
    if found is not None:
        # Everything up to and including the found line stays visible; the
        # rest is collapsed.
        return [(True, '\n'.join(lines[:found+1])),
                (False, '\n'.join(lines[found+1:]))]

    return [(True, text)]
def unwrap(text):
    """
    If the passed text is the text body of a forwarded message, a reply, or
    contains quoted text, a dictionary with the following keys is returned:

    - type: "reply", "forward" or "quote"
    - text_top: Text at the top of the passed message (if found)
    - text_bottom: Text at the bottom of the passed message (if found)
    - from / to / subject / cc / bcc / reply-to: Corresponding header of the
      forwarded message, if it exists. (if found)
    - text: Unindented text of the wrapped message (if found)

    Otherwise, this function returns None.
    """
    lines = text.split('\n')

    # NOTE(review): `_patterns` is referenced but this module only imports
    # `re` and `_internal` — confirm the `_patterns` import exists upstream.
    result = _internal.unwrap(lines, _patterns.MAX_WRAP_LINES,
            _patterns.MIN_HEADER_LINES,_patterns.MIN_QUOTED_LINES)

    if result:
        typ, top_range, hdrs, main_range, bottom_range, needs_unindent = result

        # Slice out each section; '' stands in for an absent section.
        text_top = lines[slice(*top_range)] if top_range else ''
        text = lines[slice(*main_range)] if main_range else ''
        text_bottom = lines[slice(*bottom_range)] if bottom_range else ''

        # Strip one level of '>' quoting from the wrapped message if needed.
        if needs_unindent:
            text = _internal.unindent_lines(text)

        result = {
            'type': typ,
        }

        text = '\n'.join(text).strip()
        text_top = '\n'.join(text_top).strip()
        text_bottom = '\n'.join(text_bottom).strip()

        # Only include non-empty sections in the result.
        if text:
            result['text'] = text
        if text_top:
            result['text_top'] = text_top
        if text_bottom:
            result['text_bottom'] = text_bottom
        if hdrs:
            result.update(hdrs)

        return result
def unwrap_html(html):
    """
    If the passed HTML is the HTML body of a forwarded message, a dictionary
    with the following keys is returned:

    - type: "reply", "forward" or "quote"
    - html_top: HTML at the top of the passed message (if found)
    - html_bottom: HTML at the bottom of the passed message (if found)
    - from / to / subject / cc / bcc / reply-to: Corresponding header of the
      forwarded message, if it exists. (if found)
    - html: HTML of the forwarded message (if found)

    Otherwise, this function returns None.
    """
    # Imported lazily so the HTML machinery is only needed for HTML input.
    from . import _html

    tree = _html.get_html_tree(html)
    start_refs, end_refs, lines = _html.get_line_info(tree)

    # NOTE(review): `_patterns` is referenced but this module only imports
    # `re` and `_internal` — confirm the import exists upstream.
    result = _internal.unwrap(lines, 1, _patterns.MIN_HEADER_LINES, 1)

    if result:
        typ, top_range, hdrs, main_range, bottom_range, needs_unindent = result
        result = {
            'type': typ,
        }

        # Drop leading/trailing blank lines from each range.
        top_range = _html.trim_slice(lines, top_range)
        main_range = _html.trim_slice(lines, main_range)
        bottom_range = _html.trim_slice(lines, bottom_range)

        if top_range:
            # html_copy lets the slice re-render from the unmodified input.
            top_tree = _html.slice_tree(tree, start_refs, end_refs, top_range,
                                        html_copy=html)
            html_top = _html.render_html_tree(top_tree)
            if html_top:
                result['html_top'] = html_top

        if bottom_range:
            bottom_tree = _html.slice_tree(tree, start_refs, end_refs,
                                           bottom_range, html_copy=html)
            html_bottom = _html.render_html_tree(bottom_tree)
            if html_bottom:
                result['html_bottom'] = html_bottom

        if main_range:
            main_tree = _html.slice_tree(tree, start_refs, end_refs, main_range)
            if needs_unindent:
                # Remove one level of blockquote nesting in place.
                _html.unindent_tree(main_tree)
            html = _html.render_html_tree(main_tree)
            if html:
                result['html'] = html

        if hdrs:
            result.update(hdrs)

        return result
|
closeio/quotequail | quotequail/__init__.py | unwrap | python | def unwrap(text):
lines = text.split('\n')
result = _internal.unwrap(lines, _patterns.MAX_WRAP_LINES,
_patterns.MIN_HEADER_LINES,_patterns.MIN_QUOTED_LINES)
if result:
typ, top_range, hdrs, main_range, bottom_range, needs_unindent = result
text_top = lines[slice(*top_range)] if top_range else ''
text = lines[slice(*main_range)] if main_range else ''
text_bottom = lines[slice(*bottom_range)] if bottom_range else ''
if needs_unindent:
text = _internal.unindent_lines(text)
result = {
'type': typ,
}
text = '\n'.join(text).strip()
text_top = '\n'.join(text_top).strip()
text_bottom = '\n'.join(text_bottom).strip()
if text:
result['text'] = text
if text_top:
result['text_top'] = text_top
if text_bottom:
result['text_bottom'] = text_bottom
if hdrs:
result.update(hdrs)
return result | If the passed text is the text body of a forwarded message, a reply, or
contains quoted text, a dictionary with the following keys is returned:
- type: "reply", "forward" or "quote"
- text_top: Text at the top of the passed message (if found)
- text_bottom: Text at the bottom of the passed message (if found)
- from / to / subject / cc / bcc / reply-to: Corresponding header of the
forwarded message, if it exists. (if found)
- text: Unindented text of the wrapped message (if found)
Otherwise, this function returns None. | train | https://github.com/closeio/quotequail/blob/8a3960c033d595b25a8bbc2c340be898e3065b5f/quotequail/__init__.py#L61-L108 | [
"def unwrap(lines, max_wrap_lines, min_header_lines, min_quoted_lines):\n \"\"\"\n Returns a tuple of:\n - Type ('forward', 'reply', 'headers', 'quoted')\n - Range of the text at the top of the wrapped message (or None)\n - Headers dict (or None)\n - Range of the text of the wrapped message (or No... | # -*- coding: utf-8 -*-
# quotequail
# a library that identifies quoted text in email messages
import re
from . import _internal
__all__ = ['quote', 'quote_html', 'unwrap', 'unwrap_html']
def quote(text, limit=1000):
    """
    Takes a plain text message as an argument, returns a list of tuples. The
    first argument of the tuple denotes whether the text should be expanded by
    default. The second argument is the unmodified corresponding text.

    Example: [(True, 'expanded text'), (False, '> Some quoted text')]

    Unless the limit param is set to None, the text will automatically be
    quoted starting at the line where the limit is reached.
    """
    lines = text.split('\n')

    # NOTE(review): `_patterns` is referenced but this module only imports
    # `re` and `_internal` — confirm the import exists upstream.
    found = _internal.find_quote_position(lines, _patterns.MAX_WRAP_LINES, limit)

    # NOTE(review): PEP 8 prefers `is not None` over `!= None` here.
    if found != None:
        # Visible part up to the found line; quoted/collapsed part after.
        return [(True, '\n'.join(lines[:found+1])), (False, '\n'.join(lines[found+1:]))]

    return [(True, text)]
def quote_html(html, limit=1000):
    """
    Like quote(), but takes an HTML message as an argument. The limit param
    represents the maximum number of lines to traverse until quoting the rest
    of the markup. Lines are separated by block elements or <br>.
    """
    # Imported lazily so the HTML machinery is only needed for HTML input.
    from . import _html

    tree = _html.get_html_tree(html)
    # Request one line past the limit so we can tell whether the limit was
    # actually exceeded.
    start_refs, end_refs, lines = _html.get_line_info(tree, limit+1)
    found = _internal.find_quote_position(lines, 1, limit)

    # PEP 8: compare against None with `is`, never `==`.
    if found is None:
        # No quoting found and we're below limit. We're done.
        return [(True, _html.render_html_tree(tree))]
    else:
        # Split the tree at the found line. html_copy lets the visible part
        # be re-rendered from an unmodified copy of the input.
        start_tree = _html.slice_tree(tree, start_refs, end_refs,
                                      (0, found+1), html_copy=html)
        end_tree = _html.slice_tree(tree, start_refs, end_refs,
                                    (found+1, None))
        return [
            (True, _html.render_html_tree(start_tree)),
            (False, _html.render_html_tree(end_tree)),
        ]
def unwrap_html(html):
    """
    If the passed HTML is the HTML body of a forwarded message, a dictionary
    with the following keys is returned:

    - type: "reply", "forward" or "quote"
    - html_top: HTML at the top of the passed message (if found)
    - html_bottom: HTML at the bottom of the passed message (if found)
    - from / to / subject / cc / bcc / reply-to: Corresponding header of the
      forwarded message, if it exists. (if found)
    - html: HTML of the forwarded message (if found)

    Otherwise, this function returns None.
    """
    # Imported lazily so the HTML machinery is only needed for HTML input.
    from . import _html

    tree = _html.get_html_tree(html)
    start_refs, end_refs, lines = _html.get_line_info(tree)

    # NOTE(review): `_patterns` is referenced but this module only imports
    # `re` and `_internal` — confirm the import exists upstream.
    result = _internal.unwrap(lines, 1, _patterns.MIN_HEADER_LINES, 1)

    if result:
        typ, top_range, hdrs, main_range, bottom_range, needs_unindent = result
        result = {
            'type': typ,
        }

        # Drop leading/trailing blank lines from each range.
        top_range = _html.trim_slice(lines, top_range)
        main_range = _html.trim_slice(lines, main_range)
        bottom_range = _html.trim_slice(lines, bottom_range)

        if top_range:
            top_tree = _html.slice_tree(tree, start_refs, end_refs, top_range,
                                        html_copy=html)
            html_top = _html.render_html_tree(top_tree)
            if html_top:
                result['html_top'] = html_top

        if bottom_range:
            bottom_tree = _html.slice_tree(tree, start_refs, end_refs,
                                           bottom_range, html_copy=html)
            html_bottom = _html.render_html_tree(bottom_tree)
            if html_bottom:
                result['html_bottom'] = html_bottom

        if main_range:
            main_tree = _html.slice_tree(tree, start_refs, end_refs, main_range)
            if needs_unindent:
                # Remove one level of blockquote nesting in place.
                _html.unindent_tree(main_tree)
            html = _html.render_html_tree(main_tree)
            if html:
                result['html'] = html

        if hdrs:
            result.update(hdrs)

        return result
|
closeio/quotequail | quotequail/__init__.py | unwrap_html | python | def unwrap_html(html):
from . import _html
tree = _html.get_html_tree(html)
start_refs, end_refs, lines = _html.get_line_info(tree)
result = _internal.unwrap(lines, 1, _patterns.MIN_HEADER_LINES, 1)
if result:
typ, top_range, hdrs, main_range, bottom_range, needs_unindent = result
result = {
'type': typ,
}
top_range = _html.trim_slice(lines, top_range)
main_range = _html.trim_slice(lines, main_range)
bottom_range = _html.trim_slice(lines, bottom_range)
if top_range:
top_tree = _html.slice_tree(tree, start_refs, end_refs, top_range,
html_copy=html)
html_top = _html.render_html_tree(top_tree)
if html_top:
result['html_top'] = html_top
if bottom_range:
bottom_tree = _html.slice_tree(tree, start_refs, end_refs,
bottom_range, html_copy=html)
html_bottom = _html.render_html_tree(bottom_tree)
if html_bottom:
result['html_bottom'] = html_bottom
if main_range:
main_tree = _html.slice_tree(tree, start_refs, end_refs, main_range)
if needs_unindent:
_html.unindent_tree(main_tree)
html = _html.render_html_tree(main_tree)
if html:
result['html'] = html
if hdrs:
result.update(hdrs)
return result | If the passed HTML is the HTML body of a forwarded message, a dictionary
with the following keys is returned:
- type: "reply", "forward" or "quote"
- html_top: HTML at the top of the passed message (if found)
- html_bottom: HTML at the bottom of the passed message (if found)
- from / to / subject / cc / bcc / reply-to: Corresponding header of the
forwarded message, if it exists. (if found)
- html: HTML of the forwarded message (if found)
Otherwise, this function returns None. | train | https://github.com/closeio/quotequail/blob/8a3960c033d595b25a8bbc2c340be898e3065b5f/quotequail/__init__.py#L110-L168 | [
"def unwrap(lines, max_wrap_lines, min_header_lines, min_quoted_lines):\n \"\"\"\n Returns a tuple of:\n - Type ('forward', 'reply', 'headers', 'quoted')\n - Range of the text at the top of the wrapped message (or None)\n - Headers dict (or None)\n - Range of the text of the wrapped message (or No... | # -*- coding: utf-8 -*-
# quotequail
# a library that identifies quoted text in email messages
import re
from . import _internal
__all__ = ['quote', 'quote_html', 'unwrap', 'unwrap_html']
def quote(text, limit=1000):
    """
    Takes a plain text message as an argument, returns a list of tuples. The
    first argument of the tuple denotes whether the text should be expanded by
    default. The second argument is the unmodified corresponding text.

    Example: [(True, 'expanded text'), (False, '> Some quoted text')]

    Unless the limit param is set to None, the text will automatically be
    quoted starting at the line where the limit is reached.
    """
    lines = text.split('\n')

    # NOTE(review): `_patterns` is referenced but this module only imports
    # `re` and `_internal` — confirm the import exists upstream.
    found = _internal.find_quote_position(lines, _patterns.MAX_WRAP_LINES, limit)

    # NOTE(review): PEP 8 prefers `is not None` over `!= None` here.
    if found != None:
        # Visible part up to the found line; quoted/collapsed part after.
        return [(True, '\n'.join(lines[:found+1])), (False, '\n'.join(lines[found+1:]))]

    return [(True, text)]
def quote_html(html, limit=1000):
    """
    Like quote(), but takes an HTML message as an argument. The limit param
    represents the maximum number of lines to traverse until quoting the rest
    of the markup. Lines are separated by block elements or <br>.
    """
    # Imported lazily so the HTML machinery is only needed for HTML input.
    from . import _html

    tree = _html.get_html_tree(html)
    # Request one line past the limit so we can tell whether it was exceeded.
    start_refs, end_refs, lines = _html.get_line_info(tree, limit+1)
    found = _internal.find_quote_position(lines, 1, limit)

    # NOTE(review): PEP 8 prefers `is None` over `== None` here.
    if found == None:
        # No quoting found and we're below limit. We're done.
        return [(True, _html.render_html_tree(tree))]
    else:
        # Split the tree at the found line; html_copy lets the visible part
        # be re-rendered from an unmodified copy of the input.
        start_tree = _html.slice_tree(tree, start_refs, end_refs,
                                      (0, found+1), html_copy=html)
        end_tree = _html.slice_tree(tree, start_refs, end_refs,
                                    (found+1, None))
        return [
            (True, _html.render_html_tree(start_tree)),
            (False, _html.render_html_tree(end_tree)),
        ]
def unwrap(text):
    """
    If the passed text is the text body of a forwarded message, a reply, or
    contains quoted text, a dictionary with the following keys is returned:

    - type: "reply", "forward" or "quote"
    - text_top: Text at the top of the passed message (if found)
    - text_bottom: Text at the bottom of the passed message (if found)
    - from / to / subject / cc / bcc / reply-to: Corresponding header of the
      forwarded message, if it exists. (if found)
    - text: Unindented text of the wrapped message (if found)

    Otherwise, this function returns None.
    """
    lines = text.split('\n')

    # NOTE(review): `_patterns` is referenced but this module only imports
    # `re` and `_internal` — confirm the import exists upstream.
    result = _internal.unwrap(lines, _patterns.MAX_WRAP_LINES,
            _patterns.MIN_HEADER_LINES,_patterns.MIN_QUOTED_LINES)

    if result:
        typ, top_range, hdrs, main_range, bottom_range, needs_unindent = result

        # Slice out each section; '' stands in for an absent section.
        text_top = lines[slice(*top_range)] if top_range else ''
        text = lines[slice(*main_range)] if main_range else ''
        text_bottom = lines[slice(*bottom_range)] if bottom_range else ''

        # Strip one level of '>' quoting from the wrapped message if needed.
        if needs_unindent:
            text = _internal.unindent_lines(text)

        result = {
            'type': typ,
        }

        text = '\n'.join(text).strip()
        text_top = '\n'.join(text_top).strip()
        text_bottom = '\n'.join(text_bottom).strip()

        # Only include non-empty sections in the result.
        if text:
            result['text'] = text
        if text_top:
            result['text_top'] = text_top
        if text_bottom:
            result['text_bottom'] = text_bottom
        if hdrs:
            result.update(hdrs)

        return result
|
closeio/quotequail | quotequail/_internal.py | find_pattern_on_line | python | def find_pattern_on_line(lines, n, max_wrap_lines):
for typ, regexes in COMPILED_PATTERN_MAP.items():
for regex in regexes:
for m in range(max_wrap_lines):
match_line = join_wrapped_lines(lines[n:n+1+m])
if match_line.startswith('>'):
match_line = match_line[1:].strip()
if regex.match(match_line.strip()):
return n+m, typ
return None, None | Finds a forward/reply pattern within the given lines on text on the given
line number and returns a tuple with the type ('reply' or 'forward') and
line number of where the pattern ends. The returned line number may be
different from the given line number in case the pattern wraps over
multiple lines.
Returns (None, None) if no pattern was found. | train | https://github.com/closeio/quotequail/blob/8a3960c033d595b25a8bbc2c340be898e3065b5f/quotequail/_internal.py#L9-L27 | [
"def join_wrapped_lines(lines):\n \"\"\"\n Join one or multiple lines that wrapped. Returns the reconstructed line.\n Takes into account proper spacing between the lines (see\n STRIP_SPACE_CHARS).\n \"\"\"\n if len(lines) == 1:\n return lines[0]\n\n joined = lines[0]\n for line in lin... | import re
from ._patterns import COMPILED_PATTERNS, COMPILED_PATTERN_MAP, HEADER_RE, HEADER_MAP, REPLY_DATE_SPLIT_REGEX, STRIP_SPACE_CHARS
"""
Internal methods. For max_wrap_lines, min_header_lines, min_quoted_lines
documentation see the corresponding constants in _patterns.py.
"""
def find_quote_position(lines, max_wrap_lines, limit=None):
    """
    Returns the (ending) line number of a quoting pattern. If a limit is given
    and the limit is reached, the limit is returned.

    Returns None if no pattern is found and no limit was hit.
    """
    for n in range(len(lines)):
        # A reply/forward pattern may wrap over multiple lines; `end` is the
        # line it finishes on.
        end, typ = find_pattern_on_line(lines, n, max_wrap_lines)
        if typ:
            return end
        # PEP 8: compare against None with `is not`, never `!=`.
        if limit is not None and n >= limit-1:
            return n

    return None
def join_wrapped_lines(lines):
    """
    Join one or multiple lines that wrapped. Returns the reconstructed line.
    Takes into account proper spacing between the lines (see
    STRIP_SPACE_CHARS).
    """
    joined = lines[0]

    for fragment in lines[1:]:
        # A trailing character from STRIP_SPACE_CHARS glues directly to the
        # next fragment; anything else gets a single separating space.
        if joined and joined[-1] in STRIP_SPACE_CHARS:
            joined += fragment
        else:
            joined += ' ' + fragment

    return joined
def extract_headers(lines, max_wrap_lines):
    """
    Extracts email headers from the given lines. Returns a dict with the
    detected headers and the amount of lines that were processed.
    """
    hdrs = {}
    # Name of the header currently being parsed (for continuation lines).
    header_name = None

    # Track overlong headers that extend over multiple lines
    extend_lines = 0

    # 1-past index of the last line that contributed a recognized header.
    lines_processed = 0

    for n, line in enumerate(lines):
        if not line.strip():
            # Blank line ends any continuation but not the header block.
            header_name = None
            continue

        match = HEADER_RE.match(line)
        if match:
            # A fresh "Name: value" header line.
            header_name, header_value = match.groups()
            header_name = header_name.strip().lower()
            extend_lines = 0
            # Only headers listed in HEADER_MAP are kept.
            if header_name in HEADER_MAP:
                hdrs[HEADER_MAP[header_name]] = header_value.strip()
                lines_processed = n+1
        else:
            # Possible continuation of the previous header's value.
            extend_lines += 1
            if extend_lines < max_wrap_lines and header_name in HEADER_MAP:
                hdrs[HEADER_MAP[header_name]] = join_wrapped_lines(
                    [hdrs[HEADER_MAP[header_name]], line.strip()])
                lines_processed = n+1
            else:
                # no more headers found
                break

    return hdrs, lines_processed
def parse_reply(line):
    """
    Parses the given reply line ("On DATE, USER wrote:") and returns a
    dictionary with the "date" and "from" keys, or None, if couldn't parse.
    """
    # Remove one level of quoting, if present.
    if line.startswith('>'):
        line = line[1:].strip()

    date = user = None

    for pattern in COMPILED_PATTERN_MAP['reply']:
        match = pattern.match(line)
        if match:
            groups = match.groups()
            if len(groups) == 2:
                # We're lucky and got both date and user split up.
                date, user = groups
            else:
                # One combined group: try the dedicated date/user splitter
                # first, then fall back to a simple comma split.
                split_match = REPLY_DATE_SPLIT_REGEX.match(groups[0])
                if split_match:
                    split_groups = split_match.groups()
                    date = split_groups[0]
                    user = split_groups[-1]
                else:
                    # Try a simple comma split
                    split = groups[0].rsplit(',', 1)
                    if len(split) == 2:
                        date, user = split

    if date:
        date = date.strip()
    if user:
        user = user.strip()

    # Only report a result if both pieces were found.
    if date and user:
        return {
            'date': date.strip(),
            'from': user.strip(),
        }
def find_unwrap_start(lines, max_wrap_lines, min_header_lines, min_quoted_lines):
    """
    Finds the starting point of a wrapped email. Returns a tuple containing
    (start_line_number, end_line_number, type), where type can be one of the
    following:

     * 'forward': A matching forwarding pattern was found
     * 'reply': A matching reply pattern was found
     * 'headers': Headers were found (usually a forwarded email)
     * 'quoted': A quote was found

    start_line_number corresponds to the line number where the forwarding/reply
    pattern starts, or where the headers/quote starts. end_line_number is only
    different from start_line_number if the forwarding/reply pattern spans over
    multiple lines (it does not extend to the end of the headers or of the
    quoted section).

    Returns (None, None, None) if nothing was found.
    """
    for n, line in enumerate(lines):
        if not line.strip():
            continue

        # Find a forward / reply start pattern
        end, typ = find_pattern_on_line(lines, n, max_wrap_lines)
        if typ:
            return n, end, typ

        # Find a quote
        if line.startswith('>'):
            # Check if there are at least min_quoted_lines lines that match
            matched_lines = 1

            if matched_lines >= min_quoted_lines:
                return n, n, 'quoted'

            # Peek ahead: blank lines are skipped, a non-quoted line ends
            # the quoted run.
            for peek_line in lines[n+1:]:
                if not peek_line.strip():
                    continue
                if not peek_line.startswith('>'):
                    break
                else:
                    matched_lines += 1
                    if matched_lines >= min_quoted_lines:
                        return n, n, 'quoted'

        # Find a header
        match = HEADER_RE.match(line)
        if match:
            # Only treat it as a header block if enough recognized headers
            # follow.
            if len(extract_headers(lines[n:], max_wrap_lines)[0]) >= min_header_lines:
                return n, n, 'headers'

    return None, None, None
def unindent_lines(lines):
    """
    Remove one level of '>' quoting from the start of the given lines.

    Processes lines from the top until the first line that is not quoted,
    and returns the unquoted prefix as a new list. A '> ' marker (with its
    trailing space) or a bare '>' is stripped from each line.
    """
    unquoted = []
    # The enumerate index in the original was never used; iterate directly.
    for line in lines:
        if line.startswith('> '):
            unquoted.append(line[2:])
        elif line.startswith('>'):
            unquoted.append(line[1:])
        else:
            # First non-quoted line ends the quoted block.
            break
    return unquoted
def unwrap(lines, max_wrap_lines, min_header_lines, min_quoted_lines):
    """
    Returns a tuple of:
    - Type ('forward', 'reply', 'headers', 'quoted')
    - Range of the text at the top of the wrapped message (or None)
    - Headers dict (or None)
    - Range of the text of the wrapped message (or None)
    - Range of the text below the wrapped message (or None)
    - Whether the wrapped text needs to be unindented
    """
    headers = {}

    # Get line number and wrapping type.
    start, end, typ = find_unwrap_start(lines, max_wrap_lines, min_header_lines, min_quoted_lines)

    # We found a line indicating that it's a forward/reply.
    if typ in ('forward', 'reply'):
        main_type = typ

        if typ == 'reply':
            # Pull date/from out of the "On DATE, USER wrote:" line.
            reply_headers = parse_reply(join_wrapped_lines(lines[start:end+1]))
            if reply_headers:
                headers.update(reply_headers)

        # Find where the headers or the quoted section starts.
        # We can set min_quoted_lines to 1 because we expect a quoted section.
        start2, end2, typ = find_unwrap_start(lines[end+1:], max_wrap_lines, min_header_lines, 1)

        if typ == 'quoted':
            # Quoted section starts. Unindent and check if there are headers.
            quoted_start = end+1+start2
            unquoted = unindent_lines(lines[quoted_start:])
            rest_start = quoted_start + len(unquoted)
            start3, end3, typ = find_unwrap_start(unquoted, max_wrap_lines, min_header_lines, min_quoted_lines)
            if typ == 'headers':
                hdrs, hdrs_length = extract_headers(unquoted[start3:], max_wrap_lines)
                if hdrs:
                    headers.update(hdrs)
                rest2_start = quoted_start+start3+hdrs_length
                return main_type, (0, start), headers, (rest2_start, rest_start), (rest_start, None), True
            else:
                return main_type, (0, start), headers, (quoted_start, rest_start), (rest_start, None), True

        elif typ == 'headers':
            hdrs, hdrs_length = extract_headers(lines[start+1:], max_wrap_lines)
            if hdrs:
                headers.update(hdrs)
            rest_start = start + 1 + hdrs_length
            return main_type, (0, start), headers, (rest_start, None), None, False

        else:
            # Didn't find quoted section or headers, assume that everything
            # below is the quoted text.
            return main_type, (0, start), headers, (start+(start2 or 0)+1, None), None, False

    # We just found headers, which usually indicates a forwarding.
    elif typ == 'headers':
        main_type = 'forward'
        hdrs, hdrs_length = extract_headers(lines[start:], max_wrap_lines)
        rest_start = start + hdrs_length
        return main_type, (0, start), hdrs, (rest_start, None), None, False

    # We found quoted text. Headers may be within the quoted text.
    elif typ == 'quoted':
        unquoted = unindent_lines(lines[start:])
        rest_start = start + len(unquoted)
        start2, end2, typ = find_unwrap_start(unquoted, max_wrap_lines, min_header_lines, min_quoted_lines)
        if typ == 'headers':
            main_type = 'forward'
            hdrs, hdrs_length = extract_headers(unquoted[start2:], max_wrap_lines)
            # NOTE(review): unlike the reply branch above, this offset does
            # not add start2 — confirm intended upstream.
            rest2_start = start + hdrs_length
            return main_type, (0, start), hdrs, (rest2_start, rest_start), (rest_start, None), True
        else:
            main_type = 'quote'
            return main_type, (None, start), None, (start, rest_start), (rest_start, None), True
|
closeio/quotequail | quotequail/_internal.py | find_quote_position | python | def find_quote_position(lines, max_wrap_lines, limit=None):
for n in range(len(lines)):
end, typ = find_pattern_on_line(lines, n, max_wrap_lines)
if typ:
return end
if limit != None and n >= limit-1:
return n
return None | Returns the (ending) line number of a quoting pattern. If a limit is given
and the limit is reached, the limit is returned. | train | https://github.com/closeio/quotequail/blob/8a3960c033d595b25a8bbc2c340be898e3065b5f/quotequail/_internal.py#L29-L42 | [
"def find_pattern_on_line(lines, n, max_wrap_lines):\n \"\"\"\n Finds a forward/reply pattern within the given lines on text on the given\n line number and returns a tuple with the type ('reply' or 'forward') and\n line number of where the pattern ends. The returned line number may be\n different fro... | import re
from ._patterns import COMPILED_PATTERNS, COMPILED_PATTERN_MAP, HEADER_RE, HEADER_MAP, REPLY_DATE_SPLIT_REGEX, STRIP_SPACE_CHARS
"""
Internal methods. For max_wrap_lines, min_header_lines, min_quoted_lines
documentation see the corresponding constants in _patterns.py.
"""
def find_pattern_on_line(lines, n, max_wrap_lines):
    """
    Finds a forward/reply pattern within the given lines on text on the given
    line number and returns a tuple with the type ('reply' or 'forward') and
    line number of where the pattern ends. The returned line number may be
    different from the given line number in case the pattern wraps over
    multiple lines.

    Returns (None, None) if no pattern was found.
    """
    for typ, regexes in COMPILED_PATTERN_MAP.items():
        for regex in regexes:
            # Try the line alone first, then joined with up to
            # max_wrap_lines-1 following lines to catch wrapped patterns.
            for m in range(max_wrap_lines):
                match_line = join_wrapped_lines(lines[n:n+1+m])
                if match_line.startswith('>'):
                    # Remove one level of quoting before matching.
                    match_line = match_line[1:].strip()
                if regex.match(match_line.strip()):
                    return n+m, typ

    return None, None
def join_wrapped_lines(lines):
    """
    Join one or multiple lines that wrapped. Returns the reconstructed line.
    Takes into account proper spacing between the lines (see
    STRIP_SPACE_CHARS).
    """
    # Single line: nothing to join.
    if len(lines) == 1:
        return lines[0]

    joined = lines[0]
    for line in lines[1:]:
        # A trailing character from STRIP_SPACE_CHARS glues directly to the
        # next fragment; anything else gets a single separating space.
        if joined and joined[-1] in STRIP_SPACE_CHARS:
            joined += line
        else:
            joined += ' '
            joined += line

    return joined
def extract_headers(lines, max_wrap_lines):
    """
    Extracts email headers from the given lines. Returns a dict with the
    detected headers and the amount of lines that were processed.
    """
    hdrs = {}
    # Name of the header currently being parsed (for continuation lines).
    header_name = None

    # Track overlong headers that extend over multiple lines
    extend_lines = 0

    # 1-past index of the last line that contributed a recognized header.
    lines_processed = 0

    for n, line in enumerate(lines):
        if not line.strip():
            # Blank line ends any continuation but not the header block.
            header_name = None
            continue

        match = HEADER_RE.match(line)
        if match:
            # A fresh "Name: value" header line.
            header_name, header_value = match.groups()
            header_name = header_name.strip().lower()
            extend_lines = 0
            # Only headers listed in HEADER_MAP are kept.
            if header_name in HEADER_MAP:
                hdrs[HEADER_MAP[header_name]] = header_value.strip()
                lines_processed = n+1
        else:
            # Possible continuation of the previous header's value.
            extend_lines += 1
            if extend_lines < max_wrap_lines and header_name in HEADER_MAP:
                hdrs[HEADER_MAP[header_name]] = join_wrapped_lines(
                    [hdrs[HEADER_MAP[header_name]], line.strip()])
                lines_processed = n+1
            else:
                # no more headers found
                break

    return hdrs, lines_processed
def parse_reply(line):
    """
    Parses the given reply line ("On DATE, USER wrote:") and returns a
    dictionary with the "date" and "from" keys, or None, if couldn't parse.
    """
    # Remove one level of quoting, if present.
    if line.startswith('>'):
        line = line[1:].strip()

    date = user = None

    for pattern in COMPILED_PATTERN_MAP['reply']:
        match = pattern.match(line)
        if match:
            groups = match.groups()
            if len(groups) == 2:
                # We're lucky and got both date and user split up.
                date, user = groups
            else:
                # One combined group: try the dedicated date/user splitter
                # first, then fall back to a simple comma split.
                split_match = REPLY_DATE_SPLIT_REGEX.match(groups[0])
                if split_match:
                    split_groups = split_match.groups()
                    date = split_groups[0]
                    user = split_groups[-1]
                else:
                    # Try a simple comma split
                    split = groups[0].rsplit(',', 1)
                    if len(split) == 2:
                        date, user = split

    if date:
        date = date.strip()
    if user:
        user = user.strip()

    # Only report a result if both pieces were found.
    if date and user:
        return {
            'date': date.strip(),
            'from': user.strip(),
        }
def find_unwrap_start(lines, max_wrap_lines, min_header_lines, min_quoted_lines):
    """
    Finds the starting point of a wrapped email. Returns a tuple containing
    (start_line_number, end_line_number, type), where type can be one of the
    following:

     * 'forward': A matching forwarding pattern was found
     * 'reply': A matching reply pattern was found
     * 'headers': Headers were found (usually a forwarded email)
     * 'quoted': A quote was found

    start_line_number corresponds to the line number where the forwarding/reply
    pattern starts, or where the headers/quote starts. end_line_number is only
    different from start_line_number if the forwarding/reply pattern spans over
    multiple lines (it does not extend to the end of the headers or of the
    quoted section).

    Returns (None, None, None) if nothing was found.
    """
    for n, line in enumerate(lines):
        if not line.strip():
            continue

        # Find a forward / reply start pattern
        end, typ = find_pattern_on_line(lines, n, max_wrap_lines)
        if typ:
            return n, end, typ

        # Find a quote
        if line.startswith('>'):
            # Check if there are at least min_quoted_lines lines that match
            matched_lines = 1

            if matched_lines >= min_quoted_lines:
                return n, n, 'quoted'

            # Peek ahead: blank lines are skipped, a non-quoted line ends
            # the quoted run.
            for peek_line in lines[n+1:]:
                if not peek_line.strip():
                    continue
                if not peek_line.startswith('>'):
                    break
                else:
                    matched_lines += 1
                    if matched_lines >= min_quoted_lines:
                        return n, n, 'quoted'

        # Find a header
        match = HEADER_RE.match(line)
        if match:
            # Only treat it as a header block if enough recognized headers
            # follow.
            if len(extract_headers(lines[n:], max_wrap_lines)[0]) >= min_header_lines:
                return n, n, 'headers'

    return None, None, None
def unindent_lines(lines):
    """
    Remove one level of '>' quoting from the start of the given lines.

    Processes lines from the top until the first line that is not quoted,
    and returns the unquoted prefix as a new list. A '> ' marker (with its
    trailing space) or a bare '>' is stripped from each line.
    """
    unquoted = []
    # The enumerate index in the original was never used; iterate directly.
    for line in lines:
        if line.startswith('> '):
            unquoted.append(line[2:])
        elif line.startswith('>'):
            unquoted.append(line[1:])
        else:
            # First non-quoted line ends the quoted block.
            break
    return unquoted
def unwrap(lines, max_wrap_lines, min_header_lines, min_quoted_lines):
"""
Returns a tuple of:
- Type ('forward', 'reply', 'headers', 'quoted')
- Range of the text at the top of the wrapped message (or None)
- Headers dict (or None)
- Range of the text of the wrapped message (or None)
- Range of the text below the wrapped message (or None)
- Whether the wrapped text needs to be unindented
"""
headers = {}
# Get line number and wrapping type.
start, end, typ = find_unwrap_start(lines, max_wrap_lines, min_header_lines, min_quoted_lines)
# We found a line indicating that it's a forward/reply.
if typ in ('forward', 'reply'):
main_type = typ
if typ == 'reply':
reply_headers = parse_reply(join_wrapped_lines(lines[start:end+1]))
if reply_headers:
headers.update(reply_headers)
# Find where the headers or the quoted section starts.
# We can set min_quoted_lines to 1 because we expect a quoted section.
start2, end2, typ = find_unwrap_start(lines[end+1:], max_wrap_lines, min_header_lines, 1)
if typ == 'quoted':
# Quoted section starts. Unindent and check if there are headers.
quoted_start = end+1+start2
unquoted = unindent_lines(lines[quoted_start:])
rest_start = quoted_start + len(unquoted)
start3, end3, typ = find_unwrap_start(unquoted, max_wrap_lines, min_header_lines, min_quoted_lines)
if typ == 'headers':
hdrs, hdrs_length = extract_headers(unquoted[start3:], max_wrap_lines)
if hdrs:
headers.update(hdrs)
rest2_start = quoted_start+start3+hdrs_length
return main_type, (0, start), headers, (rest2_start, rest_start), (rest_start, None), True
else:
return main_type, (0, start), headers, (quoted_start, rest_start), (rest_start, None), True
elif typ == 'headers':
hdrs, hdrs_length = extract_headers(lines[start+1:], max_wrap_lines)
if hdrs:
headers.update(hdrs)
rest_start = start + 1 + hdrs_length
return main_type, (0, start), headers, (rest_start, None), None, False
else:
# Didn't find quoted section or headers, assume that everything
            # below is the quoted text.
return main_type, (0, start), headers, (start+(start2 or 0)+1, None), None, False
# We just found headers, which usually indicates a forwarding.
elif typ == 'headers':
main_type = 'forward'
hdrs, hdrs_length = extract_headers(lines[start:], max_wrap_lines)
rest_start = start + hdrs_length
return main_type, (0, start), hdrs, (rest_start, None), None, False
# We found quoted text. Headers may be within the quoted text.
elif typ == 'quoted':
unquoted = unindent_lines(lines[start:])
rest_start = start + len(unquoted)
start2, end2, typ = find_unwrap_start(unquoted, max_wrap_lines, min_header_lines, min_quoted_lines)
if typ == 'headers':
main_type = 'forward'
hdrs, hdrs_length = extract_headers(unquoted[start2:], max_wrap_lines)
rest2_start = start + hdrs_length
return main_type, (0, start), hdrs, (rest2_start, rest_start), (rest_start, None), True
else:
main_type = 'quote'
return main_type, (None, start), None, (start, rest_start), (rest_start, None), True
|
closeio/quotequail | quotequail/_internal.py | join_wrapped_lines | python | def join_wrapped_lines(lines):
if len(lines) == 1:
return lines[0]
joined = lines[0]
for line in lines[1:]:
if joined and joined[-1] in STRIP_SPACE_CHARS:
joined += line
else:
joined += ' '
joined += line
return joined | Join one or multiple lines that wrapped. Returns the reconstructed line.
Takes into account proper spacing between the lines (see
STRIP_SPACE_CHARS). | train | https://github.com/closeio/quotequail/blob/8a3960c033d595b25a8bbc2c340be898e3065b5f/quotequail/_internal.py#L44-L61 | null | import re
from ._patterns import COMPILED_PATTERNS, COMPILED_PATTERN_MAP, HEADER_RE, HEADER_MAP, REPLY_DATE_SPLIT_REGEX, STRIP_SPACE_CHARS
"""
Internal methods. For max_wrap_lines, min_header_lines, min_quoted_lines
documentation see the corresponding constants in _patterns.py.
"""
def find_pattern_on_line(lines, n, max_wrap_lines):
    """
    Finds a forward/reply pattern within the given lines of text on the given
    line number and returns a tuple with the line number of where the pattern
    ends and the type ('reply' or 'forward'). The returned line number may be
    different from the given line number in case the pattern wraps over
    multiple lines.

    Returns (None, None) if no pattern was found.
    """
    # The candidate text for each wrap length m depends only on m, not on
    # which pattern we test, so build all candidates once instead of
    # re-joining the lines for every (type, regex) combination.
    candidates = []
    for m in range(max_wrap_lines):
        match_line = join_wrapped_lines(lines[n:n+1+m])
        if match_line.startswith('>'):
            # Patterns may appear inside a quoted section; drop one quote level.
            match_line = match_line[1:].strip()
        candidates.append(match_line.strip())

    for typ, regexes in COMPILED_PATTERN_MAP.items():
        for regex in regexes:
            for m, match_line in enumerate(candidates):
                if regex.match(match_line):
                    return n+m, typ

    return None, None
def find_quote_position(lines, max_wrap_lines, limit=None):
    """
    Returns the (ending) line number of a quoting pattern. If a limit is given
    and the limit is reached, the limit is returned.

    Returns None when no pattern is found (and no limit is hit).
    """
    for n in range(len(lines)):
        end, typ = find_pattern_on_line(lines, n, max_wrap_lines)
        if typ:
            return end
        # PEP 8: compare to the None singleton with identity, not equality.
        if limit is not None and n >= limit-1:
            return n

    return None
def extract_headers(lines, max_wrap_lines):
    """
    Extracts email headers from the given lines. Returns a dict with the
    detected headers and the amount of lines that were processed.
    """
    hdrs = {}
    # Name of the header currently being parsed (lowercased); reset to None
    # by a blank line so continuations cannot attach across paragraph breaks.
    header_name = None
    # Track overlong headers that extend over multiple lines
    extend_lines = 0
    # 1-based count of lines consumed as part of the header section.
    lines_processed = 0

    for n, line in enumerate(lines):
        if not line.strip():
            header_name = None
            continue

        match = HEADER_RE.match(line)
        if match:
            # Start of a new "Name: value" header line.
            header_name, header_value = match.groups()
            header_name = header_name.strip().lower()
            extend_lines = 0
            # Only headers we recognize (HEADER_MAP) are recorded; unknown
            # headers are skipped but still reset the continuation state.
            if header_name in HEADER_MAP:
                hdrs[HEADER_MAP[header_name]] = header_value.strip()
                lines_processed = n+1
        else:
            # Non-header line: treat as a wrapped continuation of the
            # previous recognized header, up to max_wrap_lines lines.
            extend_lines += 1
            if extend_lines < max_wrap_lines and header_name in HEADER_MAP:
                hdrs[HEADER_MAP[header_name]] = join_wrapped_lines(
                    [hdrs[HEADER_MAP[header_name]], line.strip()])
                lines_processed = n+1
            else:
                # no more headers found
                break

    return hdrs, lines_processed
def parse_reply(line):
    """
    Parses the given reply line ("On DATE, USER wrote:") and returns a
    dictionary with the "date" and "from" keys, or None, if couldn't parse.
    """
    # Allow the reply pattern to sit inside one level of quoting.
    if line.startswith('>'):
        line = line[1:].strip()

    date = user = None

    for pattern in COMPILED_PATTERN_MAP['reply']:
        match = pattern.match(line)
        if match:
            groups = match.groups()
            if len(groups) == 2:
                # We're lucky and got both date and user split up.
                date, user = groups
            else:
                # Single capture group: split it into date and user parts.
                split_match = REPLY_DATE_SPLIT_REGEX.match(groups[0])
                if split_match:
                    split_groups = split_match.groups()
                    date = split_groups[0]
                    user = split_groups[-1]
                else:
                    # Try a simple comma split
                    split = groups[0].rsplit(',', 1)
                    if len(split) == 2:
                        date, user = split

    if date:
        date = date.strip()
    if user:
        user = user.strip()

    # Only report a result when both pieces were recovered.
    if date and user:
        return {
            'date': date.strip(),
            'from': user.strip(),
        }
def find_unwrap_start(lines, max_wrap_lines, min_header_lines, min_quoted_lines):
    """
    Finds the starting point of a wrapped email. Returns a tuple containing
    (start_line_number, end_line_number, type), where type can be one of the
    following:

     * 'forward': A matching forwarding pattern was found
     * 'reply': A matching reply pattern was found
     * 'headers': Headers were found (usually a forwarded email)
     * 'quoted': A quoted section was found

    start_line_number corresponds to the line number where the forwarding/reply
    pattern starts, or where the headers/quote starts. end_line_number is only
    different from start_line_number if the forwarding/reply pattern spans over
    multiple lines (it does not extend to the end of the headers or of the
    quoted section).

    Returns (None, None, None) if nothing was found.
    """
    for n, line in enumerate(lines):
        if not line.strip():
            continue

        # Find a forward / reply start pattern
        end, typ = find_pattern_on_line(lines, n, max_wrap_lines)
        if typ:
            return n, end, typ

        # Find a quote
        if line.startswith('>'):
            # Check if there are at least min_quoted_lines lines that match
            matched_lines = 1

            # Short-circuit when a single quoted line is enough.
            if matched_lines >= min_quoted_lines:
                return n, n, 'quoted'

            # Peek ahead, skipping blank lines, counting consecutive
            # quoted lines until the threshold is reached or a
            # non-quoted line ends the run.
            for peek_line in lines[n+1:]:
                if not peek_line.strip():
                    continue
                if not peek_line.startswith('>'):
                    break
                else:
                    matched_lines += 1
                    if matched_lines >= min_quoted_lines:
                        return n, n, 'quoted'

        # Find a header
        match = HEADER_RE.match(line)
        if match:
            # Require min_header_lines recognized headers to avoid treating
            # a stray "Word: value" line as a header section.
            if len(extract_headers(lines[n:], max_wrap_lines)[0]) >= min_header_lines:
                return n, n, 'headers'

    return None, None, None
def unindent_lines(lines):
    """
    Removes one level of '>' quoting from the given lines, stopping at the
    first line that is not quoted. Returns the list of unquoted lines.
    """
    unquoted = []
    for line in lines:  # index was unused; plain iteration suffices
        if line.startswith('> '):
            # Strip the quote marker plus the conventional following space.
            unquoted.append(line[2:])
        elif line.startswith('>'):
            unquoted.append(line[1:])
        else:
            # First non-quoted line ends the quoted block.
            break

    return unquoted
def unwrap(lines, max_wrap_lines, min_header_lines, min_quoted_lines):
    """
    Returns a tuple of:
    - Type ('forward', 'reply', 'headers', 'quoted')
    - Range of the text at the top of the wrapped message (or None)
    - Headers dict (or None)
    - Range of the text of the wrapped message (or None)
    - Range of the text below the wrapped message (or None)
    - Whether the wrapped text needs to be unindented
    """
    headers = {}

    # Get line number and wrapping type.
    start, end, typ = find_unwrap_start(lines, max_wrap_lines, min_header_lines, min_quoted_lines)

    # We found a line indicating that it's a forward/reply.
    if typ in ('forward', 'reply'):
        main_type = typ

        if typ == 'reply':
            # Pull date/from out of the "On DATE, USER wrote:" line(s).
            reply_headers = parse_reply(join_wrapped_lines(lines[start:end+1]))
            if reply_headers:
                headers.update(reply_headers)

        # Find where the headers or the quoted section starts.
        # We can set min_quoted_lines to 1 because we expect a quoted section.
        start2, end2, typ = find_unwrap_start(lines[end+1:], max_wrap_lines, min_header_lines, 1)

        if typ == 'quoted':
            # Quoted section starts. Unindent and check if there are headers.
            quoted_start = end+1+start2
            unquoted = unindent_lines(lines[quoted_start:])
            rest_start = quoted_start + len(unquoted)
            start3, end3, typ = find_unwrap_start(unquoted, max_wrap_lines, min_header_lines, min_quoted_lines)
            if typ == 'headers':
                hdrs, hdrs_length = extract_headers(unquoted[start3:], max_wrap_lines)
                if hdrs:
                    headers.update(hdrs)
                rest2_start = quoted_start+start3+hdrs_length
                return main_type, (0, start), headers, (rest2_start, rest_start), (rest_start, None), True
            else:
                return main_type, (0, start), headers, (quoted_start, rest_start), (rest_start, None), True

        elif typ == 'headers':
            # NOTE(review): headers were detected at lines[end+1+start2] but
            # extraction starts at lines[start+1] — consistent only when
            # end == start and start2 == 0; confirm against upstream.
            hdrs, hdrs_length = extract_headers(lines[start+1:], max_wrap_lines)
            if hdrs:
                headers.update(hdrs)
            rest_start = start + 1 + hdrs_length
            return main_type, (0, start), headers, (rest_start, None), None, False

        else:
            # Didn't find quoted section or headers, assume that everything
            # below is the quoted text.
            return main_type, (0, start), headers, (start+(start2 or 0)+1, None), None, False

    # We just found headers, which usually indicates a forwarding.
    elif typ == 'headers':
        main_type = 'forward'
        hdrs, hdrs_length = extract_headers(lines[start:], max_wrap_lines)
        rest_start = start + hdrs_length
        return main_type, (0, start), hdrs, (rest_start, None), None, False

    # We found quoted text. Headers may be within the quoted text.
    elif typ == 'quoted':
        unquoted = unindent_lines(lines[start:])
        rest_start = start + len(unquoted)
        start2, end2, typ = find_unwrap_start(unquoted, max_wrap_lines, min_header_lines, min_quoted_lines)
        if typ == 'headers':
            main_type = 'forward'
            hdrs, hdrs_length = extract_headers(unquoted[start2:], max_wrap_lines)
            # NOTE(review): the analogous branch above adds the start offset
            # (quoted_start+start3+hdrs_length); this one omits start2 —
            # possibly should be start+start2+hdrs_length. Confirm upstream.
            rest2_start = start + hdrs_length
            return main_type, (0, start), hdrs, (rest2_start, rest_start), (rest_start, None), True
        else:
            main_type = 'quote'
            return main_type, (None, start), None, (start, rest_start), (rest_start, None), True
|
closeio/quotequail | quotequail/_internal.py | extract_headers | python | def extract_headers(lines, max_wrap_lines):
hdrs = {}
header_name = None
# Track overlong headers that extend over multiple lines
extend_lines = 0
lines_processed = 0
for n, line in enumerate(lines):
if not line.strip():
header_name = None
continue
match = HEADER_RE.match(line)
if match:
header_name, header_value = match.groups()
header_name = header_name.strip().lower()
extend_lines = 0
if header_name in HEADER_MAP:
hdrs[HEADER_MAP[header_name]] = header_value.strip()
lines_processed = n+1
else:
extend_lines += 1
if extend_lines < max_wrap_lines and header_name in HEADER_MAP:
hdrs[HEADER_MAP[header_name]] = join_wrapped_lines(
[hdrs[HEADER_MAP[header_name]], line.strip()])
lines_processed = n+1
else:
# no more headers found
break
return hdrs, lines_processed | Extracts email headers from the given lines. Returns a dict with the
detected headers and the amount of lines that were processed. | train | https://github.com/closeio/quotequail/blob/8a3960c033d595b25a8bbc2c340be898e3065b5f/quotequail/_internal.py#L63-L100 | [
"def join_wrapped_lines(lines):\n \"\"\"\n Join one or multiple lines that wrapped. Returns the reconstructed line.\n Takes into account proper spacing between the lines (see\n STRIP_SPACE_CHARS).\n \"\"\"\n if len(lines) == 1:\n return lines[0]\n\n joined = lines[0]\n for line in lin... | import re
from ._patterns import COMPILED_PATTERNS, COMPILED_PATTERN_MAP, HEADER_RE, HEADER_MAP, REPLY_DATE_SPLIT_REGEX, STRIP_SPACE_CHARS
"""
Internal methods. For max_wrap_lines, min_header_lines, min_quoted_lines
documentation see the corresponding constants in _patterns.py.
"""
def find_pattern_on_line(lines, n, max_wrap_lines):
"""
Finds a forward/reply pattern within the given lines on text on the given
line number and returns a tuple with the type ('reply' or 'forward') and
line number of where the pattern ends. The returned line number may be
different from the given line number in case the pattern wraps over
multiple lines.
Returns (None, None) if no pattern was found.
"""
for typ, regexes in COMPILED_PATTERN_MAP.items():
for regex in regexes:
for m in range(max_wrap_lines):
match_line = join_wrapped_lines(lines[n:n+1+m])
if match_line.startswith('>'):
match_line = match_line[1:].strip()
if regex.match(match_line.strip()):
return n+m, typ
return None, None
def find_quote_position(lines, max_wrap_lines, limit=None):
"""
Returns the (ending) line number of a quoting pattern. If a limit is given
and the limit is reached, the limit is returned.
"""
for n in range(len(lines)):
end, typ = find_pattern_on_line(lines, n, max_wrap_lines)
if typ:
return end
if limit != None and n >= limit-1:
return n
return None
def join_wrapped_lines(lines):
    """
    Join one or multiple lines that wrapped. Returns the reconstructed line.
    Takes into account proper spacing between the lines (see
    STRIP_SPACE_CHARS).
    """
    if len(lines) == 1:
        return lines[0]

    result = lines[0]
    for fragment in lines[1:]:
        # Append directly when the accumulated text ends with a character
        # after which the wrap swallowed the whitespace; otherwise restore
        # the single space the wrap removed.
        glue = '' if result and result[-1] in STRIP_SPACE_CHARS else ' '
        result = result + glue + fragment
    return result
def parse_reply(line):
"""
Parses the given reply line ("On DATE, USER wrote:") and returns a
dictionary with the "Date" and "From" keys, or None, if couldn't parse.
"""
if line.startswith('>'):
line = line[1:].strip()
date = user = None
for pattern in COMPILED_PATTERN_MAP['reply']:
match = pattern.match(line)
if match:
groups = match.groups()
if len(groups) == 2:
# We're lucky and got both date and user split up.
date, user = groups
else:
split_match = REPLY_DATE_SPLIT_REGEX.match(groups[0])
if split_match:
split_groups = split_match.groups()
date = split_groups[0]
user = split_groups[-1]
else:
# Try a simple comma split
split = groups[0].rsplit(',', 1)
if len(split) == 2:
date, user = split
if date:
date = date.strip()
if user:
user = user.strip()
if date and user:
return {
'date': date.strip(),
'from': user.strip(),
}
def find_unwrap_start(lines, max_wrap_lines, min_header_lines, min_quoted_lines):
"""
Finds the starting point of a wrapped email. Returns a tuple containing
(start_line_number, end_line_number, type), where type can be one of the
following:
* 'forward': A matching forwarding pattern was found
* 'reply': A matching reply pattern was found
* 'headers': Headers were found (usually a forwarded email)
* 'quote': A quote was found
start_line_number corresponds to the line number where the forwarding/reply
pattern starts, or where the headers/quote starts. end_line_number is only
different from start_line_number if the forwarding/reply pattern spans over
multiple lines (it does not extend to the end of the headers or of the
quoted section).
Returns (None, None, None) if nothing was found.
"""
for n, line in enumerate(lines):
if not line.strip():
continue
# Find a forward / reply start pattern
end, typ = find_pattern_on_line(lines, n, max_wrap_lines)
if typ:
return n, end, typ
# Find a quote
if line.startswith('>'):
# Check if there are at least min_quoted_lines lines that match
matched_lines = 1
if matched_lines >= min_quoted_lines:
return n, n, 'quoted'
for peek_line in lines[n+1:]:
if not peek_line.strip():
continue
if not peek_line.startswith('>'):
break
else:
matched_lines += 1
if matched_lines >= min_quoted_lines:
return n, n, 'quoted'
# Find a header
match = HEADER_RE.match(line)
if match:
if len(extract_headers(lines[n:], max_wrap_lines)[0]) >= min_header_lines:
return n, n, 'headers'
return None, None, None
def unindent_lines(lines):
unquoted = []
for n, line in enumerate(lines):
if line.startswith('> '):
unquoted.append(line[2:])
elif line.startswith('>'):
unquoted.append(line[1:])
else:
break
return unquoted
def unwrap(lines, max_wrap_lines, min_header_lines, min_quoted_lines):
"""
Returns a tuple of:
- Type ('forward', 'reply', 'headers', 'quoted')
- Range of the text at the top of the wrapped message (or None)
- Headers dict (or None)
- Range of the text of the wrapped message (or None)
- Range of the text below the wrapped message (or None)
- Whether the wrapped text needs to be unindented
"""
headers = {}
# Get line number and wrapping type.
start, end, typ = find_unwrap_start(lines, max_wrap_lines, min_header_lines, min_quoted_lines)
# We found a line indicating that it's a forward/reply.
if typ in ('forward', 'reply'):
main_type = typ
if typ == 'reply':
reply_headers = parse_reply(join_wrapped_lines(lines[start:end+1]))
if reply_headers:
headers.update(reply_headers)
# Find where the headers or the quoted section starts.
# We can set min_quoted_lines to 1 because we expect a quoted section.
start2, end2, typ = find_unwrap_start(lines[end+1:], max_wrap_lines, min_header_lines, 1)
if typ == 'quoted':
# Quoted section starts. Unindent and check if there are headers.
quoted_start = end+1+start2
unquoted = unindent_lines(lines[quoted_start:])
rest_start = quoted_start + len(unquoted)
start3, end3, typ = find_unwrap_start(unquoted, max_wrap_lines, min_header_lines, min_quoted_lines)
if typ == 'headers':
hdrs, hdrs_length = extract_headers(unquoted[start3:], max_wrap_lines)
if hdrs:
headers.update(hdrs)
rest2_start = quoted_start+start3+hdrs_length
return main_type, (0, start), headers, (rest2_start, rest_start), (rest_start, None), True
else:
return main_type, (0, start), headers, (quoted_start, rest_start), (rest_start, None), True
elif typ == 'headers':
hdrs, hdrs_length = extract_headers(lines[start+1:], max_wrap_lines)
if hdrs:
headers.update(hdrs)
rest_start = start + 1 + hdrs_length
return main_type, (0, start), headers, (rest_start, None), None, False
else:
# Didn't find quoted section or headers, assume that everything
            # below is the quoted text.
return main_type, (0, start), headers, (start+(start2 or 0)+1, None), None, False
# We just found headers, which usually indicates a forwarding.
elif typ == 'headers':
main_type = 'forward'
hdrs, hdrs_length = extract_headers(lines[start:], max_wrap_lines)
rest_start = start + hdrs_length
return main_type, (0, start), hdrs, (rest_start, None), None, False
# We found quoted text. Headers may be within the quoted text.
elif typ == 'quoted':
unquoted = unindent_lines(lines[start:])
rest_start = start + len(unquoted)
start2, end2, typ = find_unwrap_start(unquoted, max_wrap_lines, min_header_lines, min_quoted_lines)
if typ == 'headers':
main_type = 'forward'
hdrs, hdrs_length = extract_headers(unquoted[start2:], max_wrap_lines)
rest2_start = start + hdrs_length
return main_type, (0, start), hdrs, (rest2_start, rest_start), (rest_start, None), True
else:
main_type = 'quote'
return main_type, (None, start), None, (start, rest_start), (rest_start, None), True
|
closeio/quotequail | quotequail/_internal.py | parse_reply | python | def parse_reply(line):
if line.startswith('>'):
line = line[1:].strip()
date = user = None
for pattern in COMPILED_PATTERN_MAP['reply']:
match = pattern.match(line)
if match:
groups = match.groups()
if len(groups) == 2:
# We're lucky and got both date and user split up.
date, user = groups
else:
split_match = REPLY_DATE_SPLIT_REGEX.match(groups[0])
if split_match:
split_groups = split_match.groups()
date = split_groups[0]
user = split_groups[-1]
else:
# Try a simple comma split
split = groups[0].rsplit(',', 1)
if len(split) == 2:
date, user = split
if date:
date = date.strip()
if user:
user = user.strip()
if date and user:
return {
'date': date.strip(),
'from': user.strip(),
} | Parses the given reply line ("On DATE, USER wrote:") and returns a
dictionary with the "Date" and "From" keys, or None, if couldn't parse. | train | https://github.com/closeio/quotequail/blob/8a3960c033d595b25a8bbc2c340be898e3065b5f/quotequail/_internal.py#L102-L141 | null | import re
from ._patterns import COMPILED_PATTERNS, COMPILED_PATTERN_MAP, HEADER_RE, HEADER_MAP, REPLY_DATE_SPLIT_REGEX, STRIP_SPACE_CHARS
"""
Internal methods. For max_wrap_lines, min_header_lines, min_quoted_lines
documentation see the corresponding constants in _patterns.py.
"""
def find_pattern_on_line(lines, n, max_wrap_lines):
"""
Finds a forward/reply pattern within the given lines on text on the given
line number and returns a tuple with the type ('reply' or 'forward') and
line number of where the pattern ends. The returned line number may be
different from the given line number in case the pattern wraps over
multiple lines.
Returns (None, None) if no pattern was found.
"""
for typ, regexes in COMPILED_PATTERN_MAP.items():
for regex in regexes:
for m in range(max_wrap_lines):
match_line = join_wrapped_lines(lines[n:n+1+m])
if match_line.startswith('>'):
match_line = match_line[1:].strip()
if regex.match(match_line.strip()):
return n+m, typ
return None, None
def find_quote_position(lines, max_wrap_lines, limit=None):
"""
Returns the (ending) line number of a quoting pattern. If a limit is given
and the limit is reached, the limit is returned.
"""
for n in range(len(lines)):
end, typ = find_pattern_on_line(lines, n, max_wrap_lines)
if typ:
return end
if limit != None and n >= limit-1:
return n
return None
def join_wrapped_lines(lines):
"""
Join one or multiple lines that wrapped. Returns the reconstructed line.
Takes into account proper spacing between the lines (see
STRIP_SPACE_CHARS).
"""
if len(lines) == 1:
return lines[0]
joined = lines[0]
for line in lines[1:]:
if joined and joined[-1] in STRIP_SPACE_CHARS:
joined += line
else:
joined += ' '
joined += line
return joined
def extract_headers(lines, max_wrap_lines):
"""
Extracts email headers from the given lines. Returns a dict with the
detected headers and the amount of lines that were processed.
"""
hdrs = {}
header_name = None
# Track overlong headers that extend over multiple lines
extend_lines = 0
lines_processed = 0
for n, line in enumerate(lines):
if not line.strip():
header_name = None
continue
match = HEADER_RE.match(line)
if match:
header_name, header_value = match.groups()
header_name = header_name.strip().lower()
extend_lines = 0
if header_name in HEADER_MAP:
hdrs[HEADER_MAP[header_name]] = header_value.strip()
lines_processed = n+1
else:
extend_lines += 1
if extend_lines < max_wrap_lines and header_name in HEADER_MAP:
hdrs[HEADER_MAP[header_name]] = join_wrapped_lines(
[hdrs[HEADER_MAP[header_name]], line.strip()])
lines_processed = n+1
else:
# no more headers found
break
return hdrs, lines_processed
def find_unwrap_start(lines, max_wrap_lines, min_header_lines, min_quoted_lines):
"""
Finds the starting point of a wrapped email. Returns a tuple containing
(start_line_number, end_line_number, type), where type can be one of the
following:
* 'forward': A matching forwarding pattern was found
* 'reply': A matching reply pattern was found
* 'headers': Headers were found (usually a forwarded email)
* 'quote': A quote was found
start_line_number corresponds to the line number where the forwarding/reply
pattern starts, or where the headers/quote starts. end_line_number is only
different from start_line_number if the forwarding/reply pattern spans over
multiple lines (it does not extend to the end of the headers or of the
quoted section).
Returns (None, None, None) if nothing was found.
"""
for n, line in enumerate(lines):
if not line.strip():
continue
# Find a forward / reply start pattern
end, typ = find_pattern_on_line(lines, n, max_wrap_lines)
if typ:
return n, end, typ
# Find a quote
if line.startswith('>'):
# Check if there are at least min_quoted_lines lines that match
matched_lines = 1
if matched_lines >= min_quoted_lines:
return n, n, 'quoted'
for peek_line in lines[n+1:]:
if not peek_line.strip():
continue
if not peek_line.startswith('>'):
break
else:
matched_lines += 1
if matched_lines >= min_quoted_lines:
return n, n, 'quoted'
# Find a header
match = HEADER_RE.match(line)
if match:
if len(extract_headers(lines[n:], max_wrap_lines)[0]) >= min_header_lines:
return n, n, 'headers'
return None, None, None
def unindent_lines(lines):
unquoted = []
for n, line in enumerate(lines):
if line.startswith('> '):
unquoted.append(line[2:])
elif line.startswith('>'):
unquoted.append(line[1:])
else:
break
return unquoted
def unwrap(lines, max_wrap_lines, min_header_lines, min_quoted_lines):
"""
Returns a tuple of:
- Type ('forward', 'reply', 'headers', 'quoted')
- Range of the text at the top of the wrapped message (or None)
- Headers dict (or None)
- Range of the text of the wrapped message (or None)
- Range of the text below the wrapped message (or None)
- Whether the wrapped text needs to be unindented
"""
headers = {}
# Get line number and wrapping type.
start, end, typ = find_unwrap_start(lines, max_wrap_lines, min_header_lines, min_quoted_lines)
# We found a line indicating that it's a forward/reply.
if typ in ('forward', 'reply'):
main_type = typ
if typ == 'reply':
reply_headers = parse_reply(join_wrapped_lines(lines[start:end+1]))
if reply_headers:
headers.update(reply_headers)
# Find where the headers or the quoted section starts.
# We can set min_quoted_lines to 1 because we expect a quoted section.
start2, end2, typ = find_unwrap_start(lines[end+1:], max_wrap_lines, min_header_lines, 1)
if typ == 'quoted':
# Quoted section starts. Unindent and check if there are headers.
quoted_start = end+1+start2
unquoted = unindent_lines(lines[quoted_start:])
rest_start = quoted_start + len(unquoted)
start3, end3, typ = find_unwrap_start(unquoted, max_wrap_lines, min_header_lines, min_quoted_lines)
if typ == 'headers':
hdrs, hdrs_length = extract_headers(unquoted[start3:], max_wrap_lines)
if hdrs:
headers.update(hdrs)
rest2_start = quoted_start+start3+hdrs_length
return main_type, (0, start), headers, (rest2_start, rest_start), (rest_start, None), True
else:
return main_type, (0, start), headers, (quoted_start, rest_start), (rest_start, None), True
elif typ == 'headers':
hdrs, hdrs_length = extract_headers(lines[start+1:], max_wrap_lines)
if hdrs:
headers.update(hdrs)
rest_start = start + 1 + hdrs_length
return main_type, (0, start), headers, (rest_start, None), None, False
else:
# Didn't find quoted section or headers, assume that everything
# below is the qouted text.
return main_type, (0, start), headers, (start+(start2 or 0)+1, None), None, False
# We just found headers, which usually indicates a forwarding.
elif typ == 'headers':
main_type = 'forward'
hdrs, hdrs_length = extract_headers(lines[start:], max_wrap_lines)
rest_start = start + hdrs_length
return main_type, (0, start), hdrs, (rest_start, None), None, False
# We found quoted text. Headers may be within the quoted text.
elif typ == 'quoted':
unquoted = unindent_lines(lines[start:])
rest_start = start + len(unquoted)
start2, end2, typ = find_unwrap_start(unquoted, max_wrap_lines, min_header_lines, min_quoted_lines)
if typ == 'headers':
main_type = 'forward'
hdrs, hdrs_length = extract_headers(unquoted[start2:], max_wrap_lines)
rest2_start = start + hdrs_length
return main_type, (0, start), hdrs, (rest2_start, rest_start), (rest_start, None), True
else:
main_type = 'quote'
return main_type, (None, start), None, (start, rest_start), (rest_start, None), True
|
closeio/quotequail | quotequail/_internal.py | find_unwrap_start | python | def find_unwrap_start(lines, max_wrap_lines, min_header_lines, min_quoted_lines):
for n, line in enumerate(lines):
if not line.strip():
continue
# Find a forward / reply start pattern
end, typ = find_pattern_on_line(lines, n, max_wrap_lines)
if typ:
return n, end, typ
# Find a quote
if line.startswith('>'):
# Check if there are at least min_quoted_lines lines that match
matched_lines = 1
if matched_lines >= min_quoted_lines:
return n, n, 'quoted'
for peek_line in lines[n+1:]:
if not peek_line.strip():
continue
if not peek_line.startswith('>'):
break
else:
matched_lines += 1
if matched_lines >= min_quoted_lines:
return n, n, 'quoted'
# Find a header
match = HEADER_RE.match(line)
if match:
if len(extract_headers(lines[n:], max_wrap_lines)[0]) >= min_header_lines:
return n, n, 'headers'
return None, None, None | Finds the starting point of a wrapped email. Returns a tuple containing
(start_line_number, end_line_number, type), where type can be one of the
following:
* 'forward': A matching forwarding pattern was found
* 'reply': A matching reply pattern was found
* 'headers': Headers were found (usually a forwarded email)
* 'quote': A quote was found
start_line_number corresponds to the line number where the forwarding/reply
pattern starts, or where the headers/quote starts. end_line_number is only
different from start_line_number if the forwarding/reply pattern spans over
multiple lines (it does not extend to the end of the headers or of the
quoted section).
Returns (None, None, None) if nothing was found. | train | https://github.com/closeio/quotequail/blob/8a3960c033d595b25a8bbc2c340be898e3065b5f/quotequail/_internal.py#L143-L197 | [
"def find_pattern_on_line(lines, n, max_wrap_lines):\n \"\"\"\n Finds a forward/reply pattern within the given lines on text on the given\n line number and returns a tuple with the type ('reply' or 'forward') and\n line number of where the pattern ends. The returned line number may be\n different fro... | import re
from ._patterns import COMPILED_PATTERNS, COMPILED_PATTERN_MAP, HEADER_RE, HEADER_MAP, REPLY_DATE_SPLIT_REGEX, STRIP_SPACE_CHARS
"""
Internal methods. For max_wrap_lines, min_header_lines, min_quoted_lines
documentation see the corresponding constants in _patterns.py.
"""
def find_pattern_on_line(lines, n, max_wrap_lines):
"""
Finds a forward/reply pattern within the given lines on text on the given
line number and returns a tuple with the type ('reply' or 'forward') and
line number of where the pattern ends. The returned line number may be
different from the given line number in case the pattern wraps over
multiple lines.
Returns (None, None) if no pattern was found.
"""
for typ, regexes in COMPILED_PATTERN_MAP.items():
for regex in regexes:
for m in range(max_wrap_lines):
match_line = join_wrapped_lines(lines[n:n+1+m])
if match_line.startswith('>'):
match_line = match_line[1:].strip()
if regex.match(match_line.strip()):
return n+m, typ
return None, None
def find_quote_position(lines, max_wrap_lines, limit=None):
"""
Returns the (ending) line number of a quoting pattern. If a limit is given
and the limit is reached, the limit is returned.
"""
for n in range(len(lines)):
end, typ = find_pattern_on_line(lines, n, max_wrap_lines)
if typ:
return end
if limit != None and n >= limit-1:
return n
return None
def join_wrapped_lines(lines):
"""
Join one or multiple lines that wrapped. Returns the reconstructed line.
Takes into account proper spacing between the lines (see
STRIP_SPACE_CHARS).
"""
if len(lines) == 1:
return lines[0]
joined = lines[0]
for line in lines[1:]:
if joined and joined[-1] in STRIP_SPACE_CHARS:
joined += line
else:
joined += ' '
joined += line
return joined
def extract_headers(lines, max_wrap_lines):
    """
    Extracts email headers from the given lines. Returns a dict with the
    detected headers and the amount of lines that were processed.

    Header names are matched case-insensitively and only headers present in
    HEADER_MAP are kept (mapped to their canonical key). A non-header line
    directly following a known header is treated as a wrapped continuation
    of that header's value, for up to max_wrap_lines lines; the first line
    that is neither a header nor a plausible continuation ends the scan.
    """
    hdrs = {}
    header_name = None
    # Track overlong headers that extend over multiple lines
    extend_lines = 0
    # 1-based count of lines consumed; only advanced when a line actually
    # contributed to a recognized header.
    lines_processed = 0
    for n, line in enumerate(lines):
        if not line.strip():
            # Blank line: any in-progress header value ends here, but the
            # scan continues (headers may resume after a blank line).
            header_name = None
            continue
        match = HEADER_RE.match(line)
        if match:
            header_name, header_value = match.groups()
            header_name = header_name.strip().lower()
            extend_lines = 0
            if header_name in HEADER_MAP:
                hdrs[HEADER_MAP[header_name]] = header_value.strip()
                lines_processed = n+1
        else:
            extend_lines += 1
            if extend_lines < max_wrap_lines and header_name in HEADER_MAP:
                # Continuation of the previous known header's wrapped value.
                hdrs[HEADER_MAP[header_name]] = join_wrapped_lines(
                    [hdrs[HEADER_MAP[header_name]], line.strip()])
                lines_processed = n+1
            else:
                # no more headers found
                break
    return hdrs, lines_processed
def parse_reply(line):
    """
    Parse a reply line of the form "On DATE, USER wrote:".

    Returns a dict with the keys 'date' and 'from' when both parts could be
    extracted, otherwise None (implicitly). A leading ">" quote marker on
    the line is stripped before matching.
    """
    if line.startswith('>'):
        line = line[1:].strip()

    date = user = None
    # Try every reply pattern; later matches may refine earlier ones.
    for compiled in COMPILED_PATTERN_MAP['reply']:
        match = compiled.match(line)
        if not match:
            continue
        groups = match.groups()
        if len(groups) == 2:
            # Pattern captured date and user separately.
            date, user = groups
        else:
            # Single capture: attempt to split it into date and user.
            split_match = REPLY_DATE_SPLIT_REGEX.match(groups[0])
            if split_match:
                split_groups = split_match.groups()
                date = split_groups[0]
                user = split_groups[-1]
            else:
                # Fall back to splitting on the last comma.
                pieces = groups[0].rsplit(',', 1)
                if len(pieces) == 2:
                    date, user = pieces

    if date:
        date = date.strip()
    if user:
        user = user.strip()

    if date and user:
        return {
            'date': date.strip(),
            'from': user.strip(),
        }
def unindent_lines(lines):
    """
    Remove one level of quoting ("> " or ">") from the given lines.

    Stops at the first line that is not quoted and returns only the
    unquoted lines collected up to that point; the remainder is excluded.
    """
    unquoted = []
    # Note: the loop index from the original enumerate() was never used.
    for line in lines:
        if line.startswith('> '):
            unquoted.append(line[2:])
        elif line.startswith('>'):
            unquoted.append(line[1:])
        else:
            # First non-quoted line terminates the quoted section.
            break

    return unquoted
def unwrap(lines, max_wrap_lines, min_header_lines, min_quoted_lines):
    """
    Returns a tuple of:
    - Type ('forward', 'reply', 'headers', 'quoted')
    - Range of the text at the top of the wrapped message (or None)
    - Headers dict (or None)
    - Range of the text of the wrapped message (or None)
    - Range of the text below the wrapped message (or None)
    - Whether the wrapped text needs to be unindented

    Ranges are (start, end) line-number tuples; an end of None means "to the
    end of the text".
    """
    headers = {}
    # Get line number and wrapping type.
    start, end, typ = find_unwrap_start(lines, max_wrap_lines, min_header_lines, min_quoted_lines)
    # We found a line indicating that it's a forward/reply.
    if typ in ('forward', 'reply'):
        main_type = typ
        if typ == 'reply':
            # Extract date/from out of the (possibly wrapped) reply line.
            reply_headers = parse_reply(join_wrapped_lines(lines[start:end+1]))
            if reply_headers:
                headers.update(reply_headers)
        # Find where the headers or the quoted section starts.
        # We can set min_quoted_lines to 1 because we expect a quoted section.
        start2, end2, typ = find_unwrap_start(lines[end+1:], max_wrap_lines, min_header_lines, 1)
        if typ == 'quoted':
            # Quoted section starts. Unindent and check if there are headers.
            quoted_start = end+1+start2
            unquoted = unindent_lines(lines[quoted_start:])
            rest_start = quoted_start + len(unquoted)
            start3, end3, typ = find_unwrap_start(unquoted, max_wrap_lines, min_header_lines, min_quoted_lines)
            if typ == 'headers':
                # Headers inside the quoted block (quoted forward).
                hdrs, hdrs_length = extract_headers(unquoted[start3:], max_wrap_lines)
                if hdrs:
                    headers.update(hdrs)
                rest2_start = quoted_start+start3+hdrs_length
                return main_type, (0, start), headers, (rest2_start, rest_start), (rest_start, None), True
            else:
                return main_type, (0, start), headers, (quoted_start, rest_start), (rest_start, None), True
        elif typ == 'headers':
            hdrs, hdrs_length = extract_headers(lines[start+1:], max_wrap_lines)
            if hdrs:
                headers.update(hdrs)
            rest_start = start + 1 + hdrs_length
            return main_type, (0, start), headers, (rest_start, None), None, False
        else:
            # Didn't find quoted section or headers, assume that everything
            # below is the quoted text.
            return main_type, (0, start), headers, (start+(start2 or 0)+1, None), None, False
    # We just found headers, which usually indicates a forwarding.
    elif typ == 'headers':
        main_type = 'forward'
        hdrs, hdrs_length = extract_headers(lines[start:], max_wrap_lines)
        rest_start = start + hdrs_length
        return main_type, (0, start), hdrs, (rest_start, None), None, False
    # We found quoted text. Headers may be within the quoted text.
    elif typ == 'quoted':
        unquoted = unindent_lines(lines[start:])
        rest_start = start + len(unquoted)
        start2, end2, typ = find_unwrap_start(unquoted, max_wrap_lines, min_header_lines, min_quoted_lines)
        if typ == 'headers':
            main_type = 'forward'
            hdrs, hdrs_length = extract_headers(unquoted[start2:], max_wrap_lines)
            # NOTE(review): unlike the branch above, start2 is not added here
            # (rest2_start = start + hdrs_length, not start + start2 +
            # hdrs_length) — confirm whether this asymmetry is intentional.
            rest2_start = start + hdrs_length
            return main_type, (0, start), hdrs, (rest2_start, rest_start), (rest_start, None), True
        else:
            main_type = 'quote'
            return main_type, (None, start), None, (start, rest_start), (rest_start, None), True
|
closeio/quotequail | quotequail/_internal.py | unwrap | python | def unwrap(lines, max_wrap_lines, min_header_lines, min_quoted_lines):
headers = {}
# Get line number and wrapping type.
start, end, typ = find_unwrap_start(lines, max_wrap_lines, min_header_lines, min_quoted_lines)
# We found a line indicating that it's a forward/reply.
if typ in ('forward', 'reply'):
main_type = typ
if typ == 'reply':
reply_headers = parse_reply(join_wrapped_lines(lines[start:end+1]))
if reply_headers:
headers.update(reply_headers)
# Find where the headers or the quoted section starts.
# We can set min_quoted_lines to 1 because we expect a quoted section.
start2, end2, typ = find_unwrap_start(lines[end+1:], max_wrap_lines, min_header_lines, 1)
if typ == 'quoted':
# Quoted section starts. Unindent and check if there are headers.
quoted_start = end+1+start2
unquoted = unindent_lines(lines[quoted_start:])
rest_start = quoted_start + len(unquoted)
start3, end3, typ = find_unwrap_start(unquoted, max_wrap_lines, min_header_lines, min_quoted_lines)
if typ == 'headers':
hdrs, hdrs_length = extract_headers(unquoted[start3:], max_wrap_lines)
if hdrs:
headers.update(hdrs)
rest2_start = quoted_start+start3+hdrs_length
return main_type, (0, start), headers, (rest2_start, rest_start), (rest_start, None), True
else:
return main_type, (0, start), headers, (quoted_start, rest_start), (rest_start, None), True
elif typ == 'headers':
hdrs, hdrs_length = extract_headers(lines[start+1:], max_wrap_lines)
if hdrs:
headers.update(hdrs)
rest_start = start + 1 + hdrs_length
return main_type, (0, start), headers, (rest_start, None), None, False
else:
# Didn't find quoted section or headers, assume that everything
        # below is the quoted text.
return main_type, (0, start), headers, (start+(start2 or 0)+1, None), None, False
# We just found headers, which usually indicates a forwarding.
elif typ == 'headers':
main_type = 'forward'
hdrs, hdrs_length = extract_headers(lines[start:], max_wrap_lines)
rest_start = start + hdrs_length
return main_type, (0, start), hdrs, (rest_start, None), None, False
# We found quoted text. Headers may be within the quoted text.
elif typ == 'quoted':
unquoted = unindent_lines(lines[start:])
rest_start = start + len(unquoted)
start2, end2, typ = find_unwrap_start(unquoted, max_wrap_lines, min_header_lines, min_quoted_lines)
if typ == 'headers':
main_type = 'forward'
hdrs, hdrs_length = extract_headers(unquoted[start2:], max_wrap_lines)
rest2_start = start + hdrs_length
return main_type, (0, start), hdrs, (rest2_start, rest_start), (rest_start, None), True
else:
main_type = 'quote'
return main_type, (None, start), None, (start, rest_start), (rest_start, None), True | Returns a tuple of:
- Type ('forward', 'reply', 'headers', 'quoted')
- Range of the text at the top of the wrapped message (or None)
- Headers dict (or None)
- Range of the text of the wrapped message (or None)
- Range of the text below the wrapped message (or None)
- Whether the wrapped text needs to be unindented | train | https://github.com/closeio/quotequail/blob/8a3960c033d595b25a8bbc2c340be898e3065b5f/quotequail/_internal.py#L212-L286 | [
"def unindent_lines(lines):\n unquoted = []\n for n, line in enumerate(lines):\n if line.startswith('> '):\n unquoted.append(line[2:])\n elif line.startswith('>'):\n unquoted.append(line[1:])\n else:\n break\n\n return unquoted\n",
"def join_wrapped_l... | import re
from ._patterns import COMPILED_PATTERNS, COMPILED_PATTERN_MAP, HEADER_RE, HEADER_MAP, REPLY_DATE_SPLIT_REGEX, STRIP_SPACE_CHARS
"""
Internal methods. For max_wrap_lines, min_header_lines, min_quoted_lines
documentation see the corresponding constants in _patterns.py.
"""
def find_pattern_on_line(lines, n, max_wrap_lines):
"""
Finds a forward/reply pattern within the given lines on text on the given
line number and returns a tuple with the type ('reply' or 'forward') and
line number of where the pattern ends. The returned line number may be
different from the given line number in case the pattern wraps over
multiple lines.
Returns (None, None) if no pattern was found.
"""
for typ, regexes in COMPILED_PATTERN_MAP.items():
for regex in regexes:
for m in range(max_wrap_lines):
match_line = join_wrapped_lines(lines[n:n+1+m])
if match_line.startswith('>'):
match_line = match_line[1:].strip()
if regex.match(match_line.strip()):
return n+m, typ
return None, None
def find_quote_position(lines, max_wrap_lines, limit=None):
"""
Returns the (ending) line number of a quoting pattern. If a limit is given
and the limit is reached, the limit is returned.
"""
for n in range(len(lines)):
end, typ = find_pattern_on_line(lines, n, max_wrap_lines)
if typ:
return end
if limit != None and n >= limit-1:
return n
return None
def join_wrapped_lines(lines):
"""
Join one or multiple lines that wrapped. Returns the reconstructed line.
Takes into account proper spacing between the lines (see
STRIP_SPACE_CHARS).
"""
if len(lines) == 1:
return lines[0]
joined = lines[0]
for line in lines[1:]:
if joined and joined[-1] in STRIP_SPACE_CHARS:
joined += line
else:
joined += ' '
joined += line
return joined
def extract_headers(lines, max_wrap_lines):
"""
Extracts email headers from the given lines. Returns a dict with the
detected headers and the amount of lines that were processed.
"""
hdrs = {}
header_name = None
# Track overlong headers that extend over multiple lines
extend_lines = 0
lines_processed = 0
for n, line in enumerate(lines):
if not line.strip():
header_name = None
continue
match = HEADER_RE.match(line)
if match:
header_name, header_value = match.groups()
header_name = header_name.strip().lower()
extend_lines = 0
if header_name in HEADER_MAP:
hdrs[HEADER_MAP[header_name]] = header_value.strip()
lines_processed = n+1
else:
extend_lines += 1
if extend_lines < max_wrap_lines and header_name in HEADER_MAP:
hdrs[HEADER_MAP[header_name]] = join_wrapped_lines(
[hdrs[HEADER_MAP[header_name]], line.strip()])
lines_processed = n+1
else:
# no more headers found
break
return hdrs, lines_processed
def parse_reply(line):
"""
Parses the given reply line ("On DATE, USER wrote:") and returns a
dictionary with the "Date" and "From" keys, or None, if couldn't parse.
"""
if line.startswith('>'):
line = line[1:].strip()
date = user = None
for pattern in COMPILED_PATTERN_MAP['reply']:
match = pattern.match(line)
if match:
groups = match.groups()
if len(groups) == 2:
# We're lucky and got both date and user split up.
date, user = groups
else:
split_match = REPLY_DATE_SPLIT_REGEX.match(groups[0])
if split_match:
split_groups = split_match.groups()
date = split_groups[0]
user = split_groups[-1]
else:
# Try a simple comma split
split = groups[0].rsplit(',', 1)
if len(split) == 2:
date, user = split
if date:
date = date.strip()
if user:
user = user.strip()
if date and user:
return {
'date': date.strip(),
'from': user.strip(),
}
def find_unwrap_start(lines, max_wrap_lines, min_header_lines, min_quoted_lines):
    """
    Finds the starting point of a wrapped email. Returns a tuple containing
    (start_line_number, end_line_number, type), where type can be one of the
    following:

     * 'forward': A matching forwarding pattern was found
     * 'reply': A matching reply pattern was found
     * 'headers': Headers were found (usually a forwarded email)
     * 'quote': A quote was found

    start_line_number corresponds to the line number where the forwarding/reply
    pattern starts, or where the headers/quote starts. end_line_number is only
    different from start_line_number if the forwarding/reply pattern spans over
    multiple lines (it does not extend to the end of the headers or of the
    quoted section).

    Returns (None, None, None) if nothing was found.
    """
    for n, line in enumerate(lines):
        # Skip blank lines entirely.
        if not line.strip():
            continue
        # Find a forward / reply start pattern
        end, typ = find_pattern_on_line(lines, n, max_wrap_lines)
        if typ:
            return n, end, typ
        # Find a quote
        if line.startswith('>'):
            # Check if there are at least min_quoted_lines lines that match
            matched_lines = 1
            if matched_lines >= min_quoted_lines:
                return n, n, 'quoted'
            # Peek ahead for more quoted lines; blank lines are ignored and
            # the first non-quoted, non-blank line ends the quoted run.
            for peek_line in lines[n+1:]:
                if not peek_line.strip():
                    continue
                if not peek_line.startswith('>'):
                    break
                else:
                    matched_lines += 1
                    if matched_lines >= min_quoted_lines:
                        return n, n, 'quoted'
        # Find a header
        match = HEADER_RE.match(line)
        if match:
            # Only treat this as a header block if enough known headers
            # follow, to avoid false positives on "Word: text" lines.
            if len(extract_headers(lines[n:], max_wrap_lines)[0]) >= min_header_lines:
                return n, n, 'headers'
    return None, None, None
def unindent_lines(lines):
unquoted = []
for n, line in enumerate(lines):
if line.startswith('> '):
unquoted.append(line[2:])
elif line.startswith('>'):
unquoted.append(line[1:])
else:
break
return unquoted
|
closeio/quotequail | quotequail/_html.py | trim_tree_after | python | def trim_tree_after(element, include_element=True):
el = element
for parent_el in element.iterancestors():
el.tail = None
if el != element or include_element:
el = el.getnext()
while el is not None:
remove_el = el
el = el.getnext()
parent_el.remove(remove_el)
el = parent_el | Removes the document tree following the given element. If include_element
is True, the given element is kept in the tree, otherwise it is removed. | train | https://github.com/closeio/quotequail/blob/8a3960c033d595b25a8bbc2c340be898e3065b5f/quotequail/_html.py#L19-L33 | null | # HTML utils
import lxml.html
import lxml.etree
from ._patterns import FORWARD_LINE, FORWARD_STYLES, MULTIPLE_WHITESPACE_RE
INLINE_TAGS = ['a', 'b', 'em', 'i', 'strong', 'span', 'font', 'q',
'object', 'bdo', 'sub', 'sup', 'center', 'td', 'th']
BEGIN = 'begin'
END = 'end'
try:
string_class = basestring # Python 2.7
except NameError:
string_class = str # Python 3.x
def trim_tree_before(element, include_element=True, keep_head=True):
    """
    Removes the document tree preceding the given element. If include_element
    is True, the given element is kept in the tree, otherwise it is removed.
    If keep_head is True, a preceding <head> element is never removed.

    The tree is modified in place.
    """
    el = element
    # Walk up the ancestor chain; at each level remove every sibling that
    # precedes the current element.
    for parent_el in element.iterancestors():
        parent_el.text = None
        if el != element or include_element:
            el = el.getprevious()
        else:
            # The element itself is being dropped; keep its tail text as the
            # parent's leading text so no visible text is lost.
            parent_el.text = el.tail
        while el is not None:
            remove_el = el
            el = el.getprevious()
            tag = remove_el.tag
            # el.tag is not a string for comments/processing instructions.
            is_head = isinstance(tag, string_class) and tag.lower() == 'head'
            if not keep_head or not is_head:
                parent_el.remove(remove_el)
        el = parent_el
def trim_slice(lines, slice_tuple):
    """
    Normalize a (begin, end) slice tuple so that it starts at the first
    non-empty line and ends right after the last non-empty line within the
    slice. A line counts as empty when it is falsy or contains only a ">"
    quote marker. A bound of None means the start/end of ``lines``.

    Returns the adjusted (begin, end) tuple, or None if no slice was given.
    """
    if not slice_tuple:
        return None

    def _is_blank(candidate):
        return not candidate or candidate.strip() == '>'

    begin, end = slice_tuple
    begin = 0 if begin is None else begin
    end = len(lines) if end is None else end

    # Advance past leading empty lines.
    while begin < end and _is_blank(lines[begin]):
        begin += 1

    # Retreat past trailing empty lines.
    while end > begin and _is_blank(lines[end - 1]):
        end -= 1

    return (begin, end)
def unindent_tree(element):
    """
    Removes the outermost indentation by converting the first <blockquote>
    found in document order into a plain <div>. For example, the tree
    "<div>A<blockquote>B<div>C<blockquote>D</blockquote>E</div>F</blockquote>G</div>"
    is transformed to
    "<div>A<div>B<div>C<blockquote>D</blockquote>E</div>F</div>G</div>"
    """
    for node in element.iter():
        if not is_indentation_element(node):
            continue
        node.attrib.clear()
        node.tag = 'div'
        return
def slice_tree(tree, start_refs, end_refs, slice_tuple, html_copy=None):
    """
    Slices the HTML tree with the given start_refs and end_refs (obtained via
    get_line_info) at the given slice_tuple, a tuple (start, end) containing
    the start and end of the slice (or None, to start from the start / end at
    the end of the tree). If html_copy is specified, a new tree is constructed
    from the given HTML (which must be equal to the original tree's HTML*).
    The resulting tree is returned.

    *) The reason we have to specify the HTML is that we can't reliably
    construct a copy of the tree using copy.copy() (see bug
    https://bugs.launchpad.net/lxml/+bug/1562550).
    """
    start_ref = None
    end_ref = None

    if slice_tuple:
        slice_start, slice_end = slice_tuple
        # Slice entirely outside the document: return an empty tree.
        if ((slice_start is not None and slice_start >= len(start_refs)) or
                (slice_end is not None and slice_end <= 0)):
            return get_html_tree('')
        # Bounds that cover a whole side are treated as "no trimming".
        # (Use `is not None` rather than `!= None` per PEP 8.)
        if slice_start is not None and slice_start <= 0:
            slice_start = None
        if slice_end is not None and slice_end >= len(start_refs):
            slice_end = None
    else:
        slice_start, slice_end = None, None

    if slice_start is not None:
        start_ref = start_refs[slice_start]
    if slice_end is not None:
        if slice_end < len(end_refs):
            end_ref = end_refs[slice_end-1]

    if html_copy is not None:
        # Map the references onto a freshly parsed, identical tree by
        # resolving their element paths in the new tree.
        et = lxml.etree.ElementTree(tree)
        new_tree = get_html_tree(html_copy)
        if start_ref:
            selector = et.getelementpath(start_ref[0])
            start_ref = (new_tree.find(selector), start_ref[1])
        if end_ref:
            selector = et.getelementpath(end_ref[0])
            end_ref = (new_tree.find(selector), end_ref[1])
    else:
        new_tree = tree

    if start_ref:
        include_start = (start_ref[1] == BEGIN)
    if end_ref:
        include_end = (end_ref[1] == END)

    # If start_ref is the same as end_ref, and we don't include the element,
    # we are removing the entire tree. We need to handle this separately,
    # otherwise trim_tree_after won't work because it can't find the already
    # removed reference.
    if start_ref and end_ref and start_ref[0] == end_ref[0]:
        if not include_start or not include_end:
            return get_html_tree('')

    if start_ref:
        trim_tree_before(start_ref[0], include_element=include_start)
    if end_ref:
        trim_tree_after(end_ref[0], include_element=include_end)

    return new_tree
def get_html_tree(html):
    """
    Given the HTML string, returns a LXML tree object. The tree is wrapped in
    <div> elements if it doesn't have a top level tag or parsing would
    otherwise result in an error. The wrapping can be later removed with
    strip_wrapping().
    """
    parser = lxml.html.HTMLParser(encoding='utf-8')
    # lxml expects bytes when the parser is created with an explicit encoding.
    html = html.encode('utf8')
    try:
        tree = lxml.html.fromstring(html, parser=parser)
    except lxml.etree.Error:
        # E.g. empty document. Use dummy <div>
        tree = lxml.html.fromstring('<div></div>')
    # If the document doesn't start with a top level tag, wrap it with a <div>
    # that will be later stripped out for consistent behavior.
    if tree.tag not in lxml.html.defs.top_level_tags:
        html = b'<div>%s</div>' % html
        tree = lxml.html.fromstring(html, parser=parser)
    # HACK for Outlook emails, where tags like <o:p> are rendered as <p>. We
    # can generally ignore these tags so we replace them with <span>, which
    # doesn't cause a line break. Also, we can't look up the element path of
    # tags that contain colons. When rendering the tree, we will restore the
    # tag name.
    for el in tree.iter():
        if el.nsmap or (isinstance(el.tag, string_class) and ':' in el.tag):
            if el.nsmap:
                # Reconstruct the original "prefix:tag" name from the
                # element's namespace map.
                actual_tag_name = '{}:{}'.format(list(el.nsmap.keys())[0], el.tag)
            else:
                actual_tag_name = el.tag
            el.tag = 'span'
            # Stashed so render_html_tree() can restore the original tag.
            el.attrib['__tag_name'] = actual_tag_name
    return tree
def strip_wrapping(html):
    """
    Undo the <div> wrapping that get_html_tree() may have added and return
    the stripped inner HTML.
    """
    is_wrapped = html.startswith('<div>') and html.endswith('</div>')
    if is_wrapped:
        html = html[len('<div>'):-len('</div>')]
    return html.strip()
def render_html_tree(tree):
    """
    Render the given HTML tree to a string, stripping any wrapping applied
    by get_html_tree().

    Avoid further processing of the tree after calling this: namespaced tags
    renamed in get_html_tree() are restored in place here.
    """
    # Restore original tag names stashed by get_html_tree().
    for node in tree.iter():
        original_tag = node.attrib.pop('__tag_name', None)
        if original_tag is not None:
            node.tag = original_tag

    rendered = lxml.html.tostring(tree, encoding='utf8').decode('utf8')
    return strip_wrapping(rendered)
def is_indentation_element(element):
    """Return True if the element is a <blockquote> (email indentation)."""
    tag = element.tag
    return isinstance(tag, string_class) and tag.lower() == 'blockquote'
def tree_token_generator(el, indentation_level=0):
    """
    Internal generator that yields tokens for the given HTML element as
    follows:

    - A tuple (LXML element, BEGIN, indentation_level)
    - Text right after the start of the tag, or None.
    - Recursively calls the token generator for all child objects
    - A tuple (LXML element, END, indentation_level)
    - Text right after the end of the tag, or None.
    """
    # Skip nodes whose .tag is not a string (comments, processing
    # instructions).
    if not isinstance(el.tag, string_class):
        return

    # (Removed an unused `tag_name = el.tag.lower()` local.)
    is_indentation = is_indentation_element(el)

    if is_indentation:
        indentation_level += 1

    yield (el, BEGIN, indentation_level)
    yield el.text

    for child in el.iterchildren():
        # Python 2 compatible equivalent of "yield from".
        for token in tree_token_generator(child, indentation_level):
            yield token

    if is_indentation:
        indentation_level -= 1

    yield (el, END, indentation_level)
    yield el.tail
def tree_line_generator(el, max_lines=None):
    """
    Internal generator that iterates through an LXML tree and yields a tuple
    per line. In this context, lines are blocks of text separated by <br> tags
    or by block elements. The tuples contain the following elements:

    - A tuple with the element reference (element, position) for the start
      of the line. The tuple consists of:
      - The LXML HTML element which references the line
      - Whether the text starts at the beginning of the referenced element,
        or after the closing tag
    - A similar tuple indicating the ending of the line.
    - The email indentation level, if detected.
    - The plain (non-HTML) text of the line

    If max_lines is specified, the generator stops after yielding the given
    amount of lines.

    For example, the HTML tree "<div>foo <span>bar</span><br>baz</div>" yields:
    - ((<Element div>, 'begin'), (<Element br>, 'begin'), 0, 'foo bar')
    - ((<Element br>, 'end'), (<Element div>, 'end'), 0, 'baz').

    To illustrate the indentation level, the HTML tree
    '<div><blockquote>hi</blockquote>world</div>' yields:
    - ((<Element blockquote>, 'begin'), (<Element blockquote>, 'end'), 1, 'hi')
    - ((<Element blockquote>, 'end'), (<Element div>, 'end'), 0, 'world')
    """
    def _trim_spaces(text):
        return MULTIPLE_WHITESPACE_RE.sub(' ', text).strip()

    counter = 1
    if max_lines is not None and counter > max_lines:
        return

    # Buffer for the current line.
    line = ''
    # The reference tuple (element, position) for the start of the line.
    start_ref = None
    # The indentation level at the start of the line.
    start_indentation_level = None

    for token in tree_token_generator(el):
        if token is None:
            continue
        elif isinstance(token, tuple):
            el, state, indentation_level = token
            tag_name = el.tag.lower()

            line_break = (tag_name == 'br' and state == BEGIN)
            is_block = (tag_name not in INLINE_TAGS)
            is_forward = (is_block and state == BEGIN and
                          el.attrib.get('style') in FORWARD_STYLES)

            if is_block or line_break:
                line = _trim_spaces(line)
                if line or line_break or is_forward:
                    end_ref = (el, state)
                    yield start_ref, end_ref, start_indentation_level, line
                    counter += 1
                    if max_lines is not None and counter > max_lines:
                        return
                    line = ''

                    if is_forward:
                        # Simulate forward
                        yield (end_ref, end_ref, start_indentation_level,
                               FORWARD_LINE)
                        counter += 1
                        if max_lines is not None and counter > max_lines:
                            return

            if not line:
                start_ref = (el, state)
                start_indentation_level = indentation_level
        elif isinstance(token, string_class):
            line += token
        else:
            raise RuntimeError('invalid token: {}'.format(token))

    line = _trim_spaces(line)
    if line:
        # BUGFIX: the original yielded the bare string here, while every
        # other yield produces a 4-tuple; consumers that unpack
        # (start_ref, end_ref, indentation_level, line) would crash on a
        # trailing text fragment. Yield the same 4-tuple shape instead
        # (no closing element follows, so reuse start_ref as the end ref).
        yield start_ref, start_ref, start_indentation_level, line
def indented_tree_line_generator(el, max_lines=None):
    """
    Like tree_line_generator, but yields tuples (start_ref, end_ref, line),
    where the line has "> " prepended once per indentation level. Lines that
    already begin with ">" are escaped as "\\>" so plain-text quote
    detection can be applied reliably to the output.
    """
    for start_ref, end_ref, level, text in tree_line_generator(el, max_lines):
        escaped = '\\' + text if text.startswith('>') else text
        yield start_ref, end_ref, '> ' * level + escaped
def get_line_info(tree, max_lines=None):
    """
    Shortcut for indented_tree_line_generator(): returns three parallel
    sequences — start references, end references (see tree_line_generator()
    docs), and the corresponding lines.
    """
    transposed = list(zip(*indented_tree_line_generator(tree,
                                                        max_lines=max_lines)))
    if not transposed:
        return [], [], []
    return transposed
|
closeio/quotequail | quotequail/_html.py | trim_tree_before | python | def trim_tree_before(element, include_element=True, keep_head=True):
el = element
for parent_el in element.iterancestors():
parent_el.text = None
if el != element or include_element:
el = el.getprevious()
else:
parent_el.text = el.tail
while el is not None:
remove_el = el
el = el.getprevious()
tag = remove_el.tag
is_head = isinstance(tag, string_class) and tag.lower() == 'head'
if not keep_head or not is_head:
parent_el.remove(remove_el)
el = parent_el | Removes the document tree preceding the given element. If include_element
is True, the given element is kept in the tree, otherwise it is removed. | train | https://github.com/closeio/quotequail/blob/8a3960c033d595b25a8bbc2c340be898e3065b5f/quotequail/_html.py#L35-L54 | null | # HTML utils
import lxml.html
import lxml.etree
from ._patterns import FORWARD_LINE, FORWARD_STYLES, MULTIPLE_WHITESPACE_RE
INLINE_TAGS = ['a', 'b', 'em', 'i', 'strong', 'span', 'font', 'q',
'object', 'bdo', 'sub', 'sup', 'center', 'td', 'th']
BEGIN = 'begin'
END = 'end'
try:
string_class = basestring # Python 2.7
except NameError:
string_class = str # Python 3.x
def trim_tree_after(element, include_element=True):
    """
    Removes the document tree following the given element. If include_element
    is True, the given element is kept in the tree, otherwise it is removed.

    The tree is modified in place.
    """
    el = element
    # Walk up the ancestor chain; at each level remove every sibling that
    # follows the current element.
    for parent_el in element.iterancestors():
        el.tail = None
        if el != element or include_element:
            el = el.getnext()
        while el is not None:
            remove_el = el
            el = el.getnext()
            parent_el.remove(remove_el)
        el = parent_el
def trim_slice(lines, slice_tuple):
"""
Trim a slice tuple (begin, end) so it starts at the first non-empty line
(obtained via indented_tree_line_generator / get_line_info) and ends at the
last non-empty line within the slice. Returns the new slice.
"""
def _empty(line):
return not line or line.strip() == '>'
if not slice_tuple:
return None
slice_start, slice_end = slice_tuple
if slice_start is None:
slice_start = 0
if slice_end is None:
slice_end = len(lines)
# Trim from beginning
while slice_start < slice_end and _empty(lines[slice_start]):
slice_start += 1
# Trim from end
while slice_end > slice_start and _empty(lines[slice_end-1]):
slice_end -= 1
return (slice_start, slice_end)
def unindent_tree(element):
"""
Removes the outermost indent. For example, the tree
    "<div>A<blockquote>B<div>C<blockquote>D</blockquote>E</div>F</blockquote>G</div>"
is transformed to
"<div>A<div>B<div>C<blockquote>D</blockquote>E</div>F</div>G</div>"
"""
for el in element.iter():
if is_indentation_element(el):
el.attrib.clear()
el.tag = 'div'
return
def slice_tree(tree, start_refs, end_refs, slice_tuple, html_copy=None):
"""
Slices the HTML tree with the given start_refs and end_refs (obtained via
get_line_info) at the given slice_tuple, a tuple (start, end) containing
the start and end of the slice (or None, to start from the start / end at
the end of the tree). If html_copy is specified, a new tree is constructed
from the given HTML (which must be the equal to the original tree's HTML*).
The resulting tree is returned.
*) The reason we have to specify the HTML is that we can't reliably
construct a copy of the tree using copy.copy() (see bug
https://bugs.launchpad.net/lxml/+bug/1562550).
"""
start_ref = None
end_ref = None
if slice_tuple:
slice_start, slice_end = slice_tuple
if ((slice_start is not None and slice_start >= len(start_refs)) or
(slice_end is not None and slice_end <= 0)):
return get_html_tree('')
if slice_start != None and slice_start <= 0:
slice_start = None
if slice_end != None and slice_end >= len(start_refs):
slice_end = None
else:
slice_start, slice_end = None, None
if slice_start is not None:
start_ref = start_refs[slice_start]
if slice_end is not None:
if slice_end < len(end_refs):
end_ref = end_refs[slice_end-1]
if html_copy is not None:
et = lxml.etree.ElementTree(tree)
new_tree = get_html_tree(html_copy)
if start_ref:
selector = et.getelementpath(start_ref[0])
start_ref = (new_tree.find(selector), start_ref[1])
if end_ref:
selector = et.getelementpath(end_ref[0])
end_ref = (new_tree.find(selector), end_ref[1])
else:
new_tree = tree
if start_ref:
include_start = (start_ref[1] == BEGIN)
if end_ref:
include_end = (end_ref[1] == END)
# If start_ref is the same as end_ref, and we don't include the element,
# we are removing the entire tree. We need to handle this separately,
# otherwise trim_tree_after won't work because it can't find the already
# removed reference.
if start_ref and end_ref and start_ref[0] == end_ref[0]:
if not include_start or not include_end:
return get_html_tree('')
if start_ref:
trim_tree_before(start_ref[0], include_element=include_start)
if end_ref:
trim_tree_after(end_ref[0], include_element=include_end)
return new_tree
def get_html_tree(html):
"""
Given the HTML string, returns a LXML tree object. The tree is wrapped in
<div> elements if it doesn't have a top level tag or parsing would
otherwise result in an error. The wrapping can be later removed with
strip_wrapping().
"""
parser = lxml.html.HTMLParser(encoding='utf-8')
html = html.encode('utf8')
try:
tree = lxml.html.fromstring(html, parser=parser)
except lxml.etree.Error:
# E.g. empty document. Use dummy <div>
tree = lxml.html.fromstring('<div></div>')
# If the document doesn't start with a top level tag, wrap it with a <div>
# that will be later stripped out for consistent behavior.
if tree.tag not in lxml.html.defs.top_level_tags:
html = b'<div>%s</div>' % html
tree = lxml.html.fromstring(html, parser=parser)
# HACK for Outlook emails, where tags like <o:p> are rendered as <p>. We
# can generally ignore these tags so we replace them with <span>, which
# doesn't cause a line break. Also, we can't look up the element path of
# tags that contain colons. When rendering the tree, we will restore the
# tag name.
for el in tree.iter():
if el.nsmap or (isinstance(el.tag, string_class) and ':' in el.tag):
if el.nsmap:
actual_tag_name = '{}:{}'.format(list(el.nsmap.keys())[0], el.tag)
else:
actual_tag_name = el.tag
el.tag = 'span'
el.attrib['__tag_name'] = actual_tag_name
return tree
def strip_wrapping(html):
"""
Removes the wrapping that might have resulted when using get_html_tree().
"""
if html.startswith('<div>') and html.endswith('</div>'):
html = html[5:-6]
return html.strip()
def render_html_tree(tree):
"""
Renders the given HTML tree, and strips any wrapping that was applied in
get_html_tree().
You should avoid further processing of the given tree after calling this
method because we modify namespaced tags here.
"""
# Restore any tag names that were changed in get_html_tree()
for el in tree.iter():
if '__tag_name' in el.attrib:
actual_tag_name = el.attrib.pop('__tag_name')
el.tag = actual_tag_name
html = lxml.html.tostring(tree, encoding='utf8').decode('utf8')
return strip_wrapping(html)
def is_indentation_element(element):
if isinstance(element.tag, string_class):
return element.tag.lower() == 'blockquote'
return False
def tree_token_generator(el, indentation_level=0):
    """
    Internal generator that yields tokens for the given HTML element as
    follows:

    - A tuple (LXML element, BEGIN, indentation_level)
    - Text right after the start of the tag, or None.
    - Recursively calls the token generator for all child objects
    - A tuple (LXML element, END, indentation_level)
    - Text right after the end of the tag, or None.
    """
    # Skip non-element nodes (comments, processing instructions), whose
    # .tag attribute is not a string.
    if not isinstance(el.tag, string_class):
        return

    is_indentation = is_indentation_element(el)

    if is_indentation:
        indentation_level += 1

    yield (el, BEGIN, indentation_level)
    yield el.text

    for child in el.iterchildren():
        # Recurse manually instead of `yield from`: the file still supports
        # Python 2.7 (see the string_class fallback at the top of the module).
        for token in tree_token_generator(child, indentation_level):
            yield token

    if is_indentation:
        indentation_level -= 1

    yield (el, END, indentation_level)
    yield el.tail
def tree_line_generator(el, max_lines=None):
    """
    Internal generator that iterates through an LXML tree and yields a tuple
    per line. In this context, lines are blocks of text separated by <br> tags
    or by block elements. The tuples contain the following elements:

    - A tuple with the element reference (element, position) for the start
      of the line. The tuple consists of:
      - The LXML HTML element which references the line
      - Whether the text starts at the beginning of the referenced element,
        or after the closing tag
    - A similar tuple indicating the ending of the line.
    - The email indentation level, if detected.
    - The plain (non-HTML) text of the line

    If max_lines is specified, the generator stops after yielding the given
    amount of lines.

    For example, the HTML tree "<div>foo <span>bar</span><br>baz</div>" yields:

    - ((<Element div>, 'begin'), (<Element br>, 'begin'), 0, 'foo bar')
    - ((<Element br>, 'end'), (<Element div>, 'end'), 0, 'baz').

    To illustrate the indentation level, the HTML tree
    '<div><blockquote>hi</blockquote>world</div>' yields:

    - ((<Element blockquote>, 'begin'), (<Element blockquote>, 'end'), 1, 'hi')
    - ((<Element blockquote>, 'end'), (<Element div>, 'end'), 0, 'world')
    """
    def _trim_spaces(text):
        # Collapse whitespace runs into single spaces and trim the ends.
        return MULTIPLE_WHITESPACE_RE.sub(' ', text).strip()

    counter = 1
    if max_lines != None and counter > max_lines:
        return

    # Buffer for the current line.
    line = ''

    # The reference tuple (element, position) for the start of the line.
    start_ref = None

    # The indentation level at the start of the line.
    start_indentation_level = None

    for token in tree_token_generator(el):
        if token is None:
            # Empty text/tail node; nothing to accumulate.
            continue
        elif isinstance(token, tuple):
            el, state, indentation_level = token
            tag_name = el.tag.lower()

            line_break = (tag_name == 'br' and state == BEGIN)
            is_block = (tag_name not in INLINE_TAGS)
            is_forward = (is_block and state == BEGIN and
                          el.attrib.get('style') in FORWARD_STYLES)

            if is_block or line_break:
                # Block boundaries and <br> tags terminate the current line.
                line = _trim_spaces(line)
                if line or line_break or is_forward:
                    end_ref = (el, state)
                    yield start_ref, end_ref, start_indentation_level, line
                    counter += 1
                    if max_lines != None and counter > max_lines:
                        return
                    line = ''
                    if is_forward:
                        # Simulate forward
                        yield (end_ref, end_ref, start_indentation_level,
                               FORWARD_LINE)
                        counter += 1
                        if max_lines != None and counter > max_lines:
                            return
                if not line:
                    # The next line starts at this block/break boundary.
                    start_ref = (el, state)
                    start_indentation_level = indentation_level
        elif isinstance(token, string_class):
            # Text or tail node: accumulate into the current line buffer.
            line += token
        else:
            raise RuntimeError('invalid token: {}'.format(token))

    line = _trim_spaces(line)
    if line:
        # NOTE(review): unlike the 4-tuples yielded above, trailing text is
        # yielded here as a bare string, which indented_tree_line_generator()
        # could not unpack -- presumably this tail case is unreachable for
        # trees produced by get_html_tree(); confirm.
        yield line
def indented_tree_line_generator(el, max_lines=None):
    """
    Like tree_line_generator, but yields tuples (start_ref, end_ref, line),
    where the line already takes the indentation into account by having "> "
    prepended. If a line already starts with ">", it is escaped ("\\>"). This
    makes it possible to reliably use methods that analyze plain text to
    detect quoting.
    """
    for start_ref, end_ref, level, text in tree_line_generator(el, max_lines):
        if text.startswith('>'):
            # Escape a pre-existing ">" so it can't be confused with the
            # indentation markers added below.
            text = '\\' + text
        yield start_ref, end_ref, '> ' * level + text
def get_line_info(tree, max_lines=None):
    """
    Shortcut for indented_tree_line_generator() that returns an array of
    start references, an array of corresponding end references (see
    tree_line_generator() docs), and an array of corresponding lines.
    """
    tuples = indented_tree_line_generator(tree, max_lines=max_lines)
    transposed = list(zip(*tuples))
    # An empty generator transposes to [], but callers expect three
    # (empty) sequences.
    return transposed if transposed else ([], [], [])
|
closeio/quotequail | quotequail/_html.py | trim_slice | python | def trim_slice(lines, slice_tuple):
def _empty(line):
return not line or line.strip() == '>'
if not slice_tuple:
return None
slice_start, slice_end = slice_tuple
if slice_start is None:
slice_start = 0
if slice_end is None:
slice_end = len(lines)
# Trim from beginning
while slice_start < slice_end and _empty(lines[slice_start]):
slice_start += 1
# Trim from end
while slice_end > slice_start and _empty(lines[slice_end-1]):
slice_end -= 1
return (slice_start, slice_end) | Trim a slice tuple (begin, end) so it starts at the first non-empty line
(obtained via indented_tree_line_generator / get_line_info) and ends at the
last non-empty line within the slice. Returns the new slice. | train | https://github.com/closeio/quotequail/blob/8a3960c033d595b25a8bbc2c340be898e3065b5f/quotequail/_html.py#L56-L83 | [
"def _empty(line):\n return not line or line.strip() == '>'\n"
] | # HTML utils
import lxml.html
import lxml.etree
from ._patterns import FORWARD_LINE, FORWARD_STYLES, MULTIPLE_WHITESPACE_RE
INLINE_TAGS = ['a', 'b', 'em', 'i', 'strong', 'span', 'font', 'q',
'object', 'bdo', 'sub', 'sup', 'center', 'td', 'th']
BEGIN = 'begin'
END = 'end'
try:
string_class = basestring # Python 2.7
except NameError:
string_class = str # Python 3.x
def trim_tree_after(element, include_element=True):
    """
    Removes the document tree following the given element. If include_element
    is True, the given element is kept in the tree, otherwise it is removed.
    """
    el = element
    # Walk up the ancestor chain; at each level, delete everything that
    # follows `el` inside its parent.
    for parent_el in element.iterancestors():
        # Drop any text trailing the current element.
        el.tail = None
        if el != element or include_element:
            # Keep `el` itself; removals start at its next sibling.
            el = el.getnext()
        # (Otherwise `el` is the original element and removals start at it,
        # so the element itself is removed too.)
        while el is not None:
            remove_el = el
            el = el.getnext()
            parent_el.remove(remove_el)
        # Continue one level up.
        el = parent_el
def trim_tree_before(element, include_element=True, keep_head=True):
    """
    Removes the document tree preceding the given element. If include_element
    is True, the given element is kept in the tree, otherwise it is removed.
    """
    el = element
    # Walk up the ancestor chain; at each level, delete everything that
    # precedes `el` inside its parent.
    for parent_el in element.iterancestors():
        # Drop the parent's leading text.
        parent_el.text = None
        if el != element or include_element:
            # Keep `el` itself; removals start at its previous sibling.
            el = el.getprevious()
        else:
            # The original element is removed as well (the while loop below
            # starts at it); preserve its tail text as the parent's leading
            # text.
            parent_el.text = el.tail
        while el is not None:
            remove_el = el
            el = el.getprevious()
            tag = remove_el.tag
            is_head = isinstance(tag, string_class) and tag.lower() == 'head'
            # Keep the <head> element (styles, meta) unless told otherwise.
            if not keep_head or not is_head:
                parent_el.remove(remove_el)
        el = parent_el
def unindent_tree(element):
    """
    Removes the outermost indent. For example, the tree
    "<div>A<blockquote>B<div>C<blockquote>D</blockquote>E</div>F</blockquote>G</div>"
    is transformed to
    "<div>A<div>B<div>C<blockquote>D</blockquote>E</div>F</div>G</div>"
    """
    for descendant in element.iter():
        if not is_indentation_element(descendant):
            continue
        # Turn the first (outermost) blockquote into a plain <div> and stop;
        # deeper blockquotes keep their indentation.
        descendant.attrib.clear()
        descendant.tag = 'div'
        return
def slice_tree(tree, start_refs, end_refs, slice_tuple, html_copy=None):
    """
    Slices the HTML tree with the given start_refs and end_refs (obtained via
    get_line_info) at the given slice_tuple, a tuple (start, end) containing
    the start and end of the slice (or None, to start from the start / end at
    the end of the tree). If html_copy is specified, a new tree is constructed
    from the given HTML (which must be equal to the original tree's HTML*).
    The resulting tree is returned.

    *) The reason we have to specify the HTML is that we can't reliably
       construct a copy of the tree using copy.copy() (see bug
       https://bugs.launchpad.net/lxml/+bug/1562550).
    """
    start_ref = None
    end_ref = None

    if slice_tuple:
        slice_start, slice_end = slice_tuple

        # A slice starting past the last line, or ending before the first
        # line, selects nothing at all.
        if ((slice_start is not None and slice_start >= len(start_refs)) or
            (slice_end is not None and slice_end <= 0)):
            return get_html_tree('')

        # Normalize out-of-range bounds to None (= no trimming on that side).
        if slice_start != None and slice_start <= 0:
            slice_start = None
        if slice_end != None and slice_end >= len(start_refs):
            slice_end = None
    else:
        slice_start, slice_end = None, None

    if slice_start is not None:
        start_ref = start_refs[slice_start]
    if slice_end is not None:
        if slice_end < len(end_refs):
            end_ref = end_refs[slice_end-1]

    if html_copy is not None:
        # Re-parse the HTML into a fresh tree and map both references onto it
        # via their element paths (see the docstring for why copy.copy()
        # cannot be used here).
        et = lxml.etree.ElementTree(tree)
        new_tree = get_html_tree(html_copy)
        if start_ref:
            selector = et.getelementpath(start_ref[0])
            start_ref = (new_tree.find(selector), start_ref[1])
        if end_ref:
            selector = et.getelementpath(end_ref[0])
            end_ref = (new_tree.find(selector), end_ref[1])
    else:
        new_tree = tree

    if start_ref:
        include_start = (start_ref[1] == BEGIN)
    if end_ref:
        include_end = (end_ref[1] == END)

    # If start_ref is the same as end_ref, and we don't include the element,
    # we are removing the entire tree. We need to handle this separately,
    # otherwise trim_tree_after won't work because it can't find the already
    # removed reference.
    if start_ref and end_ref and start_ref[0] == end_ref[0]:
        if not include_start or not include_end:
            return get_html_tree('')

    if start_ref:
        trim_tree_before(start_ref[0], include_element=include_start)
    if end_ref:
        trim_tree_after(end_ref[0], include_element=include_end)

    return new_tree
def get_html_tree(html):
    """
    Given the HTML string, returns a LXML tree object. The tree is wrapped in
    <div> elements if it doesn't have a top level tag or parsing would
    otherwise result in an error. The wrapping can be later removed with
    strip_wrapping().
    """
    parser = lxml.html.HTMLParser(encoding='utf-8')
    html = html.encode('utf8')
    try:
        tree = lxml.html.fromstring(html, parser=parser)
    except lxml.etree.Error:
        # E.g. empty document. Use dummy <div>
        tree = lxml.html.fromstring('<div></div>')

    # If the document doesn't start with a top level tag, wrap it with a <div>
    # that will be later stripped out for consistent behavior.
    if tree.tag not in lxml.html.defs.top_level_tags:
        html = b'<div>%s</div>' % html
        tree = lxml.html.fromstring(html, parser=parser)

    # HACK for Outlook emails, where tags like <o:p> are rendered as <p>. We
    # can generally ignore these tags so we replace them with <span>, which
    # doesn't cause a line break. Also, we can't look up the element path of
    # tags that contain colons. When rendering the tree, we will restore the
    # tag name.
    for el in tree.iter():
        if el.nsmap or (isinstance(el.tag, string_class) and ':' in el.tag):
            if el.nsmap:
                # Namespaced tag (e.g. <o:p>): rebuild the prefixed name.
                actual_tag_name = '{}:{}'.format(list(el.nsmap.keys())[0], el.tag)
            else:
                actual_tag_name = el.tag
            el.tag = 'span'
            # Stash the original name so render_html_tree() can restore it.
            el.attrib['__tag_name'] = actual_tag_name
    return tree
def strip_wrapping(html):
"""
Removes the wrapping that might have resulted when using get_html_tree().
"""
if html.startswith('<div>') and html.endswith('</div>'):
html = html[5:-6]
return html.strip()
def render_html_tree(tree):
"""
Renders the given HTML tree, and strips any wrapping that was applied in
get_html_tree().
You should avoid further processing of the given tree after calling this
method because we modify namespaced tags here.
"""
# Restore any tag names that were changed in get_html_tree()
for el in tree.iter():
if '__tag_name' in el.attrib:
actual_tag_name = el.attrib.pop('__tag_name')
el.tag = actual_tag_name
html = lxml.html.tostring(tree, encoding='utf8').decode('utf8')
return strip_wrapping(html)
def is_indentation_element(element):
if isinstance(element.tag, string_class):
return element.tag.lower() == 'blockquote'
return False
def tree_token_generator(el, indentation_level=0):
"""
Internal generator that yields tokens for the given HTML element as
follows:
- A tuple (LXML element, BEGIN, indentation_level)
- Text right after the start of the tag, or None.
- Recursively calls the token generator for all child objects
- A tuple (LXML element, END, indentation_level)
- Text right after the end of the tag, or None.
"""
if not isinstance(el.tag, string_class):
return
tag_name = el.tag.lower()
is_indentation = is_indentation_element(el)
if is_indentation:
indentation_level += 1
yield (el, BEGIN, indentation_level)
yield el.text
for child in el.iterchildren():
for token in tree_token_generator(child, indentation_level):
yield token
if is_indentation:
indentation_level -= 1
yield (el, END, indentation_level)
yield el.tail
def tree_line_generator(el, max_lines=None):
"""
Internal generator that iterates through an LXML tree and yields a tuple
per line. In this context, lines are blocks of text separated by <br> tags
or by block elements. The tuples contain the following elements:
- A tuple with the element reference (element, position) for the start
of the line. The tuple consists of:
- The LXML HTML element which references the line
- Whether the text starts at the beginning of the referenced element,
or after the closing tag
- A similar tuple indicating the ending of the line.
- The email indentation level, if detected.
- The plain (non-HTML) text of the line
If max_lines is specified, the generator stops after yielding the given
amount of lines.
For example, the HTML tree "<div>foo <span>bar</span><br>baz</div>" yields:
- ((<Element div>, 'begin'), (<Element br>, 'begin'), 0, 'foo bar')
- ((<Element br>, 'end'), (<Element div>, 'end'), 0, 'baz').
To illustrate the indentation level, the HTML tree
'<div><blockquote>hi</blockquote>world</div>' yields:
- ((<Element blockquote>, 'begin'), (<Element blockquote>, 'end'), 1, 'hi')
- ((<Element blockquote>, 'end'), (<Element div>, 'end'), 0, 'world')
"""
def _trim_spaces(text):
return MULTIPLE_WHITESPACE_RE.sub(' ', text).strip()
counter = 1
if max_lines != None and counter > max_lines:
return
# Buffer for the current line.
line = ''
# The reference tuple (element, position) for the start of the line.
start_ref = None
# The indentation level at the start of the line.
start_indentation_level = None
for token in tree_token_generator(el):
if token is None:
continue
elif isinstance(token, tuple):
el, state, indentation_level = token
tag_name = el.tag.lower()
line_break = (tag_name == 'br' and state == BEGIN)
is_block = (tag_name not in INLINE_TAGS)
is_forward = (is_block and state == BEGIN and
el.attrib.get('style') in FORWARD_STYLES)
if is_block or line_break:
line = _trim_spaces(line)
if line or line_break or is_forward:
end_ref = (el, state)
yield start_ref, end_ref, start_indentation_level, line
counter += 1
if max_lines != None and counter > max_lines:
return
line = ''
if is_forward:
# Simulate forward
yield (end_ref, end_ref, start_indentation_level,
FORWARD_LINE)
counter += 1
if max_lines != None and counter > max_lines:
return
if not line:
start_ref = (el, state)
start_indentation_level = indentation_level
elif isinstance(token, string_class):
line += token
else:
raise RuntimeError('invalid token: {}'.format(token))
line = _trim_spaces(line)
if line:
yield line
def indented_tree_line_generator(el, max_lines=None):
"""
Like tree_line_generator, but yields tuples (start_ref, end_ref, line),
where the line already takes the indentation into account by having "> "
prepended. If a line already starts with ">", it is escaped ("\\>"). This
makes it possible to reliably use methods that analyze plain text to detect
quoting.
"""
gen = tree_line_generator(el, max_lines)
for start_ref, end_ref, indentation_level, line in gen:
# Escape line
if line.startswith('>'):
line = '\\' + line
yield start_ref, end_ref, '> '*indentation_level + line
def get_line_info(tree, max_lines=None):
"""
Shortcut for indented_tree_line_generator() that returns an array of
start references, an array of corresponding end references (see
tree_line_generator() docs), and an array of corresponding lines.
"""
line_gen = indented_tree_line_generator(tree, max_lines=max_lines)
line_gen_result = list(zip(*line_gen))
if line_gen_result:
return line_gen_result
else:
return [], [], []
|
closeio/quotequail | quotequail/_html.py | unindent_tree | python | def unindent_tree(element):
for el in element.iter():
if is_indentation_element(el):
el.attrib.clear()
el.tag = 'div'
return | Removes the outermost indent. For example, the tree
"<div>A<blockqote>B<div>C<blockquote>D</blockquote>E</div>F</blockquote>G</div>"
is transformed to
"<div>A<div>B<div>C<blockquote>D</blockquote>E</div>F</div>G</div>" | train | https://github.com/closeio/quotequail/blob/8a3960c033d595b25a8bbc2c340be898e3065b5f/quotequail/_html.py#L85-L96 | [
"def is_indentation_element(element):\n if isinstance(element.tag, string_class):\n return element.tag.lower() == 'blockquote'\n return False\n"
] | # HTML utils
import lxml.html
import lxml.etree
from ._patterns import FORWARD_LINE, FORWARD_STYLES, MULTIPLE_WHITESPACE_RE
INLINE_TAGS = ['a', 'b', 'em', 'i', 'strong', 'span', 'font', 'q',
'object', 'bdo', 'sub', 'sup', 'center', 'td', 'th']
BEGIN = 'begin'
END = 'end'
try:
string_class = basestring # Python 2.7
except NameError:
string_class = str # Python 3.x
def trim_tree_after(element, include_element=True):
"""
Removes the document tree following the given element. If include_element
is True, the given element is kept in the tree, otherwise it is removed.
"""
el = element
for parent_el in element.iterancestors():
el.tail = None
if el != element or include_element:
el = el.getnext()
while el is not None:
remove_el = el
el = el.getnext()
parent_el.remove(remove_el)
el = parent_el
def trim_tree_before(element, include_element=True, keep_head=True):
"""
Removes the document tree preceding the given element. If include_element
is True, the given element is kept in the tree, otherwise it is removed.
"""
el = element
for parent_el in element.iterancestors():
parent_el.text = None
if el != element or include_element:
el = el.getprevious()
else:
parent_el.text = el.tail
while el is not None:
remove_el = el
el = el.getprevious()
tag = remove_el.tag
is_head = isinstance(tag, string_class) and tag.lower() == 'head'
if not keep_head or not is_head:
parent_el.remove(remove_el)
el = parent_el
def trim_slice(lines, slice_tuple):
    """
    Trim a slice tuple (begin, end) so it starts at the first non-empty line
    (obtained via indented_tree_line_generator / get_line_info) and ends at
    the last non-empty line within the slice. Returns the new slice.
    """
    if not slice_tuple:
        return None

    begin, end = slice_tuple
    begin = 0 if begin is None else begin
    end = len(lines) if end is None else end

    def _is_blank(text):
        # A line holding only indentation markers counts as empty.
        return not text or text.strip() == '>'

    # Advance past leading blank lines, then back off trailing ones.
    while begin < end and _is_blank(lines[begin]):
        begin += 1
    while end > begin and _is_blank(lines[end - 1]):
        end -= 1

    return (begin, end)
def slice_tree(tree, start_refs, end_refs, slice_tuple, html_copy=None):
"""
Slices the HTML tree with the given start_refs and end_refs (obtained via
get_line_info) at the given slice_tuple, a tuple (start, end) containing
the start and end of the slice (or None, to start from the start / end at
the end of the tree). If html_copy is specified, a new tree is constructed
from the given HTML (which must be the equal to the original tree's HTML*).
The resulting tree is returned.
*) The reason we have to specify the HTML is that we can't reliably
construct a copy of the tree using copy.copy() (see bug
https://bugs.launchpad.net/lxml/+bug/1562550).
"""
start_ref = None
end_ref = None
if slice_tuple:
slice_start, slice_end = slice_tuple
if ((slice_start is not None and slice_start >= len(start_refs)) or
(slice_end is not None and slice_end <= 0)):
return get_html_tree('')
if slice_start != None and slice_start <= 0:
slice_start = None
if slice_end != None and slice_end >= len(start_refs):
slice_end = None
else:
slice_start, slice_end = None, None
if slice_start is not None:
start_ref = start_refs[slice_start]
if slice_end is not None:
if slice_end < len(end_refs):
end_ref = end_refs[slice_end-1]
if html_copy is not None:
et = lxml.etree.ElementTree(tree)
new_tree = get_html_tree(html_copy)
if start_ref:
selector = et.getelementpath(start_ref[0])
start_ref = (new_tree.find(selector), start_ref[1])
if end_ref:
selector = et.getelementpath(end_ref[0])
end_ref = (new_tree.find(selector), end_ref[1])
else:
new_tree = tree
if start_ref:
include_start = (start_ref[1] == BEGIN)
if end_ref:
include_end = (end_ref[1] == END)
# If start_ref is the same as end_ref, and we don't include the element,
# we are removing the entire tree. We need to handle this separately,
# otherwise trim_tree_after won't work because it can't find the already
# removed reference.
if start_ref and end_ref and start_ref[0] == end_ref[0]:
if not include_start or not include_end:
return get_html_tree('')
if start_ref:
trim_tree_before(start_ref[0], include_element=include_start)
if end_ref:
trim_tree_after(end_ref[0], include_element=include_end)
return new_tree
def get_html_tree(html):
"""
Given the HTML string, returns a LXML tree object. The tree is wrapped in
<div> elements if it doesn't have a top level tag or parsing would
otherwise result in an error. The wrapping can be later removed with
strip_wrapping().
"""
parser = lxml.html.HTMLParser(encoding='utf-8')
html = html.encode('utf8')
try:
tree = lxml.html.fromstring(html, parser=parser)
except lxml.etree.Error:
# E.g. empty document. Use dummy <div>
tree = lxml.html.fromstring('<div></div>')
# If the document doesn't start with a top level tag, wrap it with a <div>
# that will be later stripped out for consistent behavior.
if tree.tag not in lxml.html.defs.top_level_tags:
html = b'<div>%s</div>' % html
tree = lxml.html.fromstring(html, parser=parser)
# HACK for Outlook emails, where tags like <o:p> are rendered as <p>. We
# can generally ignore these tags so we replace them with <span>, which
# doesn't cause a line break. Also, we can't look up the element path of
# tags that contain colons. When rendering the tree, we will restore the
# tag name.
for el in tree.iter():
if el.nsmap or (isinstance(el.tag, string_class) and ':' in el.tag):
if el.nsmap:
actual_tag_name = '{}:{}'.format(list(el.nsmap.keys())[0], el.tag)
else:
actual_tag_name = el.tag
el.tag = 'span'
el.attrib['__tag_name'] = actual_tag_name
return tree
def strip_wrapping(html):
"""
Removes the wrapping that might have resulted when using get_html_tree().
"""
if html.startswith('<div>') and html.endswith('</div>'):
html = html[5:-6]
return html.strip()
def render_html_tree(tree):
"""
Renders the given HTML tree, and strips any wrapping that was applied in
get_html_tree().
You should avoid further processing of the given tree after calling this
method because we modify namespaced tags here.
"""
# Restore any tag names that were changed in get_html_tree()
for el in tree.iter():
if '__tag_name' in el.attrib:
actual_tag_name = el.attrib.pop('__tag_name')
el.tag = actual_tag_name
html = lxml.html.tostring(tree, encoding='utf8').decode('utf8')
return strip_wrapping(html)
def is_indentation_element(element):
if isinstance(element.tag, string_class):
return element.tag.lower() == 'blockquote'
return False
def tree_token_generator(el, indentation_level=0):
"""
Internal generator that yields tokens for the given HTML element as
follows:
- A tuple (LXML element, BEGIN, indentation_level)
- Text right after the start of the tag, or None.
- Recursively calls the token generator for all child objects
- A tuple (LXML element, END, indentation_level)
- Text right after the end of the tag, or None.
"""
if not isinstance(el.tag, string_class):
return
tag_name = el.tag.lower()
is_indentation = is_indentation_element(el)
if is_indentation:
indentation_level += 1
yield (el, BEGIN, indentation_level)
yield el.text
for child in el.iterchildren():
for token in tree_token_generator(child, indentation_level):
yield token
if is_indentation:
indentation_level -= 1
yield (el, END, indentation_level)
yield el.tail
def tree_line_generator(el, max_lines=None):
"""
Internal generator that iterates through an LXML tree and yields a tuple
per line. In this context, lines are blocks of text separated by <br> tags
or by block elements. The tuples contain the following elements:
- A tuple with the element reference (element, position) for the start
of the line. The tuple consists of:
- The LXML HTML element which references the line
- Whether the text starts at the beginning of the referenced element,
or after the closing tag
- A similar tuple indicating the ending of the line.
- The email indentation level, if detected.
- The plain (non-HTML) text of the line
If max_lines is specified, the generator stops after yielding the given
amount of lines.
For example, the HTML tree "<div>foo <span>bar</span><br>baz</div>" yields:
- ((<Element div>, 'begin'), (<Element br>, 'begin'), 0, 'foo bar')
- ((<Element br>, 'end'), (<Element div>, 'end'), 0, 'baz').
To illustrate the indentation level, the HTML tree
'<div><blockquote>hi</blockquote>world</div>' yields:
- ((<Element blockquote>, 'begin'), (<Element blockquote>, 'end'), 1, 'hi')
- ((<Element blockquote>, 'end'), (<Element div>, 'end'), 0, 'world')
"""
def _trim_spaces(text):
return MULTIPLE_WHITESPACE_RE.sub(' ', text).strip()
counter = 1
if max_lines != None and counter > max_lines:
return
# Buffer for the current line.
line = ''
# The reference tuple (element, position) for the start of the line.
start_ref = None
# The indentation level at the start of the line.
start_indentation_level = None
for token in tree_token_generator(el):
if token is None:
continue
elif isinstance(token, tuple):
el, state, indentation_level = token
tag_name = el.tag.lower()
line_break = (tag_name == 'br' and state == BEGIN)
is_block = (tag_name not in INLINE_TAGS)
is_forward = (is_block and state == BEGIN and
el.attrib.get('style') in FORWARD_STYLES)
if is_block or line_break:
line = _trim_spaces(line)
if line or line_break or is_forward:
end_ref = (el, state)
yield start_ref, end_ref, start_indentation_level, line
counter += 1
if max_lines != None and counter > max_lines:
return
line = ''
if is_forward:
# Simulate forward
yield (end_ref, end_ref, start_indentation_level,
FORWARD_LINE)
counter += 1
if max_lines != None and counter > max_lines:
return
if not line:
start_ref = (el, state)
start_indentation_level = indentation_level
elif isinstance(token, string_class):
line += token
else:
raise RuntimeError('invalid token: {}'.format(token))
line = _trim_spaces(line)
if line:
yield line
def indented_tree_line_generator(el, max_lines=None):
"""
Like tree_line_generator, but yields tuples (start_ref, end_ref, line),
where the line already takes the indentation into account by having "> "
prepended. If a line already starts with ">", it is escaped ("\\>"). This
makes it possible to reliably use methods that analyze plain text to detect
quoting.
"""
gen = tree_line_generator(el, max_lines)
for start_ref, end_ref, indentation_level, line in gen:
# Escape line
if line.startswith('>'):
line = '\\' + line
yield start_ref, end_ref, '> '*indentation_level + line
def get_line_info(tree, max_lines=None):
"""
Shortcut for indented_tree_line_generator() that returns an array of
start references, an array of corresponding end references (see
tree_line_generator() docs), and an array of corresponding lines.
"""
line_gen = indented_tree_line_generator(tree, max_lines=max_lines)
line_gen_result = list(zip(*line_gen))
if line_gen_result:
return line_gen_result
else:
return [], [], []
|
closeio/quotequail | quotequail/_html.py | slice_tree | python | def slice_tree(tree, start_refs, end_refs, slice_tuple, html_copy=None):
start_ref = None
end_ref = None
if slice_tuple:
slice_start, slice_end = slice_tuple
if ((slice_start is not None and slice_start >= len(start_refs)) or
(slice_end is not None and slice_end <= 0)):
return get_html_tree('')
if slice_start != None and slice_start <= 0:
slice_start = None
if slice_end != None and slice_end >= len(start_refs):
slice_end = None
else:
slice_start, slice_end = None, None
if slice_start is not None:
start_ref = start_refs[slice_start]
if slice_end is not None:
if slice_end < len(end_refs):
end_ref = end_refs[slice_end-1]
if html_copy is not None:
et = lxml.etree.ElementTree(tree)
new_tree = get_html_tree(html_copy)
if start_ref:
selector = et.getelementpath(start_ref[0])
start_ref = (new_tree.find(selector), start_ref[1])
if end_ref:
selector = et.getelementpath(end_ref[0])
end_ref = (new_tree.find(selector), end_ref[1])
else:
new_tree = tree
if start_ref:
include_start = (start_ref[1] == BEGIN)
if end_ref:
include_end = (end_ref[1] == END)
# If start_ref is the same as end_ref, and we don't include the element,
# we are removing the entire tree. We need to handle this separately,
# otherwise trim_tree_after won't work because it can't find the already
# removed reference.
if start_ref and end_ref and start_ref[0] == end_ref[0]:
if not include_start or not include_end:
return get_html_tree('')
if start_ref:
trim_tree_before(start_ref[0], include_element=include_start)
if end_ref:
trim_tree_after(end_ref[0], include_element=include_end)
return new_tree | Slices the HTML tree with the given start_refs and end_refs (obtained via
get_line_info) at the given slice_tuple, a tuple (start, end) containing
the start and end of the slice (or None, to start from the start / end at
the end of the tree). If html_copy is specified, a new tree is constructed
from the given HTML (which must be the equal to the original tree's HTML*).
The resulting tree is returned.
*) The reason we have to specify the HTML is that we can't reliably
construct a copy of the tree using copy.copy() (see bug
https://bugs.launchpad.net/lxml/+bug/1562550). | train | https://github.com/closeio/quotequail/blob/8a3960c033d595b25a8bbc2c340be898e3065b5f/quotequail/_html.py#L98-L171 | [
"def get_html_tree(html):\n \"\"\"\n Given the HTML string, returns a LXML tree object. The tree is wrapped in\n <div> elements if it doesn't have a top level tag or parsing would\n otherwise result in an error. The wrapping can be later removed with\n strip_wrapping().\n \"\"\"\n\n parser = lx... | # HTML utils
import lxml.html
import lxml.etree
from ._patterns import FORWARD_LINE, FORWARD_STYLES, MULTIPLE_WHITESPACE_RE
INLINE_TAGS = ['a', 'b', 'em', 'i', 'strong', 'span', 'font', 'q',
'object', 'bdo', 'sub', 'sup', 'center', 'td', 'th']
BEGIN = 'begin'
END = 'end'
try:
string_class = basestring # Python 2.7
except NameError:
string_class = str # Python 3.x
def trim_tree_after(element, include_element=True):
"""
Removes the document tree following the given element. If include_element
is True, the given element is kept in the tree, otherwise it is removed.
"""
el = element
for parent_el in element.iterancestors():
el.tail = None
if el != element or include_element:
el = el.getnext()
while el is not None:
remove_el = el
el = el.getnext()
parent_el.remove(remove_el)
el = parent_el
def trim_tree_before(element, include_element=True, keep_head=True):
"""
Removes the document tree preceding the given element. If include_element
is True, the given element is kept in the tree, otherwise it is removed.
"""
el = element
for parent_el in element.iterancestors():
parent_el.text = None
if el != element or include_element:
el = el.getprevious()
else:
parent_el.text = el.tail
while el is not None:
remove_el = el
el = el.getprevious()
tag = remove_el.tag
is_head = isinstance(tag, string_class) and tag.lower() == 'head'
if not keep_head or not is_head:
parent_el.remove(remove_el)
el = parent_el
def trim_slice(lines, slice_tuple):
"""
Trim a slice tuple (begin, end) so it starts at the first non-empty line
(obtained via indented_tree_line_generator / get_line_info) and ends at the
last non-empty line within the slice. Returns the new slice.
"""
def _empty(line):
return not line or line.strip() == '>'
if not slice_tuple:
return None
slice_start, slice_end = slice_tuple
if slice_start is None:
slice_start = 0
if slice_end is None:
slice_end = len(lines)
# Trim from beginning
while slice_start < slice_end and _empty(lines[slice_start]):
slice_start += 1
# Trim from end
while slice_end > slice_start and _empty(lines[slice_end-1]):
slice_end -= 1
return (slice_start, slice_end)
def unindent_tree(element):
"""
Removes the outermost indent. For example, the tree
"<div>A<blockqote>B<div>C<blockquote>D</blockquote>E</div>F</blockquote>G</div>"
is transformed to
"<div>A<div>B<div>C<blockquote>D</blockquote>E</div>F</div>G</div>"
"""
for el in element.iter():
if is_indentation_element(el):
el.attrib.clear()
el.tag = 'div'
return
def get_html_tree(html):
"""
Given the HTML string, returns a LXML tree object. The tree is wrapped in
<div> elements if it doesn't have a top level tag or parsing would
otherwise result in an error. The wrapping can be later removed with
strip_wrapping().
"""
parser = lxml.html.HTMLParser(encoding='utf-8')
html = html.encode('utf8')
try:
tree = lxml.html.fromstring(html, parser=parser)
except lxml.etree.Error:
# E.g. empty document. Use dummy <div>
tree = lxml.html.fromstring('<div></div>')
# If the document doesn't start with a top level tag, wrap it with a <div>
# that will be later stripped out for consistent behavior.
if tree.tag not in lxml.html.defs.top_level_tags:
html = b'<div>%s</div>' % html
tree = lxml.html.fromstring(html, parser=parser)
# HACK for Outlook emails, where tags like <o:p> are rendered as <p>. We
# can generally ignore these tags so we replace them with <span>, which
# doesn't cause a line break. Also, we can't look up the element path of
# tags that contain colons. When rendering the tree, we will restore the
# tag name.
for el in tree.iter():
if el.nsmap or (isinstance(el.tag, string_class) and ':' in el.tag):
if el.nsmap:
actual_tag_name = '{}:{}'.format(list(el.nsmap.keys())[0], el.tag)
else:
actual_tag_name = el.tag
el.tag = 'span'
el.attrib['__tag_name'] = actual_tag_name
return tree
def strip_wrapping(html):
"""
Removes the wrapping that might have resulted when using get_html_tree().
"""
if html.startswith('<div>') and html.endswith('</div>'):
html = html[5:-6]
return html.strip()
def render_html_tree(tree):
"""
Renders the given HTML tree, and strips any wrapping that was applied in
get_html_tree().
You should avoid further processing of the given tree after calling this
method because we modify namespaced tags here.
"""
# Restore any tag names that were changed in get_html_tree()
for el in tree.iter():
if '__tag_name' in el.attrib:
actual_tag_name = el.attrib.pop('__tag_name')
el.tag = actual_tag_name
html = lxml.html.tostring(tree, encoding='utf8').decode('utf8')
return strip_wrapping(html)
def is_indentation_element(element):
if isinstance(element.tag, string_class):
return element.tag.lower() == 'blockquote'
return False
def tree_token_generator(el, indentation_level=0):
"""
Internal generator that yields tokens for the given HTML element as
follows:
- A tuple (LXML element, BEGIN, indentation_level)
- Text right after the start of the tag, or None.
- Recursively calls the token generator for all child objects
- A tuple (LXML element, END, indentation_level)
- Text right after the end of the tag, or None.
"""
if not isinstance(el.tag, string_class):
return
tag_name = el.tag.lower()
is_indentation = is_indentation_element(el)
if is_indentation:
indentation_level += 1
yield (el, BEGIN, indentation_level)
yield el.text
for child in el.iterchildren():
for token in tree_token_generator(child, indentation_level):
yield token
if is_indentation:
indentation_level -= 1
yield (el, END, indentation_level)
yield el.tail
def tree_line_generator(el, max_lines=None):
"""
Internal generator that iterates through an LXML tree and yields a tuple
per line. In this context, lines are blocks of text separated by <br> tags
or by block elements. The tuples contain the following elements:
- A tuple with the element reference (element, position) for the start
of the line. The tuple consists of:
- The LXML HTML element which references the line
- Whether the text starts at the beginning of the referenced element,
or after the closing tag
- A similar tuple indicating the ending of the line.
- The email indentation level, if detected.
- The plain (non-HTML) text of the line
If max_lines is specified, the generator stops after yielding the given
amount of lines.
For example, the HTML tree "<div>foo <span>bar</span><br>baz</div>" yields:
- ((<Element div>, 'begin'), (<Element br>, 'begin'), 0, 'foo bar')
- ((<Element br>, 'end'), (<Element div>, 'end'), 0, 'baz').
To illustrate the indentation level, the HTML tree
'<div><blockquote>hi</blockquote>world</div>' yields:
- ((<Element blockquote>, 'begin'), (<Element blockquote>, 'end'), 1, 'hi')
- ((<Element blockquote>, 'end'), (<Element div>, 'end'), 0, 'world')
"""
def _trim_spaces(text):
return MULTIPLE_WHITESPACE_RE.sub(' ', text).strip()
counter = 1
if max_lines != None and counter > max_lines:
return
# Buffer for the current line.
line = ''
# The reference tuple (element, position) for the start of the line.
start_ref = None
# The indentation level at the start of the line.
start_indentation_level = None
for token in tree_token_generator(el):
if token is None:
continue
elif isinstance(token, tuple):
el, state, indentation_level = token
tag_name = el.tag.lower()
line_break = (tag_name == 'br' and state == BEGIN)
is_block = (tag_name not in INLINE_TAGS)
is_forward = (is_block and state == BEGIN and
el.attrib.get('style') in FORWARD_STYLES)
if is_block or line_break:
line = _trim_spaces(line)
if line or line_break or is_forward:
end_ref = (el, state)
yield start_ref, end_ref, start_indentation_level, line
counter += 1
if max_lines != None and counter > max_lines:
return
line = ''
if is_forward:
# Simulate forward
yield (end_ref, end_ref, start_indentation_level,
FORWARD_LINE)
counter += 1
if max_lines != None and counter > max_lines:
return
if not line:
start_ref = (el, state)
start_indentation_level = indentation_level
elif isinstance(token, string_class):
line += token
else:
raise RuntimeError('invalid token: {}'.format(token))
line = _trim_spaces(line)
if line:
yield line
def indented_tree_line_generator(el, max_lines=None):
"""
Like tree_line_generator, but yields tuples (start_ref, end_ref, line),
where the line already takes the indentation into account by having "> "
prepended. If a line already starts with ">", it is escaped ("\\>"). This
makes it possible to reliably use methods that analyze plain text to detect
quoting.
"""
gen = tree_line_generator(el, max_lines)
for start_ref, end_ref, indentation_level, line in gen:
# Escape line
if line.startswith('>'):
line = '\\' + line
yield start_ref, end_ref, '> '*indentation_level + line
def get_line_info(tree, max_lines=None):
"""
Shortcut for indented_tree_line_generator() that returns an array of
start references, an array of corresponding end references (see
tree_line_generator() docs), and an array of corresponding lines.
"""
line_gen = indented_tree_line_generator(tree, max_lines=max_lines)
line_gen_result = list(zip(*line_gen))
if line_gen_result:
return line_gen_result
else:
return [], [], []
|
closeio/quotequail | quotequail/_html.py | get_html_tree | python | def get_html_tree(html):
parser = lxml.html.HTMLParser(encoding='utf-8')
html = html.encode('utf8')
try:
tree = lxml.html.fromstring(html, parser=parser)
except lxml.etree.Error:
# E.g. empty document. Use dummy <div>
tree = lxml.html.fromstring('<div></div>')
# If the document doesn't start with a top level tag, wrap it with a <div>
# that will be later stripped out for consistent behavior.
if tree.tag not in lxml.html.defs.top_level_tags:
html = b'<div>%s</div>' % html
tree = lxml.html.fromstring(html, parser=parser)
# HACK for Outlook emails, where tags like <o:p> are rendered as <p>. We
# can generally ignore these tags so we replace them with <span>, which
# doesn't cause a line break. Also, we can't look up the element path of
# tags that contain colons. When rendering the tree, we will restore the
# tag name.
for el in tree.iter():
if el.nsmap or (isinstance(el.tag, string_class) and ':' in el.tag):
if el.nsmap:
actual_tag_name = '{}:{}'.format(list(el.nsmap.keys())[0], el.tag)
else:
actual_tag_name = el.tag
el.tag = 'span'
el.attrib['__tag_name'] = actual_tag_name
return tree | Given the HTML string, returns a LXML tree object. The tree is wrapped in
<div> elements if it doesn't have a top level tag or parsing would
otherwise result in an error. The wrapping can be later removed with
strip_wrapping(). | train | https://github.com/closeio/quotequail/blob/8a3960c033d595b25a8bbc2c340be898e3065b5f/quotequail/_html.py#L173-L210 | null | # HTML utils
import lxml.html
import lxml.etree
from ._patterns import FORWARD_LINE, FORWARD_STYLES, MULTIPLE_WHITESPACE_RE
INLINE_TAGS = ['a', 'b', 'em', 'i', 'strong', 'span', 'font', 'q',
'object', 'bdo', 'sub', 'sup', 'center', 'td', 'th']
BEGIN = 'begin'
END = 'end'
try:
string_class = basestring # Python 2.7
except NameError:
string_class = str # Python 3.x
def trim_tree_after(element, include_element=True):
"""
Removes the document tree following the given element. If include_element
is True, the given element is kept in the tree, otherwise it is removed.
"""
el = element
for parent_el in element.iterancestors():
el.tail = None
if el != element or include_element:
el = el.getnext()
while el is not None:
remove_el = el
el = el.getnext()
parent_el.remove(remove_el)
el = parent_el
def trim_tree_before(element, include_element=True, keep_head=True):
"""
Removes the document tree preceding the given element. If include_element
is True, the given element is kept in the tree, otherwise it is removed.
"""
el = element
for parent_el in element.iterancestors():
parent_el.text = None
if el != element or include_element:
el = el.getprevious()
else:
parent_el.text = el.tail
while el is not None:
remove_el = el
el = el.getprevious()
tag = remove_el.tag
is_head = isinstance(tag, string_class) and tag.lower() == 'head'
if not keep_head or not is_head:
parent_el.remove(remove_el)
el = parent_el
def trim_slice(lines, slice_tuple):
"""
Trim a slice tuple (begin, end) so it starts at the first non-empty line
(obtained via indented_tree_line_generator / get_line_info) and ends at the
last non-empty line within the slice. Returns the new slice.
"""
def _empty(line):
return not line or line.strip() == '>'
if not slice_tuple:
return None
slice_start, slice_end = slice_tuple
if slice_start is None:
slice_start = 0
if slice_end is None:
slice_end = len(lines)
# Trim from beginning
while slice_start < slice_end and _empty(lines[slice_start]):
slice_start += 1
# Trim from end
while slice_end > slice_start and _empty(lines[slice_end-1]):
slice_end -= 1
return (slice_start, slice_end)
def unindent_tree(element):
"""
Removes the outermost indent. For example, the tree
"<div>A<blockqote>B<div>C<blockquote>D</blockquote>E</div>F</blockquote>G</div>"
is transformed to
"<div>A<div>B<div>C<blockquote>D</blockquote>E</div>F</div>G</div>"
"""
for el in element.iter():
if is_indentation_element(el):
el.attrib.clear()
el.tag = 'div'
return
def slice_tree(tree, start_refs, end_refs, slice_tuple, html_copy=None):
"""
Slices the HTML tree with the given start_refs and end_refs (obtained via
get_line_info) at the given slice_tuple, a tuple (start, end) containing
the start and end of the slice (or None, to start from the start / end at
the end of the tree). If html_copy is specified, a new tree is constructed
from the given HTML (which must be the equal to the original tree's HTML*).
The resulting tree is returned.
*) The reason we have to specify the HTML is that we can't reliably
construct a copy of the tree using copy.copy() (see bug
https://bugs.launchpad.net/lxml/+bug/1562550).
"""
start_ref = None
end_ref = None
if slice_tuple:
slice_start, slice_end = slice_tuple
if ((slice_start is not None and slice_start >= len(start_refs)) or
(slice_end is not None and slice_end <= 0)):
return get_html_tree('')
if slice_start != None and slice_start <= 0:
slice_start = None
if slice_end != None and slice_end >= len(start_refs):
slice_end = None
else:
slice_start, slice_end = None, None
if slice_start is not None:
start_ref = start_refs[slice_start]
if slice_end is not None:
if slice_end < len(end_refs):
end_ref = end_refs[slice_end-1]
if html_copy is not None:
et = lxml.etree.ElementTree(tree)
new_tree = get_html_tree(html_copy)
if start_ref:
selector = et.getelementpath(start_ref[0])
start_ref = (new_tree.find(selector), start_ref[1])
if end_ref:
selector = et.getelementpath(end_ref[0])
end_ref = (new_tree.find(selector), end_ref[1])
else:
new_tree = tree
if start_ref:
include_start = (start_ref[1] == BEGIN)
if end_ref:
include_end = (end_ref[1] == END)
# If start_ref is the same as end_ref, and we don't include the element,
# we are removing the entire tree. We need to handle this separately,
# otherwise trim_tree_after won't work because it can't find the already
# removed reference.
if start_ref and end_ref and start_ref[0] == end_ref[0]:
if not include_start or not include_end:
return get_html_tree('')
if start_ref:
trim_tree_before(start_ref[0], include_element=include_start)
if end_ref:
trim_tree_after(end_ref[0], include_element=include_end)
return new_tree
def strip_wrapping(html):
"""
Removes the wrapping that might have resulted when using get_html_tree().
"""
if html.startswith('<div>') and html.endswith('</div>'):
html = html[5:-6]
return html.strip()
def render_html_tree(tree):
"""
Renders the given HTML tree, and strips any wrapping that was applied in
get_html_tree().
You should avoid further processing of the given tree after calling this
method because we modify namespaced tags here.
"""
# Restore any tag names that were changed in get_html_tree()
for el in tree.iter():
if '__tag_name' in el.attrib:
actual_tag_name = el.attrib.pop('__tag_name')
el.tag = actual_tag_name
html = lxml.html.tostring(tree, encoding='utf8').decode('utf8')
return strip_wrapping(html)
def is_indentation_element(element):
if isinstance(element.tag, string_class):
return element.tag.lower() == 'blockquote'
return False
def tree_token_generator(el, indentation_level=0):
"""
Internal generator that yields tokens for the given HTML element as
follows:
- A tuple (LXML element, BEGIN, indentation_level)
- Text right after the start of the tag, or None.
- Recursively calls the token generator for all child objects
- A tuple (LXML element, END, indentation_level)
- Text right after the end of the tag, or None.
"""
if not isinstance(el.tag, string_class):
return
tag_name = el.tag.lower()
is_indentation = is_indentation_element(el)
if is_indentation:
indentation_level += 1
yield (el, BEGIN, indentation_level)
yield el.text
for child in el.iterchildren():
for token in tree_token_generator(child, indentation_level):
yield token
if is_indentation:
indentation_level -= 1
yield (el, END, indentation_level)
yield el.tail
def tree_line_generator(el, max_lines=None):
"""
Internal generator that iterates through an LXML tree and yields a tuple
per line. In this context, lines are blocks of text separated by <br> tags
or by block elements. The tuples contain the following elements:
- A tuple with the element reference (element, position) for the start
of the line. The tuple consists of:
- The LXML HTML element which references the line
- Whether the text starts at the beginning of the referenced element,
or after the closing tag
- A similar tuple indicating the ending of the line.
- The email indentation level, if detected.
- The plain (non-HTML) text of the line
If max_lines is specified, the generator stops after yielding the given
amount of lines.
For example, the HTML tree "<div>foo <span>bar</span><br>baz</div>" yields:
- ((<Element div>, 'begin'), (<Element br>, 'begin'), 0, 'foo bar')
- ((<Element br>, 'end'), (<Element div>, 'end'), 0, 'baz').
To illustrate the indentation level, the HTML tree
'<div><blockquote>hi</blockquote>world</div>' yields:
- ((<Element blockquote>, 'begin'), (<Element blockquote>, 'end'), 1, 'hi')
- ((<Element blockquote>, 'end'), (<Element div>, 'end'), 0, 'world')
"""
def _trim_spaces(text):
return MULTIPLE_WHITESPACE_RE.sub(' ', text).strip()
counter = 1
if max_lines != None and counter > max_lines:
return
# Buffer for the current line.
line = ''
# The reference tuple (element, position) for the start of the line.
start_ref = None
# The indentation level at the start of the line.
start_indentation_level = None
for token in tree_token_generator(el):
if token is None:
continue
elif isinstance(token, tuple):
el, state, indentation_level = token
tag_name = el.tag.lower()
line_break = (tag_name == 'br' and state == BEGIN)
is_block = (tag_name not in INLINE_TAGS)
is_forward = (is_block and state == BEGIN and
el.attrib.get('style') in FORWARD_STYLES)
if is_block or line_break:
line = _trim_spaces(line)
if line or line_break or is_forward:
end_ref = (el, state)
yield start_ref, end_ref, start_indentation_level, line
counter += 1
if max_lines != None and counter > max_lines:
return
line = ''
if is_forward:
# Simulate forward
yield (end_ref, end_ref, start_indentation_level,
FORWARD_LINE)
counter += 1
if max_lines != None and counter > max_lines:
return
if not line:
start_ref = (el, state)
start_indentation_level = indentation_level
elif isinstance(token, string_class):
line += token
else:
raise RuntimeError('invalid token: {}'.format(token))
line = _trim_spaces(line)
if line:
yield line
def indented_tree_line_generator(el, max_lines=None):
"""
Like tree_line_generator, but yields tuples (start_ref, end_ref, line),
where the line already takes the indentation into account by having "> "
prepended. If a line already starts with ">", it is escaped ("\\>"). This
makes it possible to reliably use methods that analyze plain text to detect
quoting.
"""
gen = tree_line_generator(el, max_lines)
for start_ref, end_ref, indentation_level, line in gen:
# Escape line
if line.startswith('>'):
line = '\\' + line
yield start_ref, end_ref, '> '*indentation_level + line
def get_line_info(tree, max_lines=None):
"""
Shortcut for indented_tree_line_generator() that returns an array of
start references, an array of corresponding end references (see
tree_line_generator() docs), and an array of corresponding lines.
"""
line_gen = indented_tree_line_generator(tree, max_lines=max_lines)
line_gen_result = list(zip(*line_gen))
if line_gen_result:
return line_gen_result
else:
return [], [], []
|
closeio/quotequail | quotequail/_html.py | strip_wrapping | python | def strip_wrapping(html):
if html.startswith('<div>') and html.endswith('</div>'):
html = html[5:-6]
return html.strip() | Removes the wrapping that might have resulted when using get_html_tree(). | train | https://github.com/closeio/quotequail/blob/8a3960c033d595b25a8bbc2c340be898e3065b5f/quotequail/_html.py#L212-L218 | null | # HTML utils
import lxml.html
import lxml.etree
from ._patterns import FORWARD_LINE, FORWARD_STYLES, MULTIPLE_WHITESPACE_RE
INLINE_TAGS = ['a', 'b', 'em', 'i', 'strong', 'span', 'font', 'q',
'object', 'bdo', 'sub', 'sup', 'center', 'td', 'th']
BEGIN = 'begin'
END = 'end'
try:
string_class = basestring # Python 2.7
except NameError:
string_class = str # Python 3.x
def trim_tree_after(element, include_element=True):
"""
Removes the document tree following the given element. If include_element
is True, the given element is kept in the tree, otherwise it is removed.
"""
el = element
for parent_el in element.iterancestors():
el.tail = None
if el != element or include_element:
el = el.getnext()
while el is not None:
remove_el = el
el = el.getnext()
parent_el.remove(remove_el)
el = parent_el
def trim_tree_before(element, include_element=True, keep_head=True):
"""
Removes the document tree preceding the given element. If include_element
is True, the given element is kept in the tree, otherwise it is removed.
"""
el = element
for parent_el in element.iterancestors():
parent_el.text = None
if el != element or include_element:
el = el.getprevious()
else:
parent_el.text = el.tail
while el is not None:
remove_el = el
el = el.getprevious()
tag = remove_el.tag
is_head = isinstance(tag, string_class) and tag.lower() == 'head'
if not keep_head or not is_head:
parent_el.remove(remove_el)
el = parent_el
def trim_slice(lines, slice_tuple):
"""
Trim a slice tuple (begin, end) so it starts at the first non-empty line
(obtained via indented_tree_line_generator / get_line_info) and ends at the
last non-empty line within the slice. Returns the new slice.
"""
def _empty(line):
return not line or line.strip() == '>'
if not slice_tuple:
return None
slice_start, slice_end = slice_tuple
if slice_start is None:
slice_start = 0
if slice_end is None:
slice_end = len(lines)
# Trim from beginning
while slice_start < slice_end and _empty(lines[slice_start]):
slice_start += 1
# Trim from end
while slice_end > slice_start and _empty(lines[slice_end-1]):
slice_end -= 1
return (slice_start, slice_end)
def unindent_tree(element):
"""
Removes the outermost indent. For example, the tree
"<div>A<blockqote>B<div>C<blockquote>D</blockquote>E</div>F</blockquote>G</div>"
is transformed to
"<div>A<div>B<div>C<blockquote>D</blockquote>E</div>F</div>G</div>"
"""
for el in element.iter():
if is_indentation_element(el):
el.attrib.clear()
el.tag = 'div'
return
def slice_tree(tree, start_refs, end_refs, slice_tuple, html_copy=None):
"""
Slices the HTML tree with the given start_refs and end_refs (obtained via
get_line_info) at the given slice_tuple, a tuple (start, end) containing
the start and end of the slice (or None, to start from the start / end at
the end of the tree). If html_copy is specified, a new tree is constructed
from the given HTML (which must be the equal to the original tree's HTML*).
The resulting tree is returned.
*) The reason we have to specify the HTML is that we can't reliably
construct a copy of the tree using copy.copy() (see bug
https://bugs.launchpad.net/lxml/+bug/1562550).
"""
start_ref = None
end_ref = None
if slice_tuple:
slice_start, slice_end = slice_tuple
if ((slice_start is not None and slice_start >= len(start_refs)) or
(slice_end is not None and slice_end <= 0)):
return get_html_tree('')
if slice_start != None and slice_start <= 0:
slice_start = None
if slice_end != None and slice_end >= len(start_refs):
slice_end = None
else:
slice_start, slice_end = None, None
if slice_start is not None:
start_ref = start_refs[slice_start]
if slice_end is not None:
if slice_end < len(end_refs):
end_ref = end_refs[slice_end-1]
if html_copy is not None:
et = lxml.etree.ElementTree(tree)
new_tree = get_html_tree(html_copy)
if start_ref:
selector = et.getelementpath(start_ref[0])
start_ref = (new_tree.find(selector), start_ref[1])
if end_ref:
selector = et.getelementpath(end_ref[0])
end_ref = (new_tree.find(selector), end_ref[1])
else:
new_tree = tree
if start_ref:
include_start = (start_ref[1] == BEGIN)
if end_ref:
include_end = (end_ref[1] == END)
# If start_ref is the same as end_ref, and we don't include the element,
# we are removing the entire tree. We need to handle this separately,
# otherwise trim_tree_after won't work because it can't find the already
# removed reference.
if start_ref and end_ref and start_ref[0] == end_ref[0]:
if not include_start or not include_end:
return get_html_tree('')
if start_ref:
trim_tree_before(start_ref[0], include_element=include_start)
if end_ref:
trim_tree_after(end_ref[0], include_element=include_end)
return new_tree
def get_html_tree(html):
"""
Given the HTML string, returns a LXML tree object. The tree is wrapped in
<div> elements if it doesn't have a top level tag or parsing would
otherwise result in an error. The wrapping can be later removed with
strip_wrapping().
"""
parser = lxml.html.HTMLParser(encoding='utf-8')
html = html.encode('utf8')
try:
tree = lxml.html.fromstring(html, parser=parser)
except lxml.etree.Error:
# E.g. empty document. Use dummy <div>
tree = lxml.html.fromstring('<div></div>')
# If the document doesn't start with a top level tag, wrap it with a <div>
# that will be later stripped out for consistent behavior.
if tree.tag not in lxml.html.defs.top_level_tags:
html = b'<div>%s</div>' % html
tree = lxml.html.fromstring(html, parser=parser)
# HACK for Outlook emails, where tags like <o:p> are rendered as <p>. We
# can generally ignore these tags so we replace them with <span>, which
# doesn't cause a line break. Also, we can't look up the element path of
# tags that contain colons. When rendering the tree, we will restore the
# tag name.
for el in tree.iter():
if el.nsmap or (isinstance(el.tag, string_class) and ':' in el.tag):
if el.nsmap:
actual_tag_name = '{}:{}'.format(list(el.nsmap.keys())[0], el.tag)
else:
actual_tag_name = el.tag
el.tag = 'span'
el.attrib['__tag_name'] = actual_tag_name
return tree
def render_html_tree(tree):
"""
Renders the given HTML tree, and strips any wrapping that was applied in
get_html_tree().
You should avoid further processing of the given tree after calling this
method because we modify namespaced tags here.
"""
# Restore any tag names that were changed in get_html_tree()
for el in tree.iter():
if '__tag_name' in el.attrib:
actual_tag_name = el.attrib.pop('__tag_name')
el.tag = actual_tag_name
html = lxml.html.tostring(tree, encoding='utf8').decode('utf8')
return strip_wrapping(html)
def is_indentation_element(element):
if isinstance(element.tag, string_class):
return element.tag.lower() == 'blockquote'
return False
def tree_token_generator(el, indentation_level=0):
"""
Internal generator that yields tokens for the given HTML element as
follows:
- A tuple (LXML element, BEGIN, indentation_level)
- Text right after the start of the tag, or None.
- Recursively calls the token generator for all child objects
- A tuple (LXML element, END, indentation_level)
- Text right after the end of the tag, or None.
"""
if not isinstance(el.tag, string_class):
return
tag_name = el.tag.lower()
is_indentation = is_indentation_element(el)
if is_indentation:
indentation_level += 1
yield (el, BEGIN, indentation_level)
yield el.text
for child in el.iterchildren():
for token in tree_token_generator(child, indentation_level):
yield token
if is_indentation:
indentation_level -= 1
yield (el, END, indentation_level)
yield el.tail
def tree_line_generator(el, max_lines=None):
"""
Internal generator that iterates through an LXML tree and yields a tuple
per line. In this context, lines are blocks of text separated by <br> tags
or by block elements. The tuples contain the following elements:
- A tuple with the element reference (element, position) for the start
of the line. The tuple consists of:
- The LXML HTML element which references the line
- Whether the text starts at the beginning of the referenced element,
or after the closing tag
- A similar tuple indicating the ending of the line.
- The email indentation level, if detected.
- The plain (non-HTML) text of the line
If max_lines is specified, the generator stops after yielding the given
amount of lines.
For example, the HTML tree "<div>foo <span>bar</span><br>baz</div>" yields:
- ((<Element div>, 'begin'), (<Element br>, 'begin'), 0, 'foo bar')
- ((<Element br>, 'end'), (<Element div>, 'end'), 0, 'baz').
To illustrate the indentation level, the HTML tree
'<div><blockquote>hi</blockquote>world</div>' yields:
- ((<Element blockquote>, 'begin'), (<Element blockquote>, 'end'), 1, 'hi')
- ((<Element blockquote>, 'end'), (<Element div>, 'end'), 0, 'world')
"""
def _trim_spaces(text):
return MULTIPLE_WHITESPACE_RE.sub(' ', text).strip()
counter = 1
if max_lines != None and counter > max_lines:
return
# Buffer for the current line.
line = ''
# The reference tuple (element, position) for the start of the line.
start_ref = None
# The indentation level at the start of the line.
start_indentation_level = None
for token in tree_token_generator(el):
if token is None:
continue
elif isinstance(token, tuple):
el, state, indentation_level = token
tag_name = el.tag.lower()
line_break = (tag_name == 'br' and state == BEGIN)
is_block = (tag_name not in INLINE_TAGS)
is_forward = (is_block and state == BEGIN and
el.attrib.get('style') in FORWARD_STYLES)
if is_block or line_break:
line = _trim_spaces(line)
if line or line_break or is_forward:
end_ref = (el, state)
yield start_ref, end_ref, start_indentation_level, line
counter += 1
if max_lines != None and counter > max_lines:
return
line = ''
if is_forward:
# Simulate forward
yield (end_ref, end_ref, start_indentation_level,
FORWARD_LINE)
counter += 1
if max_lines != None and counter > max_lines:
return
if not line:
start_ref = (el, state)
start_indentation_level = indentation_level
elif isinstance(token, string_class):
line += token
else:
raise RuntimeError('invalid token: {}'.format(token))
line = _trim_spaces(line)
if line:
yield line
def indented_tree_line_generator(el, max_lines=None):
"""
Like tree_line_generator, but yields tuples (start_ref, end_ref, line),
where the line already takes the indentation into account by having "> "
prepended. If a line already starts with ">", it is escaped ("\\>"). This
makes it possible to reliably use methods that analyze plain text to detect
quoting.
"""
gen = tree_line_generator(el, max_lines)
for start_ref, end_ref, indentation_level, line in gen:
# Escape line
if line.startswith('>'):
line = '\\' + line
yield start_ref, end_ref, '> '*indentation_level + line
def get_line_info(tree, max_lines=None):
"""
Shortcut for indented_tree_line_generator() that returns an array of
start references, an array of corresponding end references (see
tree_line_generator() docs), and an array of corresponding lines.
"""
line_gen = indented_tree_line_generator(tree, max_lines=max_lines)
line_gen_result = list(zip(*line_gen))
if line_gen_result:
return line_gen_result
else:
return [], [], []
|
closeio/quotequail | quotequail/_html.py | render_html_tree | python | def render_html_tree(tree):
# Restore any tag names that were changed in get_html_tree()
for el in tree.iter():
if '__tag_name' in el.attrib:
actual_tag_name = el.attrib.pop('__tag_name')
el.tag = actual_tag_name
html = lxml.html.tostring(tree, encoding='utf8').decode('utf8')
return strip_wrapping(html) | Renders the given HTML tree, and strips any wrapping that was applied in
get_html_tree().
You should avoid further processing of the given tree after calling this
method because we modify namespaced tags here. | train | https://github.com/closeio/quotequail/blob/8a3960c033d595b25a8bbc2c340be898e3065b5f/quotequail/_html.py#L220-L237 | [
"def strip_wrapping(html):\n \"\"\"\n Removes the wrapping that might have resulted when using get_html_tree().\n \"\"\"\n if html.startswith('<div>') and html.endswith('</div>'):\n html = html[5:-6]\n return html.strip()\n"
] | # HTML utils
import lxml.html
import lxml.etree
from ._patterns import FORWARD_LINE, FORWARD_STYLES, MULTIPLE_WHITESPACE_RE
INLINE_TAGS = ['a', 'b', 'em', 'i', 'strong', 'span', 'font', 'q',
'object', 'bdo', 'sub', 'sup', 'center', 'td', 'th']
BEGIN = 'begin'
END = 'end'
try:
string_class = basestring # Python 2.7
except NameError:
string_class = str # Python 3.x
def trim_tree_after(element, include_element=True):
"""
Removes the document tree following the given element. If include_element
is True, the given element is kept in the tree, otherwise it is removed.
"""
el = element
for parent_el in element.iterancestors():
el.tail = None
if el != element or include_element:
el = el.getnext()
while el is not None:
remove_el = el
el = el.getnext()
parent_el.remove(remove_el)
el = parent_el
def trim_tree_before(element, include_element=True, keep_head=True):
"""
Removes the document tree preceding the given element. If include_element
is True, the given element is kept in the tree, otherwise it is removed.
"""
el = element
for parent_el in element.iterancestors():
parent_el.text = None
if el != element or include_element:
el = el.getprevious()
else:
parent_el.text = el.tail
while el is not None:
remove_el = el
el = el.getprevious()
tag = remove_el.tag
is_head = isinstance(tag, string_class) and tag.lower() == 'head'
if not keep_head or not is_head:
parent_el.remove(remove_el)
el = parent_el
def trim_slice(lines, slice_tuple):
"""
Trim a slice tuple (begin, end) so it starts at the first non-empty line
(obtained via indented_tree_line_generator / get_line_info) and ends at the
last non-empty line within the slice. Returns the new slice.
"""
def _empty(line):
return not line or line.strip() == '>'
if not slice_tuple:
return None
slice_start, slice_end = slice_tuple
if slice_start is None:
slice_start = 0
if slice_end is None:
slice_end = len(lines)
# Trim from beginning
while slice_start < slice_end and _empty(lines[slice_start]):
slice_start += 1
# Trim from end
while slice_end > slice_start and _empty(lines[slice_end-1]):
slice_end -= 1
return (slice_start, slice_end)
def unindent_tree(element):
"""
Removes the outermost indent. For example, the tree
"<div>A<blockqote>B<div>C<blockquote>D</blockquote>E</div>F</blockquote>G</div>"
is transformed to
"<div>A<div>B<div>C<blockquote>D</blockquote>E</div>F</div>G</div>"
"""
for el in element.iter():
if is_indentation_element(el):
el.attrib.clear()
el.tag = 'div'
return
def slice_tree(tree, start_refs, end_refs, slice_tuple, html_copy=None):
"""
Slices the HTML tree with the given start_refs and end_refs (obtained via
get_line_info) at the given slice_tuple, a tuple (start, end) containing
the start and end of the slice (or None, to start from the start / end at
the end of the tree). If html_copy is specified, a new tree is constructed
from the given HTML (which must be the equal to the original tree's HTML*).
The resulting tree is returned.
*) The reason we have to specify the HTML is that we can't reliably
construct a copy of the tree using copy.copy() (see bug
https://bugs.launchpad.net/lxml/+bug/1562550).
"""
start_ref = None
end_ref = None
if slice_tuple:
slice_start, slice_end = slice_tuple
if ((slice_start is not None and slice_start >= len(start_refs)) or
(slice_end is not None and slice_end <= 0)):
return get_html_tree('')
if slice_start != None and slice_start <= 0:
slice_start = None
if slice_end != None and slice_end >= len(start_refs):
slice_end = None
else:
slice_start, slice_end = None, None
if slice_start is not None:
start_ref = start_refs[slice_start]
if slice_end is not None:
if slice_end < len(end_refs):
end_ref = end_refs[slice_end-1]
if html_copy is not None:
et = lxml.etree.ElementTree(tree)
new_tree = get_html_tree(html_copy)
if start_ref:
selector = et.getelementpath(start_ref[0])
start_ref = (new_tree.find(selector), start_ref[1])
if end_ref:
selector = et.getelementpath(end_ref[0])
end_ref = (new_tree.find(selector), end_ref[1])
else:
new_tree = tree
if start_ref:
include_start = (start_ref[1] == BEGIN)
if end_ref:
include_end = (end_ref[1] == END)
# If start_ref is the same as end_ref, and we don't include the element,
# we are removing the entire tree. We need to handle this separately,
# otherwise trim_tree_after won't work because it can't find the already
# removed reference.
if start_ref and end_ref and start_ref[0] == end_ref[0]:
if not include_start or not include_end:
return get_html_tree('')
if start_ref:
trim_tree_before(start_ref[0], include_element=include_start)
if end_ref:
trim_tree_after(end_ref[0], include_element=include_end)
return new_tree
def get_html_tree(html):
"""
Given the HTML string, returns a LXML tree object. The tree is wrapped in
<div> elements if it doesn't have a top level tag or parsing would
otherwise result in an error. The wrapping can be later removed with
strip_wrapping().
"""
parser = lxml.html.HTMLParser(encoding='utf-8')
html = html.encode('utf8')
try:
tree = lxml.html.fromstring(html, parser=parser)
except lxml.etree.Error:
# E.g. empty document. Use dummy <div>
tree = lxml.html.fromstring('<div></div>')
# If the document doesn't start with a top level tag, wrap it with a <div>
# that will be later stripped out for consistent behavior.
if tree.tag not in lxml.html.defs.top_level_tags:
html = b'<div>%s</div>' % html
tree = lxml.html.fromstring(html, parser=parser)
# HACK for Outlook emails, where tags like <o:p> are rendered as <p>. We
# can generally ignore these tags so we replace them with <span>, which
# doesn't cause a line break. Also, we can't look up the element path of
# tags that contain colons. When rendering the tree, we will restore the
# tag name.
for el in tree.iter():
if el.nsmap or (isinstance(el.tag, string_class) and ':' in el.tag):
if el.nsmap:
actual_tag_name = '{}:{}'.format(list(el.nsmap.keys())[0], el.tag)
else:
actual_tag_name = el.tag
el.tag = 'span'
el.attrib['__tag_name'] = actual_tag_name
return tree
def strip_wrapping(html):
"""
Removes the wrapping that might have resulted when using get_html_tree().
"""
if html.startswith('<div>') and html.endswith('</div>'):
html = html[5:-6]
return html.strip()
def is_indentation_element(element):
if isinstance(element.tag, string_class):
return element.tag.lower() == 'blockquote'
return False
def tree_token_generator(el, indentation_level=0):
"""
Internal generator that yields tokens for the given HTML element as
follows:
- A tuple (LXML element, BEGIN, indentation_level)
- Text right after the start of the tag, or None.
- Recursively calls the token generator for all child objects
- A tuple (LXML element, END, indentation_level)
- Text right after the end of the tag, or None.
"""
if not isinstance(el.tag, string_class):
return
tag_name = el.tag.lower()
is_indentation = is_indentation_element(el)
if is_indentation:
indentation_level += 1
yield (el, BEGIN, indentation_level)
yield el.text
for child in el.iterchildren():
for token in tree_token_generator(child, indentation_level):
yield token
if is_indentation:
indentation_level -= 1
yield (el, END, indentation_level)
yield el.tail
def tree_line_generator(el, max_lines=None):
"""
Internal generator that iterates through an LXML tree and yields a tuple
per line. In this context, lines are blocks of text separated by <br> tags
or by block elements. The tuples contain the following elements:
- A tuple with the element reference (element, position) for the start
of the line. The tuple consists of:
- The LXML HTML element which references the line
- Whether the text starts at the beginning of the referenced element,
or after the closing tag
- A similar tuple indicating the ending of the line.
- The email indentation level, if detected.
- The plain (non-HTML) text of the line
If max_lines is specified, the generator stops after yielding the given
amount of lines.
For example, the HTML tree "<div>foo <span>bar</span><br>baz</div>" yields:
- ((<Element div>, 'begin'), (<Element br>, 'begin'), 0, 'foo bar')
- ((<Element br>, 'end'), (<Element div>, 'end'), 0, 'baz').
To illustrate the indentation level, the HTML tree
'<div><blockquote>hi</blockquote>world</div>' yields:
- ((<Element blockquote>, 'begin'), (<Element blockquote>, 'end'), 1, 'hi')
- ((<Element blockquote>, 'end'), (<Element div>, 'end'), 0, 'world')
"""
def _trim_spaces(text):
return MULTIPLE_WHITESPACE_RE.sub(' ', text).strip()
counter = 1
if max_lines != None and counter > max_lines:
return
# Buffer for the current line.
line = ''
# The reference tuple (element, position) for the start of the line.
start_ref = None
# The indentation level at the start of the line.
start_indentation_level = None
for token in tree_token_generator(el):
if token is None:
continue
elif isinstance(token, tuple):
el, state, indentation_level = token
tag_name = el.tag.lower()
line_break = (tag_name == 'br' and state == BEGIN)
is_block = (tag_name not in INLINE_TAGS)
is_forward = (is_block and state == BEGIN and
el.attrib.get('style') in FORWARD_STYLES)
if is_block or line_break:
line = _trim_spaces(line)
if line or line_break or is_forward:
end_ref = (el, state)
yield start_ref, end_ref, start_indentation_level, line
counter += 1
if max_lines != None and counter > max_lines:
return
line = ''
if is_forward:
# Simulate forward
yield (end_ref, end_ref, start_indentation_level,
FORWARD_LINE)
counter += 1
if max_lines != None and counter > max_lines:
return
if not line:
start_ref = (el, state)
start_indentation_level = indentation_level
elif isinstance(token, string_class):
line += token
else:
raise RuntimeError('invalid token: {}'.format(token))
line = _trim_spaces(line)
if line:
yield line
def indented_tree_line_generator(el, max_lines=None):
"""
Like tree_line_generator, but yields tuples (start_ref, end_ref, line),
where the line already takes the indentation into account by having "> "
prepended. If a line already starts with ">", it is escaped ("\\>"). This
makes it possible to reliably use methods that analyze plain text to detect
quoting.
"""
gen = tree_line_generator(el, max_lines)
for start_ref, end_ref, indentation_level, line in gen:
# Escape line
if line.startswith('>'):
line = '\\' + line
yield start_ref, end_ref, '> '*indentation_level + line
def get_line_info(tree, max_lines=None):
"""
Shortcut for indented_tree_line_generator() that returns an array of
start references, an array of corresponding end references (see
tree_line_generator() docs), and an array of corresponding lines.
"""
line_gen = indented_tree_line_generator(tree, max_lines=max_lines)
line_gen_result = list(zip(*line_gen))
if line_gen_result:
return line_gen_result
else:
return [], [], []
|
closeio/quotequail | quotequail/_html.py | tree_token_generator | python | def tree_token_generator(el, indentation_level=0):
if not isinstance(el.tag, string_class):
return
tag_name = el.tag.lower()
is_indentation = is_indentation_element(el)
if is_indentation:
indentation_level += 1
yield (el, BEGIN, indentation_level)
yield el.text
for child in el.iterchildren():
for token in tree_token_generator(child, indentation_level):
yield token
if is_indentation:
indentation_level -= 1
yield (el, END, indentation_level)
yield el.tail | Internal generator that yields tokens for the given HTML element as
follows:
- A tuple (LXML element, BEGIN, indentation_level)
- Text right after the start of the tag, or None.
- Recursively calls the token generator for all child objects
- A tuple (LXML element, END, indentation_level)
- Text right after the end of the tag, or None. | train | https://github.com/closeio/quotequail/blob/8a3960c033d595b25a8bbc2c340be898e3065b5f/quotequail/_html.py#L244-L279 | [
"def is_indentation_element(element):\n if isinstance(element.tag, string_class):\n return element.tag.lower() == 'blockquote'\n return False\n",
"def tree_token_generator(el, indentation_level=0):\n \"\"\"\n Internal generator that yields tokens for the given HTML element as\n follows:\n\n ... | # HTML utils
import lxml.html
import lxml.etree
from ._patterns import FORWARD_LINE, FORWARD_STYLES, MULTIPLE_WHITESPACE_RE
INLINE_TAGS = ['a', 'b', 'em', 'i', 'strong', 'span', 'font', 'q',
'object', 'bdo', 'sub', 'sup', 'center', 'td', 'th']
BEGIN = 'begin'
END = 'end'
try:
string_class = basestring # Python 2.7
except NameError:
string_class = str # Python 3.x
def trim_tree_after(element, include_element=True):
"""
Removes the document tree following the given element. If include_element
is True, the given element is kept in the tree, otherwise it is removed.
"""
el = element
for parent_el in element.iterancestors():
el.tail = None
if el != element or include_element:
el = el.getnext()
while el is not None:
remove_el = el
el = el.getnext()
parent_el.remove(remove_el)
el = parent_el
def trim_tree_before(element, include_element=True, keep_head=True):
"""
Removes the document tree preceding the given element. If include_element
is True, the given element is kept in the tree, otherwise it is removed.
"""
el = element
for parent_el in element.iterancestors():
parent_el.text = None
if el != element or include_element:
el = el.getprevious()
else:
parent_el.text = el.tail
while el is not None:
remove_el = el
el = el.getprevious()
tag = remove_el.tag
is_head = isinstance(tag, string_class) and tag.lower() == 'head'
if not keep_head or not is_head:
parent_el.remove(remove_el)
el = parent_el
def trim_slice(lines, slice_tuple):
"""
Trim a slice tuple (begin, end) so it starts at the first non-empty line
(obtained via indented_tree_line_generator / get_line_info) and ends at the
last non-empty line within the slice. Returns the new slice.
"""
def _empty(line):
return not line or line.strip() == '>'
if not slice_tuple:
return None
slice_start, slice_end = slice_tuple
if slice_start is None:
slice_start = 0
if slice_end is None:
slice_end = len(lines)
# Trim from beginning
while slice_start < slice_end and _empty(lines[slice_start]):
slice_start += 1
# Trim from end
while slice_end > slice_start and _empty(lines[slice_end-1]):
slice_end -= 1
return (slice_start, slice_end)
def unindent_tree(element):
"""
Removes the outermost indent. For example, the tree
"<div>A<blockqote>B<div>C<blockquote>D</blockquote>E</div>F</blockquote>G</div>"
is transformed to
"<div>A<div>B<div>C<blockquote>D</blockquote>E</div>F</div>G</div>"
"""
for el in element.iter():
if is_indentation_element(el):
el.attrib.clear()
el.tag = 'div'
return
def slice_tree(tree, start_refs, end_refs, slice_tuple, html_copy=None):
"""
Slices the HTML tree with the given start_refs and end_refs (obtained via
get_line_info) at the given slice_tuple, a tuple (start, end) containing
the start and end of the slice (or None, to start from the start / end at
the end of the tree). If html_copy is specified, a new tree is constructed
from the given HTML (which must be the equal to the original tree's HTML*).
The resulting tree is returned.
*) The reason we have to specify the HTML is that we can't reliably
construct a copy of the tree using copy.copy() (see bug
https://bugs.launchpad.net/lxml/+bug/1562550).
"""
start_ref = None
end_ref = None
if slice_tuple:
slice_start, slice_end = slice_tuple
if ((slice_start is not None and slice_start >= len(start_refs)) or
(slice_end is not None and slice_end <= 0)):
return get_html_tree('')
if slice_start != None and slice_start <= 0:
slice_start = None
if slice_end != None and slice_end >= len(start_refs):
slice_end = None
else:
slice_start, slice_end = None, None
if slice_start is not None:
start_ref = start_refs[slice_start]
if slice_end is not None:
if slice_end < len(end_refs):
end_ref = end_refs[slice_end-1]
if html_copy is not None:
et = lxml.etree.ElementTree(tree)
new_tree = get_html_tree(html_copy)
if start_ref:
selector = et.getelementpath(start_ref[0])
start_ref = (new_tree.find(selector), start_ref[1])
if end_ref:
selector = et.getelementpath(end_ref[0])
end_ref = (new_tree.find(selector), end_ref[1])
else:
new_tree = tree
if start_ref:
include_start = (start_ref[1] == BEGIN)
if end_ref:
include_end = (end_ref[1] == END)
# If start_ref is the same as end_ref, and we don't include the element,
# we are removing the entire tree. We need to handle this separately,
# otherwise trim_tree_after won't work because it can't find the already
# removed reference.
if start_ref and end_ref and start_ref[0] == end_ref[0]:
if not include_start or not include_end:
return get_html_tree('')
if start_ref:
trim_tree_before(start_ref[0], include_element=include_start)
if end_ref:
trim_tree_after(end_ref[0], include_element=include_end)
return new_tree
def get_html_tree(html):
"""
Given the HTML string, returns a LXML tree object. The tree is wrapped in
<div> elements if it doesn't have a top level tag or parsing would
otherwise result in an error. The wrapping can be later removed with
strip_wrapping().
"""
parser = lxml.html.HTMLParser(encoding='utf-8')
html = html.encode('utf8')
try:
tree = lxml.html.fromstring(html, parser=parser)
except lxml.etree.Error:
# E.g. empty document. Use dummy <div>
tree = lxml.html.fromstring('<div></div>')
# If the document doesn't start with a top level tag, wrap it with a <div>
# that will be later stripped out for consistent behavior.
if tree.tag not in lxml.html.defs.top_level_tags:
html = b'<div>%s</div>' % html
tree = lxml.html.fromstring(html, parser=parser)
# HACK for Outlook emails, where tags like <o:p> are rendered as <p>. We
# can generally ignore these tags so we replace them with <span>, which
# doesn't cause a line break. Also, we can't look up the element path of
# tags that contain colons. When rendering the tree, we will restore the
# tag name.
for el in tree.iter():
if el.nsmap or (isinstance(el.tag, string_class) and ':' in el.tag):
if el.nsmap:
actual_tag_name = '{}:{}'.format(list(el.nsmap.keys())[0], el.tag)
else:
actual_tag_name = el.tag
el.tag = 'span'
el.attrib['__tag_name'] = actual_tag_name
return tree
def strip_wrapping(html):
"""
Removes the wrapping that might have resulted when using get_html_tree().
"""
if html.startswith('<div>') and html.endswith('</div>'):
html = html[5:-6]
return html.strip()
def render_html_tree(tree):
"""
Renders the given HTML tree, and strips any wrapping that was applied in
get_html_tree().
You should avoid further processing of the given tree after calling this
method because we modify namespaced tags here.
"""
# Restore any tag names that were changed in get_html_tree()
for el in tree.iter():
if '__tag_name' in el.attrib:
actual_tag_name = el.attrib.pop('__tag_name')
el.tag = actual_tag_name
html = lxml.html.tostring(tree, encoding='utf8').decode('utf8')
return strip_wrapping(html)
def is_indentation_element(element):
if isinstance(element.tag, string_class):
return element.tag.lower() == 'blockquote'
return False
def tree_line_generator(el, max_lines=None):
"""
Internal generator that iterates through an LXML tree and yields a tuple
per line. In this context, lines are blocks of text separated by <br> tags
or by block elements. The tuples contain the following elements:
- A tuple with the element reference (element, position) for the start
of the line. The tuple consists of:
- The LXML HTML element which references the line
- Whether the text starts at the beginning of the referenced element,
or after the closing tag
- A similar tuple indicating the ending of the line.
- The email indentation level, if detected.
- The plain (non-HTML) text of the line
If max_lines is specified, the generator stops after yielding the given
amount of lines.
For example, the HTML tree "<div>foo <span>bar</span><br>baz</div>" yields:
- ((<Element div>, 'begin'), (<Element br>, 'begin'), 0, 'foo bar')
- ((<Element br>, 'end'), (<Element div>, 'end'), 0, 'baz').
To illustrate the indentation level, the HTML tree
'<div><blockquote>hi</blockquote>world</div>' yields:
- ((<Element blockquote>, 'begin'), (<Element blockquote>, 'end'), 1, 'hi')
- ((<Element blockquote>, 'end'), (<Element div>, 'end'), 0, 'world')
"""
def _trim_spaces(text):
return MULTIPLE_WHITESPACE_RE.sub(' ', text).strip()
counter = 1
if max_lines != None and counter > max_lines:
return
# Buffer for the current line.
line = ''
# The reference tuple (element, position) for the start of the line.
start_ref = None
# The indentation level at the start of the line.
start_indentation_level = None
for token in tree_token_generator(el):
if token is None:
continue
elif isinstance(token, tuple):
el, state, indentation_level = token
tag_name = el.tag.lower()
line_break = (tag_name == 'br' and state == BEGIN)
is_block = (tag_name not in INLINE_TAGS)
is_forward = (is_block and state == BEGIN and
el.attrib.get('style') in FORWARD_STYLES)
if is_block or line_break:
line = _trim_spaces(line)
if line or line_break or is_forward:
end_ref = (el, state)
yield start_ref, end_ref, start_indentation_level, line
counter += 1
if max_lines != None and counter > max_lines:
return
line = ''
if is_forward:
# Simulate forward
yield (end_ref, end_ref, start_indentation_level,
FORWARD_LINE)
counter += 1
if max_lines != None and counter > max_lines:
return
if not line:
start_ref = (el, state)
start_indentation_level = indentation_level
elif isinstance(token, string_class):
line += token
else:
raise RuntimeError('invalid token: {}'.format(token))
line = _trim_spaces(line)
if line:
yield line
def indented_tree_line_generator(el, max_lines=None):
"""
Like tree_line_generator, but yields tuples (start_ref, end_ref, line),
where the line already takes the indentation into account by having "> "
prepended. If a line already starts with ">", it is escaped ("\\>"). This
makes it possible to reliably use methods that analyze plain text to detect
quoting.
"""
gen = tree_line_generator(el, max_lines)
for start_ref, end_ref, indentation_level, line in gen:
# Escape line
if line.startswith('>'):
line = '\\' + line
yield start_ref, end_ref, '> '*indentation_level + line
def get_line_info(tree, max_lines=None):
"""
Shortcut for indented_tree_line_generator() that returns an array of
start references, an array of corresponding end references (see
tree_line_generator() docs), and an array of corresponding lines.
"""
line_gen = indented_tree_line_generator(tree, max_lines=max_lines)
line_gen_result = list(zip(*line_gen))
if line_gen_result:
return line_gen_result
else:
return [], [], []
|
closeio/quotequail | quotequail/_html.py | tree_line_generator | python | def tree_line_generator(el, max_lines=None):
def _trim_spaces(text):
return MULTIPLE_WHITESPACE_RE.sub(' ', text).strip()
counter = 1
if max_lines != None and counter > max_lines:
return
# Buffer for the current line.
line = ''
# The reference tuple (element, position) for the start of the line.
start_ref = None
# The indentation level at the start of the line.
start_indentation_level = None
for token in tree_token_generator(el):
if token is None:
continue
elif isinstance(token, tuple):
el, state, indentation_level = token
tag_name = el.tag.lower()
line_break = (tag_name == 'br' and state == BEGIN)
is_block = (tag_name not in INLINE_TAGS)
is_forward = (is_block and state == BEGIN and
el.attrib.get('style') in FORWARD_STYLES)
if is_block or line_break:
line = _trim_spaces(line)
if line or line_break or is_forward:
end_ref = (el, state)
yield start_ref, end_ref, start_indentation_level, line
counter += 1
if max_lines != None and counter > max_lines:
return
line = ''
if is_forward:
# Simulate forward
yield (end_ref, end_ref, start_indentation_level,
FORWARD_LINE)
counter += 1
if max_lines != None and counter > max_lines:
return
if not line:
start_ref = (el, state)
start_indentation_level = indentation_level
elif isinstance(token, string_class):
line += token
else:
raise RuntimeError('invalid token: {}'.format(token))
line = _trim_spaces(line)
if line:
yield line | Internal generator that iterates through an LXML tree and yields a tuple
per line. In this context, lines are blocks of text separated by <br> tags
or by block elements. The tuples contain the following elements:
- A tuple with the element reference (element, position) for the start
of the line. The tuple consists of:
- The LXML HTML element which references the line
- Whether the text starts at the beginning of the referenced element,
or after the closing tag
- A similar tuple indicating the ending of the line.
- The email indentation level, if detected.
- The plain (non-HTML) text of the line
If max_lines is specified, the generator stops after yielding the given
amount of lines.
For example, the HTML tree "<div>foo <span>bar</span><br>baz</div>" yields:
- ((<Element div>, 'begin'), (<Element br>, 'begin'), 0, 'foo bar')
- ((<Element br>, 'end'), (<Element div>, 'end'), 0, 'baz').
To illustrate the indentation level, the HTML tree
'<div><blockquote>hi</blockquote>world</div>' yields:
- ((<Element blockquote>, 'begin'), (<Element blockquote>, 'end'), 1, 'hi')
- ((<Element blockquote>, 'end'), (<Element div>, 'end'), 0, 'world') | train | https://github.com/closeio/quotequail/blob/8a3960c033d595b25a8bbc2c340be898e3065b5f/quotequail/_html.py#L281-L372 | [
"def tree_token_generator(el, indentation_level=0):\n \"\"\"\n Internal generator that yields tokens for the given HTML element as\n follows:\n\n - A tuple (LXML element, BEGIN, indentation_level)\n - Text right after the start of the tag, or None.\n - Recursively calls the token generator for all... | # HTML utils
import lxml.html
import lxml.etree
from ._patterns import FORWARD_LINE, FORWARD_STYLES, MULTIPLE_WHITESPACE_RE
INLINE_TAGS = ['a', 'b', 'em', 'i', 'strong', 'span', 'font', 'q',
'object', 'bdo', 'sub', 'sup', 'center', 'td', 'th']
BEGIN = 'begin'
END = 'end'
try:
string_class = basestring # Python 2.7
except NameError:
string_class = str # Python 3.x
def trim_tree_after(element, include_element=True):
"""
Removes the document tree following the given element. If include_element
is True, the given element is kept in the tree, otherwise it is removed.
"""
el = element
for parent_el in element.iterancestors():
el.tail = None
if el != element or include_element:
el = el.getnext()
while el is not None:
remove_el = el
el = el.getnext()
parent_el.remove(remove_el)
el = parent_el
def trim_tree_before(element, include_element=True, keep_head=True):
"""
Removes the document tree preceding the given element. If include_element
is True, the given element is kept in the tree, otherwise it is removed.
"""
el = element
for parent_el in element.iterancestors():
parent_el.text = None
if el != element or include_element:
el = el.getprevious()
else:
parent_el.text = el.tail
while el is not None:
remove_el = el
el = el.getprevious()
tag = remove_el.tag
is_head = isinstance(tag, string_class) and tag.lower() == 'head'
if not keep_head or not is_head:
parent_el.remove(remove_el)
el = parent_el
def trim_slice(lines, slice_tuple):
"""
Trim a slice tuple (begin, end) so it starts at the first non-empty line
(obtained via indented_tree_line_generator / get_line_info) and ends at the
last non-empty line within the slice. Returns the new slice.
"""
def _empty(line):
return not line or line.strip() == '>'
if not slice_tuple:
return None
slice_start, slice_end = slice_tuple
if slice_start is None:
slice_start = 0
if slice_end is None:
slice_end = len(lines)
# Trim from beginning
while slice_start < slice_end and _empty(lines[slice_start]):
slice_start += 1
# Trim from end
while slice_end > slice_start and _empty(lines[slice_end-1]):
slice_end -= 1
return (slice_start, slice_end)
def unindent_tree(element):
"""
Removes the outermost indent. For example, the tree
"<div>A<blockqote>B<div>C<blockquote>D</blockquote>E</div>F</blockquote>G</div>"
is transformed to
"<div>A<div>B<div>C<blockquote>D</blockquote>E</div>F</div>G</div>"
"""
for el in element.iter():
if is_indentation_element(el):
el.attrib.clear()
el.tag = 'div'
return
def slice_tree(tree, start_refs, end_refs, slice_tuple, html_copy=None):
"""
Slices the HTML tree with the given start_refs and end_refs (obtained via
get_line_info) at the given slice_tuple, a tuple (start, end) containing
the start and end of the slice (or None, to start from the start / end at
the end of the tree). If html_copy is specified, a new tree is constructed
from the given HTML (which must be the equal to the original tree's HTML*).
The resulting tree is returned.
*) The reason we have to specify the HTML is that we can't reliably
construct a copy of the tree using copy.copy() (see bug
https://bugs.launchpad.net/lxml/+bug/1562550).
"""
start_ref = None
end_ref = None
if slice_tuple:
slice_start, slice_end = slice_tuple
if ((slice_start is not None and slice_start >= len(start_refs)) or
(slice_end is not None and slice_end <= 0)):
return get_html_tree('')
if slice_start != None and slice_start <= 0:
slice_start = None
if slice_end != None and slice_end >= len(start_refs):
slice_end = None
else:
slice_start, slice_end = None, None
if slice_start is not None:
start_ref = start_refs[slice_start]
if slice_end is not None:
if slice_end < len(end_refs):
end_ref = end_refs[slice_end-1]
if html_copy is not None:
et = lxml.etree.ElementTree(tree)
new_tree = get_html_tree(html_copy)
if start_ref:
selector = et.getelementpath(start_ref[0])
start_ref = (new_tree.find(selector), start_ref[1])
if end_ref:
selector = et.getelementpath(end_ref[0])
end_ref = (new_tree.find(selector), end_ref[1])
else:
new_tree = tree
if start_ref:
include_start = (start_ref[1] == BEGIN)
if end_ref:
include_end = (end_ref[1] == END)
# If start_ref is the same as end_ref, and we don't include the element,
# we are removing the entire tree. We need to handle this separately,
# otherwise trim_tree_after won't work because it can't find the already
# removed reference.
if start_ref and end_ref and start_ref[0] == end_ref[0]:
if not include_start or not include_end:
return get_html_tree('')
if start_ref:
trim_tree_before(start_ref[0], include_element=include_start)
if end_ref:
trim_tree_after(end_ref[0], include_element=include_end)
return new_tree
def get_html_tree(html):
"""
Given the HTML string, returns a LXML tree object. The tree is wrapped in
<div> elements if it doesn't have a top level tag or parsing would
otherwise result in an error. The wrapping can be later removed with
strip_wrapping().
"""
parser = lxml.html.HTMLParser(encoding='utf-8')
html = html.encode('utf8')
try:
tree = lxml.html.fromstring(html, parser=parser)
except lxml.etree.Error:
# E.g. empty document. Use dummy <div>
tree = lxml.html.fromstring('<div></div>')
# If the document doesn't start with a top level tag, wrap it with a <div>
# that will be later stripped out for consistent behavior.
if tree.tag not in lxml.html.defs.top_level_tags:
html = b'<div>%s</div>' % html
tree = lxml.html.fromstring(html, parser=parser)
# HACK for Outlook emails, where tags like <o:p> are rendered as <p>. We
# can generally ignore these tags so we replace them with <span>, which
# doesn't cause a line break. Also, we can't look up the element path of
# tags that contain colons. When rendering the tree, we will restore the
# tag name.
for el in tree.iter():
if el.nsmap or (isinstance(el.tag, string_class) and ':' in el.tag):
if el.nsmap:
actual_tag_name = '{}:{}'.format(list(el.nsmap.keys())[0], el.tag)
else:
actual_tag_name = el.tag
el.tag = 'span'
el.attrib['__tag_name'] = actual_tag_name
return tree
def strip_wrapping(html):
"""
Removes the wrapping that might have resulted when using get_html_tree().
"""
if html.startswith('<div>') and html.endswith('</div>'):
html = html[5:-6]
return html.strip()
def render_html_tree(tree):
"""
Renders the given HTML tree, and strips any wrapping that was applied in
get_html_tree().
You should avoid further processing of the given tree after calling this
method because we modify namespaced tags here.
"""
# Restore any tag names that were changed in get_html_tree()
for el in tree.iter():
if '__tag_name' in el.attrib:
actual_tag_name = el.attrib.pop('__tag_name')
el.tag = actual_tag_name
html = lxml.html.tostring(tree, encoding='utf8').decode('utf8')
return strip_wrapping(html)
def is_indentation_element(element):
if isinstance(element.tag, string_class):
return element.tag.lower() == 'blockquote'
return False
def tree_token_generator(el, indentation_level=0):
"""
Internal generator that yields tokens for the given HTML element as
follows:
- A tuple (LXML element, BEGIN, indentation_level)
- Text right after the start of the tag, or None.
- Recursively calls the token generator for all child objects
- A tuple (LXML element, END, indentation_level)
- Text right after the end of the tag, or None.
"""
if not isinstance(el.tag, string_class):
return
tag_name = el.tag.lower()
is_indentation = is_indentation_element(el)
if is_indentation:
indentation_level += 1
yield (el, BEGIN, indentation_level)
yield el.text
for child in el.iterchildren():
for token in tree_token_generator(child, indentation_level):
yield token
if is_indentation:
indentation_level -= 1
yield (el, END, indentation_level)
yield el.tail
def indented_tree_line_generator(el, max_lines=None):
"""
Like tree_line_generator, but yields tuples (start_ref, end_ref, line),
where the line already takes the indentation into account by having "> "
prepended. If a line already starts with ">", it is escaped ("\\>"). This
makes it possible to reliably use methods that analyze plain text to detect
quoting.
"""
gen = tree_line_generator(el, max_lines)
for start_ref, end_ref, indentation_level, line in gen:
# Escape line
if line.startswith('>'):
line = '\\' + line
yield start_ref, end_ref, '> '*indentation_level + line
def get_line_info(tree, max_lines=None):
"""
Shortcut for indented_tree_line_generator() that returns an array of
start references, an array of corresponding end references (see
tree_line_generator() docs), and an array of corresponding lines.
"""
line_gen = indented_tree_line_generator(tree, max_lines=max_lines)
line_gen_result = list(zip(*line_gen))
if line_gen_result:
return line_gen_result
else:
return [], [], []
|
closeio/quotequail | quotequail/_html.py | indented_tree_line_generator | python | def indented_tree_line_generator(el, max_lines=None):
gen = tree_line_generator(el, max_lines)
for start_ref, end_ref, indentation_level, line in gen:
# Escape line
if line.startswith('>'):
line = '\\' + line
yield start_ref, end_ref, '> '*indentation_level + line | Like tree_line_generator, but yields tuples (start_ref, end_ref, line),
where the line already takes the indentation into account by having "> "
prepended. If a line already starts with ">", it is escaped ("\\>"). This
makes it possible to reliably use methods that analyze plain text to detect
quoting. | train | https://github.com/closeio/quotequail/blob/8a3960c033d595b25a8bbc2c340be898e3065b5f/quotequail/_html.py#L374-L387 | [
"def tree_line_generator(el, max_lines=None):\n \"\"\"\n Internal generator that iterates through an LXML tree and yields a tuple\n per line. In this context, lines are blocks of text separated by <br> tags\n or by block elements. The tuples contain the following elements:\n\n - A tuple with the elem... | # HTML utils
import lxml.html
import lxml.etree
from ._patterns import FORWARD_LINE, FORWARD_STYLES, MULTIPLE_WHITESPACE_RE
INLINE_TAGS = ['a', 'b', 'em', 'i', 'strong', 'span', 'font', 'q',
'object', 'bdo', 'sub', 'sup', 'center', 'td', 'th']
BEGIN = 'begin'
END = 'end'
try:
string_class = basestring # Python 2.7
except NameError:
string_class = str # Python 3.x
def trim_tree_after(element, include_element=True):
"""
Removes the document tree following the given element. If include_element
is True, the given element is kept in the tree, otherwise it is removed.
"""
el = element
for parent_el in element.iterancestors():
el.tail = None
if el != element or include_element:
el = el.getnext()
while el is not None:
remove_el = el
el = el.getnext()
parent_el.remove(remove_el)
el = parent_el
def trim_tree_before(element, include_element=True, keep_head=True):
"""
Removes the document tree preceding the given element. If include_element
is True, the given element is kept in the tree, otherwise it is removed.
"""
el = element
for parent_el in element.iterancestors():
parent_el.text = None
if el != element or include_element:
el = el.getprevious()
else:
parent_el.text = el.tail
while el is not None:
remove_el = el
el = el.getprevious()
tag = remove_el.tag
is_head = isinstance(tag, string_class) and tag.lower() == 'head'
if not keep_head or not is_head:
parent_el.remove(remove_el)
el = parent_el
def trim_slice(lines, slice_tuple):
"""
Trim a slice tuple (begin, end) so it starts at the first non-empty line
(obtained via indented_tree_line_generator / get_line_info) and ends at the
last non-empty line within the slice. Returns the new slice.
"""
def _empty(line):
return not line or line.strip() == '>'
if not slice_tuple:
return None
slice_start, slice_end = slice_tuple
if slice_start is None:
slice_start = 0
if slice_end is None:
slice_end = len(lines)
# Trim from beginning
while slice_start < slice_end and _empty(lines[slice_start]):
slice_start += 1
# Trim from end
while slice_end > slice_start and _empty(lines[slice_end-1]):
slice_end -= 1
return (slice_start, slice_end)
def unindent_tree(element):
"""
Removes the outermost indent. For example, the tree
"<div>A<blockqote>B<div>C<blockquote>D</blockquote>E</div>F</blockquote>G</div>"
is transformed to
"<div>A<div>B<div>C<blockquote>D</blockquote>E</div>F</div>G</div>"
"""
for el in element.iter():
if is_indentation_element(el):
el.attrib.clear()
el.tag = 'div'
return
def slice_tree(tree, start_refs, end_refs, slice_tuple, html_copy=None):
"""
Slices the HTML tree with the given start_refs and end_refs (obtained via
get_line_info) at the given slice_tuple, a tuple (start, end) containing
the start and end of the slice (or None, to start from the start / end at
the end of the tree). If html_copy is specified, a new tree is constructed
from the given HTML (which must be the equal to the original tree's HTML*).
The resulting tree is returned.
*) The reason we have to specify the HTML is that we can't reliably
construct a copy of the tree using copy.copy() (see bug
https://bugs.launchpad.net/lxml/+bug/1562550).
"""
start_ref = None
end_ref = None
if slice_tuple:
slice_start, slice_end = slice_tuple
if ((slice_start is not None and slice_start >= len(start_refs)) or
(slice_end is not None and slice_end <= 0)):
return get_html_tree('')
if slice_start != None and slice_start <= 0:
slice_start = None
if slice_end != None and slice_end >= len(start_refs):
slice_end = None
else:
slice_start, slice_end = None, None
if slice_start is not None:
start_ref = start_refs[slice_start]
if slice_end is not None:
if slice_end < len(end_refs):
end_ref = end_refs[slice_end-1]
if html_copy is not None:
et = lxml.etree.ElementTree(tree)
new_tree = get_html_tree(html_copy)
if start_ref:
selector = et.getelementpath(start_ref[0])
start_ref = (new_tree.find(selector), start_ref[1])
if end_ref:
selector = et.getelementpath(end_ref[0])
end_ref = (new_tree.find(selector), end_ref[1])
else:
new_tree = tree
if start_ref:
include_start = (start_ref[1] == BEGIN)
if end_ref:
include_end = (end_ref[1] == END)
# If start_ref is the same as end_ref, and we don't include the element,
# we are removing the entire tree. We need to handle this separately,
# otherwise trim_tree_after won't work because it can't find the already
# removed reference.
if start_ref and end_ref and start_ref[0] == end_ref[0]:
if not include_start or not include_end:
return get_html_tree('')
if start_ref:
trim_tree_before(start_ref[0], include_element=include_start)
if end_ref:
trim_tree_after(end_ref[0], include_element=include_end)
return new_tree
def get_html_tree(html):
"""
Given the HTML string, returns a LXML tree object. The tree is wrapped in
<div> elements if it doesn't have a top level tag or parsing would
otherwise result in an error. The wrapping can be later removed with
strip_wrapping().
"""
parser = lxml.html.HTMLParser(encoding='utf-8')
html = html.encode('utf8')
try:
tree = lxml.html.fromstring(html, parser=parser)
except lxml.etree.Error:
# E.g. empty document. Use dummy <div>
tree = lxml.html.fromstring('<div></div>')
# If the document doesn't start with a top level tag, wrap it with a <div>
# that will be later stripped out for consistent behavior.
if tree.tag not in lxml.html.defs.top_level_tags:
html = b'<div>%s</div>' % html
tree = lxml.html.fromstring(html, parser=parser)
# HACK for Outlook emails, where tags like <o:p> are rendered as <p>. We
# can generally ignore these tags so we replace them with <span>, which
# doesn't cause a line break. Also, we can't look up the element path of
# tags that contain colons. When rendering the tree, we will restore the
# tag name.
for el in tree.iter():
if el.nsmap or (isinstance(el.tag, string_class) and ':' in el.tag):
if el.nsmap:
actual_tag_name = '{}:{}'.format(list(el.nsmap.keys())[0], el.tag)
else:
actual_tag_name = el.tag
el.tag = 'span'
el.attrib['__tag_name'] = actual_tag_name
return tree
def strip_wrapping(html):
"""
Removes the wrapping that might have resulted when using get_html_tree().
"""
if html.startswith('<div>') and html.endswith('</div>'):
html = html[5:-6]
return html.strip()
def render_html_tree(tree):
"""
Renders the given HTML tree, and strips any wrapping that was applied in
get_html_tree().
You should avoid further processing of the given tree after calling this
method because we modify namespaced tags here.
"""
# Restore any tag names that were changed in get_html_tree()
for el in tree.iter():
if '__tag_name' in el.attrib:
actual_tag_name = el.attrib.pop('__tag_name')
el.tag = actual_tag_name
html = lxml.html.tostring(tree, encoding='utf8').decode('utf8')
return strip_wrapping(html)
def is_indentation_element(element):
if isinstance(element.tag, string_class):
return element.tag.lower() == 'blockquote'
return False
def tree_token_generator(el, indentation_level=0):
"""
Internal generator that yields tokens for the given HTML element as
follows:
- A tuple (LXML element, BEGIN, indentation_level)
- Text right after the start of the tag, or None.
- Recursively calls the token generator for all child objects
- A tuple (LXML element, END, indentation_level)
- Text right after the end of the tag, or None.
"""
if not isinstance(el.tag, string_class):
return
tag_name = el.tag.lower()
is_indentation = is_indentation_element(el)
if is_indentation:
indentation_level += 1
yield (el, BEGIN, indentation_level)
yield el.text
for child in el.iterchildren():
for token in tree_token_generator(child, indentation_level):
yield token
if is_indentation:
indentation_level -= 1
yield (el, END, indentation_level)
yield el.tail
def tree_line_generator(el, max_lines=None):
"""
Internal generator that iterates through an LXML tree and yields a tuple
per line. In this context, lines are blocks of text separated by <br> tags
or by block elements. The tuples contain the following elements:
- A tuple with the element reference (element, position) for the start
of the line. The tuple consists of:
- The LXML HTML element which references the line
- Whether the text starts at the beginning of the referenced element,
or after the closing tag
- A similar tuple indicating the ending of the line.
- The email indentation level, if detected.
- The plain (non-HTML) text of the line
If max_lines is specified, the generator stops after yielding the given
amount of lines.
For example, the HTML tree "<div>foo <span>bar</span><br>baz</div>" yields:
- ((<Element div>, 'begin'), (<Element br>, 'begin'), 0, 'foo bar')
- ((<Element br>, 'end'), (<Element div>, 'end'), 0, 'baz').
To illustrate the indentation level, the HTML tree
'<div><blockquote>hi</blockquote>world</div>' yields:
- ((<Element blockquote>, 'begin'), (<Element blockquote>, 'end'), 1, 'hi')
- ((<Element blockquote>, 'end'), (<Element div>, 'end'), 0, 'world')
"""
def _trim_spaces(text):
return MULTIPLE_WHITESPACE_RE.sub(' ', text).strip()
counter = 1
if max_lines != None and counter > max_lines:
return
# Buffer for the current line.
line = ''
# The reference tuple (element, position) for the start of the line.
start_ref = None
# The indentation level at the start of the line.
start_indentation_level = None
for token in tree_token_generator(el):
if token is None:
continue
elif isinstance(token, tuple):
el, state, indentation_level = token
tag_name = el.tag.lower()
line_break = (tag_name == 'br' and state == BEGIN)
is_block = (tag_name not in INLINE_TAGS)
is_forward = (is_block and state == BEGIN and
el.attrib.get('style') in FORWARD_STYLES)
if is_block or line_break:
line = _trim_spaces(line)
if line or line_break or is_forward:
end_ref = (el, state)
yield start_ref, end_ref, start_indentation_level, line
counter += 1
if max_lines != None and counter > max_lines:
return
line = ''
if is_forward:
# Simulate forward
yield (end_ref, end_ref, start_indentation_level,
FORWARD_LINE)
counter += 1
if max_lines != None and counter > max_lines:
return
if not line:
start_ref = (el, state)
start_indentation_level = indentation_level
elif isinstance(token, string_class):
line += token
else:
raise RuntimeError('invalid token: {}'.format(token))
line = _trim_spaces(line)
if line:
yield line
def get_line_info(tree, max_lines=None):
"""
Shortcut for indented_tree_line_generator() that returns an array of
start references, an array of corresponding end references (see
tree_line_generator() docs), and an array of corresponding lines.
"""
line_gen = indented_tree_line_generator(tree, max_lines=max_lines)
line_gen_result = list(zip(*line_gen))
if line_gen_result:
return line_gen_result
else:
return [], [], []
|
closeio/quotequail | quotequail/_html.py | get_line_info | python | def get_line_info(tree, max_lines=None):
line_gen = indented_tree_line_generator(tree, max_lines=max_lines)
line_gen_result = list(zip(*line_gen))
if line_gen_result:
return line_gen_result
else:
return [], [], [] | Shortcut for indented_tree_line_generator() that returns an array of
start references, an array of corresponding end references (see
tree_line_generator() docs), and an array of corresponding lines. | train | https://github.com/closeio/quotequail/blob/8a3960c033d595b25a8bbc2c340be898e3065b5f/quotequail/_html.py#L389-L400 | [
"def indented_tree_line_generator(el, max_lines=None):\n \"\"\"\n Like tree_line_generator, but yields tuples (start_ref, end_ref, line),\n where the line already takes the indentation into account by having \"> \"\n prepended. If a line already starts with \">\", it is escaped (\"\\\\>\"). This\n ma... | # HTML utils
import lxml.html
import lxml.etree
from ._patterns import FORWARD_LINE, FORWARD_STYLES, MULTIPLE_WHITESPACE_RE
INLINE_TAGS = ['a', 'b', 'em', 'i', 'strong', 'span', 'font', 'q',
'object', 'bdo', 'sub', 'sup', 'center', 'td', 'th']
BEGIN = 'begin'
END = 'end'
try:
string_class = basestring # Python 2.7
except NameError:
string_class = str # Python 3.x
def trim_tree_after(element, include_element=True):
"""
Removes the document tree following the given element. If include_element
is True, the given element is kept in the tree, otherwise it is removed.
"""
el = element
for parent_el in element.iterancestors():
el.tail = None
if el != element or include_element:
el = el.getnext()
while el is not None:
remove_el = el
el = el.getnext()
parent_el.remove(remove_el)
el = parent_el
def trim_tree_before(element, include_element=True, keep_head=True):
"""
Removes the document tree preceding the given element. If include_element
is True, the given element is kept in the tree, otherwise it is removed.
"""
el = element
for parent_el in element.iterancestors():
parent_el.text = None
if el != element or include_element:
el = el.getprevious()
else:
parent_el.text = el.tail
while el is not None:
remove_el = el
el = el.getprevious()
tag = remove_el.tag
is_head = isinstance(tag, string_class) and tag.lower() == 'head'
if not keep_head or not is_head:
parent_el.remove(remove_el)
el = parent_el
def trim_slice(lines, slice_tuple):
"""
Trim a slice tuple (begin, end) so it starts at the first non-empty line
(obtained via indented_tree_line_generator / get_line_info) and ends at the
last non-empty line within the slice. Returns the new slice.
"""
def _empty(line):
return not line or line.strip() == '>'
if not slice_tuple:
return None
slice_start, slice_end = slice_tuple
if slice_start is None:
slice_start = 0
if slice_end is None:
slice_end = len(lines)
# Trim from beginning
while slice_start < slice_end and _empty(lines[slice_start]):
slice_start += 1
# Trim from end
while slice_end > slice_start and _empty(lines[slice_end-1]):
slice_end -= 1
return (slice_start, slice_end)
def unindent_tree(element):
"""
Removes the outermost indent. For example, the tree
"<div>A<blockqote>B<div>C<blockquote>D</blockquote>E</div>F</blockquote>G</div>"
is transformed to
"<div>A<div>B<div>C<blockquote>D</blockquote>E</div>F</div>G</div>"
"""
for el in element.iter():
if is_indentation_element(el):
el.attrib.clear()
el.tag = 'div'
return
def slice_tree(tree, start_refs, end_refs, slice_tuple, html_copy=None):
"""
Slices the HTML tree with the given start_refs and end_refs (obtained via
get_line_info) at the given slice_tuple, a tuple (start, end) containing
the start and end of the slice (or None, to start from the start / end at
the end of the tree). If html_copy is specified, a new tree is constructed
from the given HTML (which must be the equal to the original tree's HTML*).
The resulting tree is returned.
*) The reason we have to specify the HTML is that we can't reliably
construct a copy of the tree using copy.copy() (see bug
https://bugs.launchpad.net/lxml/+bug/1562550).
"""
start_ref = None
end_ref = None
if slice_tuple:
slice_start, slice_end = slice_tuple
if ((slice_start is not None and slice_start >= len(start_refs)) or
(slice_end is not None and slice_end <= 0)):
return get_html_tree('')
if slice_start != None and slice_start <= 0:
slice_start = None
if slice_end != None and slice_end >= len(start_refs):
slice_end = None
else:
slice_start, slice_end = None, None
if slice_start is not None:
start_ref = start_refs[slice_start]
if slice_end is not None:
if slice_end < len(end_refs):
end_ref = end_refs[slice_end-1]
if html_copy is not None:
et = lxml.etree.ElementTree(tree)
new_tree = get_html_tree(html_copy)
if start_ref:
selector = et.getelementpath(start_ref[0])
start_ref = (new_tree.find(selector), start_ref[1])
if end_ref:
selector = et.getelementpath(end_ref[0])
end_ref = (new_tree.find(selector), end_ref[1])
else:
new_tree = tree
if start_ref:
include_start = (start_ref[1] == BEGIN)
if end_ref:
include_end = (end_ref[1] == END)
# If start_ref is the same as end_ref, and we don't include the element,
# we are removing the entire tree. We need to handle this separately,
# otherwise trim_tree_after won't work because it can't find the already
# removed reference.
if start_ref and end_ref and start_ref[0] == end_ref[0]:
if not include_start or not include_end:
return get_html_tree('')
if start_ref:
trim_tree_before(start_ref[0], include_element=include_start)
if end_ref:
trim_tree_after(end_ref[0], include_element=include_end)
return new_tree
def get_html_tree(html):
"""
Given the HTML string, returns a LXML tree object. The tree is wrapped in
<div> elements if it doesn't have a top level tag or parsing would
otherwise result in an error. The wrapping can be later removed with
strip_wrapping().
"""
parser = lxml.html.HTMLParser(encoding='utf-8')
html = html.encode('utf8')
try:
tree = lxml.html.fromstring(html, parser=parser)
except lxml.etree.Error:
# E.g. empty document. Use dummy <div>
tree = lxml.html.fromstring('<div></div>')
# If the document doesn't start with a top level tag, wrap it with a <div>
# that will be later stripped out for consistent behavior.
if tree.tag not in lxml.html.defs.top_level_tags:
html = b'<div>%s</div>' % html
tree = lxml.html.fromstring(html, parser=parser)
# HACK for Outlook emails, where tags like <o:p> are rendered as <p>. We
# can generally ignore these tags so we replace them with <span>, which
# doesn't cause a line break. Also, we can't look up the element path of
# tags that contain colons. When rendering the tree, we will restore the
# tag name.
for el in tree.iter():
if el.nsmap or (isinstance(el.tag, string_class) and ':' in el.tag):
if el.nsmap:
actual_tag_name = '{}:{}'.format(list(el.nsmap.keys())[0], el.tag)
else:
actual_tag_name = el.tag
el.tag = 'span'
el.attrib['__tag_name'] = actual_tag_name
return tree
def strip_wrapping(html):
"""
Removes the wrapping that might have resulted when using get_html_tree().
"""
if html.startswith('<div>') and html.endswith('</div>'):
html = html[5:-6]
return html.strip()
def render_html_tree(tree):
"""
Renders the given HTML tree, and strips any wrapping that was applied in
get_html_tree().
You should avoid further processing of the given tree after calling this
method because we modify namespaced tags here.
"""
# Restore any tag names that were changed in get_html_tree()
for el in tree.iter():
if '__tag_name' in el.attrib:
actual_tag_name = el.attrib.pop('__tag_name')
el.tag = actual_tag_name
html = lxml.html.tostring(tree, encoding='utf8').decode('utf8')
return strip_wrapping(html)
def is_indentation_element(element):
if isinstance(element.tag, string_class):
return element.tag.lower() == 'blockquote'
return False
def tree_token_generator(el, indentation_level=0):
"""
Internal generator that yields tokens for the given HTML element as
follows:
- A tuple (LXML element, BEGIN, indentation_level)
- Text right after the start of the tag, or None.
- Recursively calls the token generator for all child objects
- A tuple (LXML element, END, indentation_level)
- Text right after the end of the tag, or None.
"""
if not isinstance(el.tag, string_class):
return
tag_name = el.tag.lower()
is_indentation = is_indentation_element(el)
if is_indentation:
indentation_level += 1
yield (el, BEGIN, indentation_level)
yield el.text
for child in el.iterchildren():
for token in tree_token_generator(child, indentation_level):
yield token
if is_indentation:
indentation_level -= 1
yield (el, END, indentation_level)
yield el.tail
def tree_line_generator(el, max_lines=None):
"""
Internal generator that iterates through an LXML tree and yields a tuple
per line. In this context, lines are blocks of text separated by <br> tags
or by block elements. The tuples contain the following elements:
- A tuple with the element reference (element, position) for the start
of the line. The tuple consists of:
- The LXML HTML element which references the line
- Whether the text starts at the beginning of the referenced element,
or after the closing tag
- A similar tuple indicating the ending of the line.
- The email indentation level, if detected.
- The plain (non-HTML) text of the line
If max_lines is specified, the generator stops after yielding the given
amount of lines.
For example, the HTML tree "<div>foo <span>bar</span><br>baz</div>" yields:
- ((<Element div>, 'begin'), (<Element br>, 'begin'), 0, 'foo bar')
- ((<Element br>, 'end'), (<Element div>, 'end'), 0, 'baz').
To illustrate the indentation level, the HTML tree
'<div><blockquote>hi</blockquote>world</div>' yields:
- ((<Element blockquote>, 'begin'), (<Element blockquote>, 'end'), 1, 'hi')
- ((<Element blockquote>, 'end'), (<Element div>, 'end'), 0, 'world')
"""
def _trim_spaces(text):
return MULTIPLE_WHITESPACE_RE.sub(' ', text).strip()
counter = 1
if max_lines != None and counter > max_lines:
return
# Buffer for the current line.
line = ''
# The reference tuple (element, position) for the start of the line.
start_ref = None
# The indentation level at the start of the line.
start_indentation_level = None
for token in tree_token_generator(el):
if token is None:
continue
elif isinstance(token, tuple):
el, state, indentation_level = token
tag_name = el.tag.lower()
line_break = (tag_name == 'br' and state == BEGIN)
is_block = (tag_name not in INLINE_TAGS)
is_forward = (is_block and state == BEGIN and
el.attrib.get('style') in FORWARD_STYLES)
if is_block or line_break:
line = _trim_spaces(line)
if line or line_break or is_forward:
end_ref = (el, state)
yield start_ref, end_ref, start_indentation_level, line
counter += 1
if max_lines != None and counter > max_lines:
return
line = ''
if is_forward:
# Simulate forward
yield (end_ref, end_ref, start_indentation_level,
FORWARD_LINE)
counter += 1
if max_lines != None and counter > max_lines:
return
if not line:
start_ref = (el, state)
start_indentation_level = indentation_level
elif isinstance(token, string_class):
line += token
else:
raise RuntimeError('invalid token: {}'.format(token))
line = _trim_spaces(line)
if line:
yield line
def indented_tree_line_generator(el, max_lines=None):
"""
Like tree_line_generator, but yields tuples (start_ref, end_ref, line),
where the line already takes the indentation into account by having "> "
prepended. If a line already starts with ">", it is escaped ("\\>"). This
makes it possible to reliably use methods that analyze plain text to detect
quoting.
"""
gen = tree_line_generator(el, max_lines)
for start_ref, end_ref, indentation_level, line in gen:
# Escape line
if line.startswith('>'):
line = '\\' + line
yield start_ref, end_ref, '> '*indentation_level + line
|
jochym/Elastic | elastic/elastic.py | regular | python | def regular(u):
'''
Equation matrix generation for the regular (cubic) lattice.
The order of constants is as follows:
.. math::
C_{11}, C_{12}, C_{44}
:param u: vector of deformations:
[ :math:`u_{xx}, u_{yy}, u_{zz}, u_{yz}, u_{xz}, u_{xy}` ]
:returns: Symmetry defined stress-strain equation matrix
'''
uxx, uyy, uzz, uyz, uxz, uxy = u[0], u[1], u[2], u[3], u[4], u[5]
return array(
[[uxx, uyy + uzz, 0],
[uyy, uxx + uzz, 0],
[uzz, uxx + uyy, 0],
[0, 0, 2*uyz],
[0, 0, 2*uxz],
[0, 0, 2*uxy]]) | Equation matrix generation for the regular (cubic) lattice.
The order of constants is as follows:
.. math::
C_{11}, C_{12}, C_{44}
:param u: vector of deformations:
[ :math:`u_{xx}, u_{yy}, u_{zz}, u_{yz}, u_{xz}, u_{xy}` ]
:returns: Symmetry defined stress-strain equation matrix | train | https://github.com/jochym/Elastic/blob/8daae37d0c48aab8dfb1de2839dab02314817f95/elastic/elastic.py#L95-L115 | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 1998-2017 by Paweł T. Jochym <pawel.jochym@ifj.edu.pl>
#
# This file is part of Elastic.
# Elastic is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# Elastic is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with Elastic. If not, see <http://www.gnu.org/licenses/>.
'''
.. _elastic-mod:
Elastic Module
^^^^^^^^^^^^^^
Elastic is a module for calculation of :math:`C_{ij}` components of elastic
tensor from the strain-stress relation.
The strain components here are ordered in standard way which is different
to ordering in previous versions of the code (up to 4.0).
The ordering is: :math:`u_{xx}, u_{yy}, u_{zz}, u_{yz}, u_{xz}, u_{xy}`.
The general ordering of :math:`C_{ij}` components is (except for triclinic
symmetry and taking into account customary names of constants - e.g.
:math:`C_{16} \\rightarrow C_{14}`):
.. math::
C_{11}, C_{22}, C_{33}, C_{12}, C_{13}, C_{23},
C_{44}, C_{55}, C_{66}, C_{16}, C_{26}, C_{36}, C_{45}
The functions with the name of bravais lattices define the symmetry of the
:math:`C_{ij}` matrix. The matrix is N columns by 6 rows where the columns
corespond to independent elastic constants of the given crystal, while the rows
corespond to the canonical deformations of a crystal. The elements are the
second partial derivatives of the free energy formula for the crystal written
down as a quadratic form of the deformations with respect to elastic constant
and deformation.
*Note:*
The elements for deformations :math:`u_{xy}, u_{xz}, u_{yz}`
have to be divided by 2 to properly match the usual definition
of elastic constants.
See: [LL]_ L.D. Landau, E.M. Lifszyc, "Theory of elasticity"
There is some usefull summary also at:
`ScienceWorld <http://scienceworld.wolfram.com/physics/Elasticity.html>`_
---------
'''
from __future__ import print_function, division, absolute_import
import re
from ase.atoms import Atoms
try:
# Try new release of spglib
import spglib as spg
except ImportError:
# Old naming scheme
from pyspglib import spglib as spg
from scipy.linalg import norm, lstsq
from scipy import optimize
from numpy.linalg import inv
from numpy import dot, diag, ones, reshape, linspace, array, mean
from math import acos, pi, cos, sin, sqrt
def BMEOS(v, v0, b0, b0p):
return (b0/b0p)*(pow(v0/v, b0p) - 1)
def ctg(x):
return cos(x)/sin(x)
def csc(x):
return 1/sin(x)
def tetragonal(u):
'''
Equation matrix generation for the tetragonal lattice.
The order of constants is as follows:
.. math::
C_{11}, C_{33}, C_{12}, C_{13}, C_{44}, C_{14}
:param u: vector of deformations:
[ :math:`u_{xx}, u_{yy}, u_{zz}, u_{yz}, u_{xz}, u_{xy}` ]
:returns: Symmetry defined stress-strain equation matrix
'''
uxx, uyy, uzz, uyz, uxz, uxy = u[0], u[1], u[2], u[3], u[4], u[5]
return array(
[[uxx, 0, uyy, uzz, 0, 0],
[uyy, 0, uxx, uzz, 0, 0],
[0, uzz, 0, uxx+uyy, 0, 0],
[0, 0, 0, 0, 0, 2*uxy],
[0, 0, 0, 0, 2*uxz, 0],
[0, 0, 0, 0, 2*uyz, 0]])
def orthorombic(u):
'''
Equation matrix generation for the orthorombic lattice.
The order of constants is as follows:
.. math::
C_{11}, C_{22}, C_{33}, C_{12}, C_{13}, C_{23},
C_{44}, C_{55}, C_{66}
:param u: vector of deformations:
[ :math:`u_{xx}, u_{yy}, u_{zz}, u_{yz}, u_{xz}, u_{xy}` ]
:returns: Symmetry defined stress-strain equation matrix
'''
uxx, uyy, uzz, uyz, uxz, uxy = u[0], u[1], u[2], u[3], u[4], u[5]
return array(
[[uxx, 0, 0, uyy, uzz, 0, 0, 0, 0],
[0, uyy, 0, uxx, 0, uzz, 0, 0, 0],
[0, 0, uzz, 0, uxx, uyy, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 2*uyz, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 2*uxz, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 2*uxy]])
def trigonal(u):
'''
The matrix is constructed based on the approach from L&L
using auxiliary coordinates: :math:`\\xi=x+iy`, :math:`\\eta=x-iy`.
The components are calculated from free energy using formula
introduced in :ref:`symmetry` with appropriate coordinate changes.
The order of constants is as follows:
.. math::
C_{11}, C_{33}, C_{12}, C_{13}, C_{44}, C_{14}
:param u: vector of deformations:
[ :math:`u_{xx}, u_{yy}, u_{zz}, u_{yz}, u_{xz}, u_{xy}` ]
:returns: Symmetry defined stress-strain equation matrix
'''
# TODO: Not tested yet.
# TODO: There is still some doubt about the :math:`C_{14}` constant.
uxx, uyy, uzz, uyz, uxz, uxy = u[0], u[1], u[2], u[3], u[4], u[5]
return array(
[[ uxx, 0, uyy, uzz, 0, 2*uxz ],
[ uyy, 0, uxx, uzz, 0, -2*uxz ],
[ 0, uzz, 0, uxx+uyy, 0, 0 ],
[ 0, 0, 0, 0, 2*uyz, -4*uxy ],
[ 0, 0, 0, 0, 2*uxz, 2*(uxx-uyy)],
[ 2*uxy, 0, -2*uxy, 0, 0, -4*uyz ]])
def hexagonal(u):
'''
The matrix is constructed based on the approach from L&L
using auxiliary coordinates: :math:`\\xi=x+iy`, :math:`\\eta=x-iy`.
The components are calculated from free energy using formula
introduced in :ref:`symmetry` with appropriate coordinate changes.
The order of constants is as follows:
.. math::
C_{11}, C_{33}, C_{12}, C_{13}, C_{44}
:param u: vector of deformations:
[ :math:`u_{xx}, u_{yy}, u_{zz}, u_{yz}, u_{xz}, u_{xy}` ]
:returns: Symmetry defined stress-strain equation matrix
'''
# TODO: Still needs good verification
uxx, uyy, uzz, uyz, uxz, uxy = u[0], u[1], u[2], u[3], u[4], u[5]
return array(
[[ uxx, 0, uyy, uzz, 0 ],
[ uyy, 0, uxx, uzz, 0 ],
[ 0, uzz, 0, uxx+uyy, 0 ],
[ 0, 0, 0, 0, 2*uyz ],
[ 0, 0, 0, 0, 2*uxz ],
[ 2*uxy, 0, -2*uxy, 0, 0 ]])
def monoclinic(u):
'''Monoclinic group,
The ordering of constants is:
.. math::
C_{11}, C_{22}, C_{33}, C_{12}, C_{13}, C_{23},
C_{44}, C_{55}, C_{66}, C_{16}, C_{26}, C_{36}, C_{45}
:param u: vector of deformations:
[ :math:`u_{xx}, u_{yy}, u_{zz}, u_{yz}, u_{xz}, u_{xy}` ]
:returns: Symmetry defined stress-strain equation matrix
'''
uxx, uyy, uzz, uyz, uxz, uxy = u[0], u[1], u[2], u[3], u[4], u[5]
return array(
[[uxx, 0, 0,uyy,uzz, 0, 0, 0, 0,uxy, 0, 0, 0],
[ 0,uyy, 0,uxx, 0,uzz, 0, 0, 0, 0,uxy, 0, 0],
[ 0, 0,uzz, 0,uxx,uyy, 0, 0, 0, 0, 0,uxy, 0],
[ 0, 0, 0, 0, 0, 0,2*uyz, 0, 0, 0, 0, 0,uxz],
[ 0, 0, 0, 0, 0, 0, 0,2*uxz, 0, 0, 0, 0,uyz],
[ 0, 0, 0, 0, 0, 0, 0, 0,2*uxy,uxx,uyy,uzz, 0]])
def triclinic(u):
'''Triclinic crystals.
*Note*: This was never tested on the real case. Beware!
The ordering of constants is:
.. math::
C_{11}, C_{22}, C_{33},
C_{12}, C_{13}, C_{23},
C_{44}, C_{55}, C_{66},
C_{16}, C_{26}, C_{36}, C_{46}, C_{56},
C_{14}, C_{15}, C_{25}, C_{45}
:param u: vector of deformations:
[ :math:`u_{xx}, u_{yy}, u_{zz}, u_{yz}, u_{xz}, u_{xy}` ]
:returns: Symmetry defined stress-strain equation matrix
'''
# Based on the monoclinic matrix and not tested on real case.
# If you have test cases for this symmetry send them to the author.
uxx, uyy, uzz, uyz, uxz, uxy = u[0], u[1], u[2], u[3], u[4], u[5]
return array(
[[uxx, 0, 0,uyy,uzz, 0, 0, 0, 0,uxy, 0, 0, 0, 0,uyz,uxz, 0, 0],
[ 0,uyy, 0,uxx, 0,uzz, 0, 0, 0, 0,uxy, 0, 0, 0, 0, 0,uxz, 0],
[ 0, 0,uzz, 0,uxx,uyy, 0, 0, 0, 0, 0,uxy, 0, 0, 0, 0, 0, 0],
[ 0, 0, 0, 0, 0, 0,2*uyz, 0, 0, 0, 0, 0,uxy, 0,uxx, 0, 0,uxz],
[ 0, 0, 0, 0, 0, 0, 0,2*uxz, 0, 0, 0, 0, 0,uxy, 0,uxx,uyy,uyz],
[ 0, 0, 0, 0, 0, 0, 0, 0,2*uxy,uxx,uyy,uzz,uyz,uxz, 0, 0, 0, 0]])
def get_cij_order(cryst):
    '''Return the ordering of elastic constants for the structure.

    :param cryst: ASE Atoms object
    :returns: Order of elastic constants as a tuple of strings: C_ij
    '''
    # Higher-symmetry constant sets are prefixes/subsets of the
    # lower-symmetry ones, so build them from shared pieces.
    ortho = ('C_11', 'C_22', 'C_33', 'C_12', 'C_13', 'C_23',
             'C_44', 'C_55', 'C_66')
    trig = ('C_11', 'C_33', 'C_12', 'C_13', 'C_44', 'C_14')
    orders = {
        1: ortho + ('C_16', 'C_26', 'C_36', 'C_46', 'C_56',
                    'C_14', 'C_15', 'C_25', 'C_45'),
        2: ortho + ('C_16', 'C_26', 'C_36', 'C_45'),
        3: ortho,
        4: trig,
        5: trig,
        6: trig[:-1],
        7: ('C_11', 'C_12', 'C_44'),
    }
    lattice_number = get_lattice_type(cryst)[0]
    return orders[lattice_number]
def get_lattice_type(cryst):
    '''Find the symmetry of the crystal using spglib symmetry finder.

    Derive name of the space group and its number extracted from the result.
    Based on the group number identify also the lattice type and the Bravais
    lattice of the crystal. The lattice type numbers are
    (the numbering starts from 1):
    Triclinic (1), Monoclinic (2), Orthorombic (3),
    Tetragonal (4), Trigonal (5), Hexagonal (6), Cubic (7)

    :param cryst: ASE Atoms object
    :returns: tuple (lattice type number (1-7), lattice name, space group
              name, space group number)
    '''
    # Exclusive upper space-group-number bounds for each Bravais lattice.
    bounds = (3, 16, 75, 143, 168, 195, 231)
    names = ("Triclinic", "Monoclinic", "Orthorombic", "Tetragonal",
             "Trigonal", "Hexagonal", "Cubic")
    # spglib reports e.g. 'Fm-3m (225)'; split into name and number.
    m = re.match(r'([A-Z].*\b)\s*\(([0-9]*)\)', spg.get_spacegroup(cryst))
    sg_name = m.group(1)
    sg_nr = int(m.group(2))
    for lattype, (limit, bravais) in enumerate(zip(bounds, names), start=1):
        if sg_nr < limit:
            break
    return lattype, bravais, sg_name, sg_nr
def get_bulk_modulus(cryst):
    '''Calculate bulk modulus using the Birch-Murnaghan equation of state.

    The EOS must be previously calculated by get_BM_EOS routine.
    The returned bulk modulus is a :math:`B_0` coefficient of the B-M EOS.
    The units of the result are defined by ASE. To get the result in
    any particular units (e.g. GPa) you need to divide it by
    ase.units.<unit name>::

        get_bulk_modulus(cryst)/ase.units.GPa

    :param cryst: ASE Atoms object
    :returns: float, bulk modulus :math:`B_0` in ASE units.
    '''
    eos = getattr(cryst, 'bm_eos', None)
    if eos is None:
        raise RuntimeError('Missing B-M EOS data.')
    # B_0 is the second element of the fitted [V_0, B_0, B_0'] triple.
    cryst.bulk_modulus = eos[1]
    return cryst.bulk_modulus
def get_pressure(s):
    '''Return *external* isotropic (hydrostatic) pressure in ASE units.

    If the pressure is positive the system is under external pressure.
    This is a convenience function to convert output of get_stress function
    into external pressure.

    :param s: stress tensor in Voight (vector) notation as returned by
        the get_stress() method.
    :returns: float, external hydrostatic pressure in ASE units.
    '''
    # Negative mean of the three diagonal (normal) stress components.
    diagonal = s[:3]
    return -mean(diagonal)
def get_BM_EOS(cryst, systems):
    """Calculate Birch-Murnaghan Equation of State for the crystal.

    The B-M equation of state is defined by:

    .. math::
        P(V)= \\frac{B_0}{B'_0}\\left[
        \\left({\\frac{V}{V_0}}\\right)^{-B'_0} - 1
        \\right]

    It's coefficients are estimated using n single-point structures ganerated
    from the crystal (cryst) by the scan_volumes function between two relative
    volumes. The BM EOS is fitted to the computed points by
    least squares method. The returned value is a list of fitted
    parameters: :math:`V_0, B_0, B_0'` if the fit succeded.
    If the fitting fails the ``RuntimeError('Calculation failed')`` is raised.
    The data from the calculation and fit is stored in the bm_eos and pv
    members of cryst for future reference. You have to provide properly
    optimized structures in cryst and systems list.

    :param cryst: Atoms object, basic structure
    :param systems: A list of calculated structures
    :returns: tuple of EOS parameters :math:`V_0, B_0, B_0'`.
    """
    # Collect (V, P, |a|, |b|, |c|) per structure and transpose, so
    # pvdat[0] is the volume row and pvdat[1] the pressure row.
    pvdat = array([[r.get_volume(),
                    get_pressure(r.get_stress()),
                    norm(r.get_cell()[:, 0]),
                    norm(r.get_cell()[:, 1]),
                    norm(r.get_cell()[:, 2])] for r in systems]).T
    # Estimate the initial guess assuming b0p=1
    # Limiting volumes
    v1 = min(pvdat[0])
    v2 = max(pvdat[0])
    # The pressure is falling with the growing volume
    p2 = min(pvdat[1])
    p1 = max(pvdat[1])
    # Pass the B-M curve (with B0'=1) through the two extreme points to
    # get starting values for the non-linear fit below.
    b0 = (p1*v1-p2*v2)/(v2-v1)
    v0 = v1*(p1+b0)/b0
    # Initial guess
    p0 = [v0, b0, 1]
    # Fitting
    try :
        p1, succ = optimize.curve_fit(BMEOS, pvdat[0], pvdat[1], p0)
    except (ValueError, RuntimeError, optimize.OptimizeWarning) as ex:
        raise RuntimeError('Calculation failed')
    # Cache the fit result and the raw P-V data on the structure.
    cryst.bm_eos = p1
    cryst.pv = pvdat
    return cryst.bm_eos
def get_elementary_deformations(cryst, n=5, d=2):
    '''Generate elementary deformations for elastic tensor calculation.

    The deformations are created based on the symmetry of the crystal and
    are limited to the non-equivalet axes of the crystal.

    :param cryst: Atoms object, basic structure
    :param n: integer, number of deformations per non-equivalent axis
    :param d: float, size of the maximum deformation in percent and degrees
    :returns: list of deformed structures
    '''
    # Per-Bravais-lattice table of non-equivalent deformation axes and the
    # symmetry matrix builder. Perhaps the number of deformations for the
    # trigonal system could be reduced to [0,3] but better safe than sorry.
    deform = {
        "Cubic": [[0, 3], regular],
        "Hexagonal": [[0, 2, 3, 5], hexagonal],
        "Trigonal": [[0, 1, 2, 3, 4, 5], trigonal],
        "Tetragonal": [[0, 2, 3, 5], tetragonal],
        "Orthorombic": [[0, 1, 2, 3, 4, 5], orthorombic],
        "Monoclinic": [[0, 1, 2, 3, 4, 5], monoclinic],
        "Triclinic": [[0, 1, 2, 3, 4, 5], triclinic]
    }
    lattyp, brav, sg_name, sg_nr = get_lattice_type(cryst)
    axis, symm = deform[brav]
    systems = []
    for a in axis:
        if a < 3:
            # Axial deformation: symmetric span around zero.
            span = linspace(-d, d, n)
        elif a < 6:
            # Shear deformation: skip the zero angle.
            span = linspace(d/10.0, d, n)
        else:
            continue
        for dx in span:
            systems.append(get_cart_deformed_cell(cryst, axis=a, size=dx))
    return systems
def get_elastic_tensor(cryst, systems):
    '''Calculate elastic tensor of the crystal.

    The elastic tensor is calculated from the stress-strain relation
    and derived by fitting this relation to the set of linear equations
    build from the symmetry of the crystal and strains and stresses
    of the set of elementary deformations of the unit cell.

    It is assumed that the crystal is converged and optimized
    under intended pressure/stress. The geometry and stress on the
    cryst is taken as the reference point. No additional optimization
    will be run. Structures in cryst and systems list must have calculated
    stresses. The function returns tuple of :math:`C_{ij}` elastic tensor,
    raw Birch coefficients :math:`B_{ij}` and fitting results: residuals,
    solution rank, singular values returned by numpy.linalg.lstsq.

    :param cryst: Atoms object, basic structure
    :param systems: list of Atoms object with calculated deformed structures
    :returns: tuple(:math:`C_{ij}` float vector,
        tuple(:math:`B_{ij}` float vector, residuals, solution rank, singular values))
    '''
    # Deformation look-up table
    # Perhaps the number of deformations for trigonal
    # system could be reduced to [0,3] but better safe then sorry
    deform = {
        "Cubic": [[0, 3], regular],
        "Hexagonal": [[0, 2, 3, 5], hexagonal],
        "Trigonal": [[0, 1, 2, 3, 4, 5], trigonal],
        "Tetragonal": [[0, 2, 3, 5], tetragonal],
        "Orthorombic": [[0, 1, 2, 3, 4, 5], orthorombic],
        "Monoclinic": [[0, 1, 2, 3, 4, 5], monoclinic],
        "Triclinic": [[0, 1, 2, 3, 4, 5], triclinic]
    }
    lattyp, brav, sg_name, sg_nr = get_lattice_type(cryst)
    # Decide which deformations should be used
    axis, symm = deform[brav]
    ul = []  # Voigt strains of the deformed systems
    sl = []  # corresponding stresses, ambient pressure removed
    p = get_pressure(cryst.get_stress())
    for g in systems:
        ul.append(get_strain(g, refcell=cryst))
        # Remove the ambient pressure from the stress tensor
        sl.append(g.get_stress()-array([p, p, p, 0, 0, 0]))
    # Stack the per-deformation 6xN symmetry matrices into one
    # overdetermined linear system and solve for the Birch coefficients.
    eqm = array([symm(u) for u in ul])
    eqm = reshape(eqm, (eqm.shape[0]*eqm.shape[1], eqm.shape[2]))
    slm = reshape(array(sl), (-1,))
    Bij = lstsq(eqm, slm)
    # Calculate elastic constants from Birch coeff.
    # TODO: Check the sign of the pressure array in the B <=> C relation
    if (symm == orthorombic):
        Cij = Bij[0] - array([-p, -p, -p, p, p, p, -p, -p, -p])
    elif (symm == tetragonal):
        Cij = Bij[0] - array([-p, -p, p, p, -p, -p])
    elif (symm == regular):
        Cij = Bij[0] - array([-p, p, -p])
    elif (symm == trigonal):
        Cij = Bij[0] - array([-p, -p, p, p, -p, p])
    elif (symm == hexagonal):
        Cij = Bij[0] - array([-p, -p, p, p, -p])
    elif (symm == monoclinic):
        # TODO: verify this pressure array
        Cij = Bij[0] - array([-p, -p, -p, p, p, p, -p, -p, -p, p, p, p, p])
    elif (symm == triclinic):
        # TODO: verify this pressure array
        Cij = Bij[0] - array([-p, -p, -p, p, p, p, -p, -p, -p,
                              p, p, p, p, p, p, p, p, p])
    return Cij, Bij
def scan_pressures(cryst, lo, hi, n=5, eos=None):
    '''Scan the pressure axis from lo to hi (inclusive)
    using B-M EOS as the volume predictor.

    Pressure (lo, hi) in GPa.

    :param cryst: Atoms object, reference structure
    :param lo: lower bound of the pressure scan (GPa)
    :param hi: upper bound of the pressure scan (GPa)
    :param n: number of sample points
    :param eos: B-M EOS parameters ``[V_0, B_0, B_0']`` (required)
    :returns: list of uniformly scaled structures
    :raises RuntimeError: when ``eos`` is not provided
    '''
    def invbmeos(b, bp, x):
        # Inverse B-M EOS giving the relative lattice scaling for each
        # pressure in x. Valid only for p > -b/bp; the V0 prefactor is
        # deliberately omitted (relative scaling only).
        return array([pow(b/(bp*xv+b), 1/(3*bp)) for xv in x])

    if eos is None:
        raise RuntimeError('Required EOS data missing')
    # Limit negative pressures to 90% of the singularity value.
    # Beyond this B-M EOS is bound to be wrong anyway.
    lo = max(lo, -0.9*eos[1]/eos[2])
    scale = (eos[0]/cryst.get_volume())*invbmeos(eos[1], eos[2],
                                                linspace(lo, hi, num=n))
    uc = cryst.get_cell()
    systems = [Atoms(cryst) for _ in scale]
    # Loop index renamed from 'n' to 'i': the original shadowed the 'n'
    # parameter, which is confusing and error-prone.
    for i, s in enumerate(scale):
        systems[i].set_cell(s*uc, scale_atoms=True)
    return systems
def scan_volumes(cryst, lo=0.98, hi=1.02, n=5, scale_volumes=True):
    '''Provide set of crystals along volume axis from lo to hi (inclusive).

    No volume cell optimization is performed. Bounds are specified as
    fractions (1.10 = 10% increase). If scale_volumes==False the scalling
    is applied to lattice vectors instead of volumes.

    :param cryst: Atoms object, reference structure
    :param lo: lower bound of the V/V_0 in the scan
    :param hi: upper bound of the V/V_0 in the scan
    :param n: number of volume sample points
    :param scale_volumes: If True scale the unit cell volume or,
        if False, scale the length of lattice axes.
    :returns: a list of deformed systems
    '''
    scale = linspace(lo, hi, num=n)
    if scale_volumes:
        # Convert volume ratios into linear lattice scaling factors.
        scale **= (1.0/3.0)
    uc = cryst.get_cell()
    systems = [Atoms(cryst) for _ in scale]
    # Loop index renamed from 'n' to 'i': the original shadowed the 'n'
    # parameter, which is confusing and error-prone.
    for i, s in enumerate(scale):
        systems[i].set_cell(s*uc, scale_atoms=True)
    return systems
def get_vecang_cell(cryst, uc=None):
    '''Compute A,B,C, alpha,beta,gamma cell params
    from the unit cell matrix (uc) or cryst.
    Angles in radians.
    '''
    if uc is None:
        uc = cryst.get_cell()
    # Axis lengths and unit vectors along the three lattice vectors.
    lengths = [norm(uc[i, :]) for i in range(3)]
    versors = [uc[i, :]/norm(uc[i, :]) for i in range(3)]
    # Angle i is between the two axes *other* than axis i.
    angles = [acos(dot(versors[(i + 1) % 3], versors[(i + 2) % 3]))
              for i in range(3)]
    return lengths + angles
def get_deformed_cell(base_cryst, axis=0, size=1):
    '''Return the cell (with atoms) deformed along one
    cell parameter (0,1,2 = a,b,c ; 3,4,5 = alpha,beta,gamma) by
    size percent or size degrees (axis/angles).

    :param base_cryst: Atoms object, structure to be deformed
    :param axis: deformed cell parameter (0-2 axis length, 3-5 angle)
    :param size: deformation size, percent (axes) or degrees (angles)
    :returns: new Atoms object with the deformed cell
    '''
    cryst = Atoms(base_cryst)
    uc = base_cryst.get_cell()
    if axis < 3:
        # Axis-length deformation: scale a single lattice vector.
        uc[axis, :] = (1+size/100.0)*uc[axis, :]
    else:
        # Angle deformation: perturb one cell angle and rebuild the cell
        # matrix in the standard (a along x, b in xy-plane) orientation.
        (a, b, c, alp, bet, gam) = get_vecang_cell(cryst)
        d = array([0.0, 0.0, 0.0])
        d[axis-3] = pi*size/180
        (alp, bet, gam) = array((alp, bet, gam))+d
        # t is the squared z-component factor of the c vector; negative t
        # means the requested angles do not describe a real 3D cell.
        t = 1 - (ctg(bet)*ctg(gam)-cos(alp)*csc(bet)*csc(gam))**2
        if t < 0.0:
            print('''
The parameters (alpha,beta,gamma)=(%f,%f,%f) are probably
incorrect and lead to imaginary coordinates.
This range of parameters is unsupported by this program
(and is, let me say, very strange for a crystal).
Cennot continue, bye.''' % (alp, bet, gam))
            raise ValueError
        else:
            uc = [[a, 0.0, 0.0],
                  [b*cos(gam), b*sin(gam), 0],
                  [c*cos(bet),
                   c*(cos(alp)/sin(gam) - cos(bet)*ctg(gam)),
                   c*sin(bet)*sqrt(t)]]
    cryst.set_cell(uc, scale_atoms=True)
    return cryst
def get_cart_deformed_cell(base_cryst, axis=0, size=1):
    '''Return the cell deformed along one of the cartesian directions.

    Creates new deformed structure. The deformation is based on the
    base structure and is performed along single axis. The axis is
    specified as follows: 0,1,2 = x,y,z ; sheers: 3,4,5 = yz, xz, xy.
    The size of the deformation is in percent and degrees, respectively.

    :param base_cryst: structure to be deformed
    :param axis: direction of deformation
    :param size: size of the deformation
    :returns: new, deformed structure
    '''
    s = size/100.0
    # Start from the identity deformation matrix.
    L = diag(ones(3))
    if axis < 3:
        # Axial stretch along x, y or z.
        L[axis, axis] += s
    else:
        # Shear: pick the off-diagonal element for yz / xz / xy.
        i, j = {3: (1, 2), 4: (0, 2)}.get(axis, (0, 1))
        L[i, j] += s
    deformed = Atoms(base_cryst)
    deformed.set_cell(dot(base_cryst.get_cell(), L), scale_atoms=True)
    return deformed
def get_strain(cryst, refcell=None):
    '''Calculate strain tensor in the Voight notation.

    Computes the strain tensor in the Voight notation as a conventional
    6-vector. The calculation is done with respect to the crystal
    geometry passed in refcell parameter.

    :param cryst: deformed structure
    :param refcell: reference, undeformed structure
    :returns: 6-vector of strain tensor in the Voight notation
    '''
    if refcell is None:
        refcell = cryst
    ref = refcell.get_cell()
    # Displacement of the cell matrix relative to the reference.
    du = cryst.get_cell() - ref
    # Symmetrize to obtain the (small) strain tensor.
    u = dot(inv(ref), du)
    u = (u + u.T)/2
    return array([u[0, 0], u[1, 1], u[2, 2], u[2, 1], u[2, 0], u[1, 0]])
if __name__ == '__main__':
    # Self-test / demo: build an MgO rock-salt crystal and print the
    # geometry of the volume scan and of the elementary deformations.
    from ase.spacegroup import crystal

    # MgO lattice constant in Angstrom; NaCl structure, space group 225.
    a = 4.194
    cryst = crystal(['Mg', 'O'],
                    [(0, 0, 0), (0.5, 0.5, 0.5)],
                    spacegroup=225,
                    cellpar=[a, a, a, 90, 90, 90])

    # Volume scan: print absolute and relative volumes.
    sl = scan_volumes(cryst)
    print('Volumes: ', end='')
    for c in sl:
        print('%.2f (%.1f%%)' % (c.get_volume(),
                                 100*c.get_volume()/cryst.get_volume()),
              end=' ')
    print()

    # Elementary deformations: print volume and cell parameters per system.
    sl = get_elementary_deformations(cryst)
    print('Structures: ')
    print(' Vol A B C alph bet gam')
    for n, c in enumerate(sl):
        print('%.4f (%5.1f%%)' % (c.get_volume(),
                                  100*c.get_volume()/cryst.get_volume()),
              end='')
        # NOTE(review): get_cell_lengths_and_angles is deprecated in recent
        # ASE (use cell.cellpar()) -- confirm against the pinned ASE version.
        print((3*' %7.4f' + ' ' + 3*' %7.2f') %
              tuple(c.get_cell_lengths_and_angles()))
|
jochym/Elastic | elastic/elastic.py | get_lattice_type | python | def get_lattice_type(cryst):
'''Find the symmetry of the crystal using spglib symmetry finder.
Derive name of the space group and its number extracted from the result.
Based on the group number identify also the lattice type and the Bravais
lattice of the crystal. The lattice type numbers are
(the numbering starts from 1):
Triclinic (1), Monoclinic (2), Orthorombic (3),
Tetragonal (4), Trigonal (5), Hexagonal (6), Cubic (7)
:param cryst: ASE Atoms object
:returns: tuple (lattice type number (1-7), lattice name, space group
name, space group number)
'''
# Table of lattice types and correcponding group numbers dividing
# the ranges. See get_lattice_type method for precise definition.
lattice_types = [
[3, "Triclinic"],
[16, "Monoclinic"],
[75, "Orthorombic"],
[143, "Tetragonal"],
[168, "Trigonal"],
[195, "Hexagonal"],
[231, "Cubic"]
]
sg = spg.get_spacegroup(cryst)
m = re.match(r'([A-Z].*\b)\s*\(([0-9]*)\)', sg)
sg_name = m.group(1)
sg_nr = int(m.group(2))
for n, l in enumerate(lattice_types):
if sg_nr < l[0]:
bravais = l[1]
lattype = n+1
break
return lattype, bravais, sg_name, sg_nr | Find the symmetry of the crystal using spglib symmetry finder.
Derive name of the space group and its number extracted from the result.
Based on the group number identify also the lattice type and the Bravais
lattice of the crystal. The lattice type numbers are
(the numbering starts from 1):
Triclinic (1), Monoclinic (2), Orthorombic (3),
Tetragonal (4), Trigonal (5), Hexagonal (6), Cubic (7)
:param cryst: ASE Atoms object
:returns: tuple (lattice type number (1-7), lattice name, space group
name, space group number) | train | https://github.com/jochym/Elastic/blob/8daae37d0c48aab8dfb1de2839dab02314817f95/elastic/elastic.py#L305-L345 | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 1998-2017 by Paweł T. Jochym <pawel.jochym@ifj.edu.pl>
#
# This file is part of Elastic.
# Elastic is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# Elastic is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with Elastic. If not, see <http://www.gnu.org/licenses/>.
'''
.. _elastic-mod:
Elastic Module
^^^^^^^^^^^^^^
Elastic is a module for calculation of :math:`C_{ij}` components of elastic
tensor from the strain-stress relation.
The strain components here are ordered in standard way which is different
to ordering in previous versions of the code (up to 4.0).
The ordering is: :math:`u_{xx}, u_{yy}, u_{zz}, u_{yz}, u_{xz}, u_{xy}`.
The general ordering of :math:`C_{ij}` components is (except for triclinic
symmetry and taking into account customary names of constants - e.g.
:math:`C_{16} \\rightarrow C_{14}`):
.. math::
C_{11}, C_{22}, C_{33}, C_{12}, C_{13}, C_{23},
C_{44}, C_{55}, C_{66}, C_{16}, C_{26}, C_{36}, C_{45}
The functions with the name of bravais lattices define the symmetry of the
:math:`C_{ij}` matrix. The matrix is N columns by 6 rows where the columns
corespond to independent elastic constants of the given crystal, while the rows
corespond to the canonical deformations of a crystal. The elements are the
second partial derivatives of the free energy formula for the crystal written
down as a quadratic form of the deformations with respect to elastic constant
and deformation.
*Note:*
The elements for deformations :math:`u_{xy}, u_{xz}, u_{yz}`
have to be divided by 2 to properly match the usual definition
of elastic constants.
See: [LL]_ L.D. Landau, E.M. Lifszyc, "Theory of elasticity"
There is some usefull summary also at:
`ScienceWorld <http://scienceworld.wolfram.com/physics/Elasticity.html>`_
---------
'''
from __future__ import print_function, division, absolute_import
import re
from ase.atoms import Atoms
try:
# Try new release of spglib
import spglib as spg
except ImportError:
# Old naming scheme
from pyspglib import spglib as spg
from scipy.linalg import norm, lstsq
from scipy import optimize
from numpy.linalg import inv
from numpy import dot, diag, ones, reshape, linspace, array, mean
from math import acos, pi, cos, sin, sqrt
def BMEOS(v, v0, b0, b0p):
    # Birch-Murnaghan P(V): (B0/B0') * ((V0/V)**B0' - 1)
    ratio = v0/v
    return (b0/b0p)*(pow(ratio, b0p) - 1)
def ctg(x):
    # Cotangent of x (radians).
    s, c = sin(x), cos(x)
    return c/s
def csc(x):
    # Cosecant of x (radians).
    denominator = sin(x)
    return 1/denominator
def regular(u):
    '''Equation matrix generation for the regular (cubic) lattice.

    The order of constants is as follows:

    .. math::
        C_{11}, C_{12}, C_{44}

    :param u: vector of deformations:
        [ :math:`u_{xx}, u_{yy}, u_{zz}, u_{yz}, u_{xz}, u_{xy}` ]
    :returns: Symmetry defined stress-strain equation matrix
    '''
    uxx, uyy, uzz = u[0], u[1], u[2]
    uyz, uxz, uxy = u[3], u[4], u[5]
    # Rows: Voigt stress components; columns: C_11, C_12, C_44.
    # Shear rows carry the factor 2 of the Voigt convention.
    rows = [
        [uxx, uyy + uzz, 0],
        [uyy, uxx + uzz, 0],
        [uzz, uxx + uyy, 0],
        [0, 0, 2*uyz],
        [0, 0, 2*uxz],
        [0, 0, 2*uxy],
    ]
    return array(rows)
def tetragonal(u):
    '''Equation matrix generation for the tetragonal lattice.

    The order of constants is as follows:

    .. math::
        C_{11}, C_{33}, C_{12}, C_{13}, C_{44}, C_{14}

    :param u: vector of deformations:
        [ :math:`u_{xx}, u_{yy}, u_{zz}, u_{yz}, u_{xz}, u_{xy}` ]
    :returns: Symmetry defined stress-strain equation matrix
    '''
    uxx, uyy, uzz = u[0], u[1], u[2]
    uyz, uxz, uxy = u[3], u[4], u[5]
    # Rows: Voigt stress components; columns: the six tetragonal
    # constants. Shear rows carry the Voigt factor of 2.
    rows = [
        [uxx, 0, uyy, uzz, 0, 0],
        [uyy, 0, uxx, uzz, 0, 0],
        [0, uzz, 0, uxx + uyy, 0, 0],
        [0, 0, 0, 0, 0, 2*uxy],
        [0, 0, 0, 0, 2*uxz, 0],
        [0, 0, 0, 0, 2*uyz, 0],
    ]
    return array(rows)
def orthorombic(u):
    '''Equation matrix generation for the orthorombic lattice.

    The order of constants is as follows:

    .. math::
        C_{11}, C_{22}, C_{33}, C_{12}, C_{13}, C_{23},
        C_{44}, C_{55}, C_{66}

    :param u: vector of deformations:
        [ :math:`u_{xx}, u_{yy}, u_{zz}, u_{yz}, u_{xz}, u_{xy}` ]
    :returns: Symmetry defined stress-strain equation matrix
    '''
    uxx, uyy, uzz = u[0], u[1], u[2]
    uyz, uxz, uxy = u[3], u[4], u[5]
    # Rows: Voigt stress components; columns: the nine orthorombic
    # constants. Shear rows carry the Voigt factor of 2.
    rows = [
        [uxx, 0, 0, uyy, uzz, 0, 0, 0, 0],
        [0, uyy, 0, uxx, 0, uzz, 0, 0, 0],
        [0, 0, uzz, 0, uxx, uyy, 0, 0, 0],
        [0, 0, 0, 0, 0, 0, 2*uyz, 0, 0],
        [0, 0, 0, 0, 0, 0, 0, 2*uxz, 0],
        [0, 0, 0, 0, 0, 0, 0, 0, 2*uxy],
    ]
    return array(rows)
def trigonal(u):
    '''
    The matrix is constructed based on the approach from L&L
    using auxiliary coordinates: :math:`\\xi=x+iy`, :math:`\\eta=x-iy`.
    The components are calculated from free energy using formula
    introduced in :ref:`symmetry` with appropriate coordinate changes.
    The order of constants is as follows:

    .. math::
        C_{11}, C_{33}, C_{12}, C_{13}, C_{44}, C_{14}

    :param u: vector of deformations:
        [ :math:`u_{xx}, u_{yy}, u_{zz}, u_{yz}, u_{xz}, u_{xy}` ]
    :returns: Symmetry defined stress-strain equation matrix
    '''
    # TODO: Not tested yet.
    # TODO: There is still some doubt about the :math:`C_{14}` constant.
    uxx, uyy, uzz, uyz, uxz, uxy = u[0], u[1], u[2], u[3], u[4], u[5]
    # Rows: Voigt stress components; columns: the six trigonal constants.
    # The signed C_14 column couples shears with normal strains.
    return array(
        [[ uxx, 0, uyy, uzz, 0, 2*uxz ],
         [ uyy, 0, uxx, uzz, 0, -2*uxz ],
         [ 0, uzz, 0, uxx+uyy, 0, 0 ],
         [ 0, 0, 0, 0, 2*uyz, -4*uxy ],
         [ 0, 0, 0, 0, 2*uxz, 2*(uxx-uyy)],
         [ 2*uxy, 0, -2*uxy, 0, 0, -4*uyz ]])
def hexagonal(u):
    '''
    The matrix is constructed based on the approach from L&L
    using auxiliary coordinates: :math:`\\xi=x+iy`, :math:`\\eta=x-iy`.
    The components are calculated from free energy using formula
    introduced in :ref:`symmetry` with appropriate coordinate changes.
    The order of constants is as follows:

    .. math::
        C_{11}, C_{33}, C_{12}, C_{13}, C_{44}

    :param u: vector of deformations:
        [ :math:`u_{xx}, u_{yy}, u_{zz}, u_{yz}, u_{xz}, u_{xy}` ]
    :returns: Symmetry defined stress-strain equation matrix
    '''
    # TODO: Still needs good verification
    uxx, uyy, uzz, uyz, uxz, uxy = u[0], u[1], u[2], u[3], u[4], u[5]
    # Rows: Voigt stress components; columns: the five hexagonal
    # constants. Shear rows carry the Voigt factor of 2.
    return array(
        [[ uxx, 0, uyy, uzz, 0 ],
         [ uyy, 0, uxx, uzz, 0 ],
         [ 0, uzz, 0, uxx+uyy, 0 ],
         [ 0, 0, 0, 0, 2*uyz ],
         [ 0, 0, 0, 0, 2*uxz ],
         [ 2*uxy, 0, -2*uxy, 0, 0 ]])
def monoclinic(u):
    '''Monoclinic group,
    The ordering of constants is:

    .. math::
        C_{11}, C_{22}, C_{33}, C_{12}, C_{13}, C_{23},
        C_{44}, C_{55}, C_{66}, C_{16}, C_{26}, C_{36}, C_{45}

    :param u: vector of deformations:
        [ :math:`u_{xx}, u_{yy}, u_{zz}, u_{yz}, u_{xz}, u_{xy}` ]
    :returns: Symmetry defined stress-strain equation matrix
    '''
    uxx, uyy, uzz, uyz, uxz, uxy = u[0], u[1], u[2], u[3], u[4], u[5]
    # Rows correspond to the six Voigt stress components, columns to the
    # 13 independent monoclinic constants; shear rows carry the Voigt
    # factor of 2 required by the stress-strain convention.
    return array(
        [[uxx, 0, 0,uyy,uzz, 0, 0, 0, 0,uxy, 0, 0, 0],
         [ 0,uyy, 0,uxx, 0,uzz, 0, 0, 0, 0,uxy, 0, 0],
         [ 0, 0,uzz, 0,uxx,uyy, 0, 0, 0, 0, 0,uxy, 0],
         [ 0, 0, 0, 0, 0, 0,2*uyz, 0, 0, 0, 0, 0,uxz],
         [ 0, 0, 0, 0, 0, 0, 0,2*uxz, 0, 0, 0, 0,uyz],
         [ 0, 0, 0, 0, 0, 0, 0, 0,2*uxy,uxx,uyy,uzz, 0]])
def triclinic(u):
    '''Triclinic crystals.

    *Note*: This was never tested on the real case. Beware!

    The ordering of constants is:

    .. math::
        C_{11}, C_{22}, C_{33},
        C_{12}, C_{13}, C_{23},
        C_{44}, C_{55}, C_{66},
        C_{16}, C_{26}, C_{36}, C_{46}, C_{56},
        C_{14}, C_{15}, C_{25}, C_{45}

    :param u: vector of deformations:
        [ :math:`u_{xx}, u_{yy}, u_{zz}, u_{yz}, u_{xz}, u_{xy}` ]
    :returns: Symmetry defined stress-strain equation matrix
    '''
    # Based on the monoclinic matrix and not tested on real case.
    # If you have test cases for this symmetry send them to the author.
    uxx, uyy, uzz, uyz, uxz, uxy = u[0], u[1], u[2], u[3], u[4], u[5]
    # 6 Voigt stress rows x 18 independent triclinic constants.
    return array(
        [[uxx, 0, 0,uyy,uzz, 0, 0, 0, 0,uxy, 0, 0, 0, 0,uyz,uxz, 0, 0],
         [ 0,uyy, 0,uxx, 0,uzz, 0, 0, 0, 0,uxy, 0, 0, 0, 0, 0,uxz, 0],
         [ 0, 0,uzz, 0,uxx,uyy, 0, 0, 0, 0, 0,uxy, 0, 0, 0, 0, 0, 0],
         [ 0, 0, 0, 0, 0, 0,2*uyz, 0, 0, 0, 0, 0,uxy, 0,uxx, 0, 0,uxz],
         [ 0, 0, 0, 0, 0, 0, 0,2*uxz, 0, 0, 0, 0, 0,uxy, 0,uxx,uyy,uyz],
         [ 0, 0, 0, 0, 0, 0, 0, 0,2*uxy,uxx,uyy,uzz,uyz,uxz, 0, 0, 0, 0]])
def get_cij_order(cryst):
    '''Return the ordering of elastic constants for the structure.

    :param cryst: ASE Atoms object
    :returns: Order of elastic constants as a tuple of strings: C_ij
    '''
    # Higher-symmetry constant sets are prefixes/subsets of the
    # lower-symmetry ones, so build them from shared pieces.
    ortho = ('C_11', 'C_22', 'C_33', 'C_12', 'C_13', 'C_23',
             'C_44', 'C_55', 'C_66')
    trig = ('C_11', 'C_33', 'C_12', 'C_13', 'C_44', 'C_14')
    orders = {
        1: ortho + ('C_16', 'C_26', 'C_36', 'C_46', 'C_56',
                    'C_14', 'C_15', 'C_25', 'C_45'),
        2: ortho + ('C_16', 'C_26', 'C_36', 'C_45'),
        3: ortho,
        4: trig,
        5: trig,
        6: trig[:-1],
        7: ('C_11', 'C_12', 'C_44'),
    }
    lattice_number = get_lattice_type(cryst)[0]
    return orders[lattice_number]
def get_bulk_modulus(cryst):
    '''Calculate bulk modulus using the Birch-Murnaghan equation of state.

    The EOS must be previously calculated by get_BM_EOS routine.
    The returned bulk modulus is a :math:`B_0` coefficient of the B-M EOS.
    The units of the result are defined by ASE. To get the result in
    any particular units (e.g. GPa) you need to divide it by
    ase.units.<unit name>::

        get_bulk_modulus(cryst)/ase.units.GPa

    :param cryst: ASE Atoms object
    :returns: float, bulk modulus :math:`B_0` in ASE units.
    '''
    eos = getattr(cryst, 'bm_eos', None)
    if eos is None:
        raise RuntimeError('Missing B-M EOS data.')
    # B_0 is the second element of the fitted [V_0, B_0, B_0'] triple.
    cryst.bulk_modulus = eos[1]
    return cryst.bulk_modulus
def get_pressure(s):
    '''Return *external* isotropic (hydrostatic) pressure in ASE units.

    If the pressure is positive the system is under external pressure.
    This is a convenience function to convert output of get_stress function
    into external pressure.

    :param s: stress tensor in Voight (vector) notation as returned by
        the get_stress() method.
    :returns: float, external hydrostatic pressure in ASE units.
    '''
    # Negative mean of the three diagonal (normal) stress components.
    diagonal = s[:3]
    return -mean(diagonal)
def get_BM_EOS(cryst, systems):
    """Calculate Birch-Murnaghan Equation of State for the crystal.

    The B-M equation of state is defined by:

    .. math::
        P(V)= \\frac{B_0}{B'_0}\\left[
        \\left({\\frac{V}{V_0}}\\right)^{-B'_0} - 1
        \\right]

    It's coefficients are estimated using n single-point structures ganerated
    from the crystal (cryst) by the scan_volumes function between two relative
    volumes. The BM EOS is fitted to the computed points by
    least squares method. The returned value is a list of fitted
    parameters: :math:`V_0, B_0, B_0'` if the fit succeded.
    If the fitting fails the ``RuntimeError('Calculation failed')`` is raised.
    The data from the calculation and fit is stored in the bm_eos and pv
    members of cryst for future reference. You have to provide properly
    optimized structures in cryst and systems list.

    :param cryst: Atoms object, basic structure
    :param systems: A list of calculated structures
    :returns: tuple of EOS parameters :math:`V_0, B_0, B_0'`.
    """
    # Collect (V, P, |a|, |b|, |c|) per structure and transpose, so
    # pvdat[0] is the volume row and pvdat[1] the pressure row.
    pvdat = array([[r.get_volume(),
                    get_pressure(r.get_stress()),
                    norm(r.get_cell()[:, 0]),
                    norm(r.get_cell()[:, 1]),
                    norm(r.get_cell()[:, 2])] for r in systems]).T
    # Estimate the initial guess assuming b0p=1
    # Limiting volumes
    v1 = min(pvdat[0])
    v2 = max(pvdat[0])
    # The pressure is falling with the growing volume
    p2 = min(pvdat[1])
    p1 = max(pvdat[1])
    # Pass the B-M curve (with B0'=1) through the two extreme points to
    # get starting values for the non-linear fit below.
    b0 = (p1*v1-p2*v2)/(v2-v1)
    v0 = v1*(p1+b0)/b0
    # Initial guess
    p0 = [v0, b0, 1]
    # Fitting
    try :
        p1, succ = optimize.curve_fit(BMEOS, pvdat[0], pvdat[1], p0)
    except (ValueError, RuntimeError, optimize.OptimizeWarning) as ex:
        raise RuntimeError('Calculation failed')
    # Cache the fit result and the raw P-V data on the structure.
    cryst.bm_eos = p1
    cryst.pv = pvdat
    return cryst.bm_eos
def get_elementary_deformations(cryst, n=5, d=2):
    '''Generate elementary deformations for elastic tensor calculation.

    The deformations are created based on the symmetry of the crystal and
    are limited to the non-equivalet axes of the crystal.

    :param cryst: Atoms object, basic structure
    :param n: integer, number of deformations per non-equivalent axis
    :param d: float, size of the maximum deformation in percent and degrees
    :returns: list of deformed structures
    '''
    # Per-Bravais-lattice table of non-equivalent deformation axes and the
    # symmetry matrix builder. Perhaps the number of deformations for the
    # trigonal system could be reduced to [0,3] but better safe than sorry.
    deform = {
        "Cubic": [[0, 3], regular],
        "Hexagonal": [[0, 2, 3, 5], hexagonal],
        "Trigonal": [[0, 1, 2, 3, 4, 5], trigonal],
        "Tetragonal": [[0, 2, 3, 5], tetragonal],
        "Orthorombic": [[0, 1, 2, 3, 4, 5], orthorombic],
        "Monoclinic": [[0, 1, 2, 3, 4, 5], monoclinic],
        "Triclinic": [[0, 1, 2, 3, 4, 5], triclinic]
    }
    lattyp, brav, sg_name, sg_nr = get_lattice_type(cryst)
    axis, symm = deform[brav]
    systems = []
    for a in axis:
        if a < 3:
            # Axial deformation: symmetric span around zero.
            span = linspace(-d, d, n)
        elif a < 6:
            # Shear deformation: skip the zero angle.
            span = linspace(d/10.0, d, n)
        else:
            continue
        for dx in span:
            systems.append(get_cart_deformed_cell(cryst, axis=a, size=dx))
    return systems
def get_elastic_tensor(cryst, systems):
    '''Calculate elastic tensor of the crystal.

    The elastic tensor is calculated from the stress-strain relation
    and derived by fitting this relation to the set of linear equations
    build from the symmetry of the crystal and strains and stresses
    of the set of elementary deformations of the unit cell.

    It is assumed that the crystal is converged and optimized
    under intended pressure/stress. The geometry and stress on the
    cryst is taken as the reference point. No additional optimization
    will be run. Structures in cryst and systems list must have calculated
    stresses. The function returns tuple of :math:`C_{ij}` elastic tensor,
    raw Birch coefficients :math:`B_{ij}` and fitting results: residuals,
    solution rank, singular values returned by numpy.linalg.lstsq.

    :param cryst: Atoms object, basic structure
    :param systems: list of Atoms object with calculated deformed structures
    :returns: tuple(:math:`C_{ij}` float vector,
        tuple(:math:`B_{ij}` float vector, residuals, solution rank, singular values))
    '''
    # Deformation look-up table
    # Perhaps the number of deformations for trigonal
    # system could be reduced to [0,3] but better safe then sorry
    deform = {
        "Cubic": [[0, 3], regular],
        "Hexagonal": [[0, 2, 3, 5], hexagonal],
        "Trigonal": [[0, 1, 2, 3, 4, 5], trigonal],
        "Tetragonal": [[0, 2, 3, 5], tetragonal],
        "Orthorombic": [[0, 1, 2, 3, 4, 5], orthorombic],
        "Monoclinic": [[0, 1, 2, 3, 4, 5], monoclinic],
        "Triclinic": [[0, 1, 2, 3, 4, 5], triclinic]
    }
    lattyp, brav, sg_name, sg_nr = get_lattice_type(cryst)
    # Decide which deformations should be used
    axis, symm = deform[brav]
    ul = []  # Voigt strains of the deformed systems
    sl = []  # corresponding stresses, ambient pressure removed
    p = get_pressure(cryst.get_stress())
    for g in systems:
        ul.append(get_strain(g, refcell=cryst))
        # Remove the ambient pressure from the stress tensor
        sl.append(g.get_stress()-array([p, p, p, 0, 0, 0]))
    # Stack the per-deformation 6xN symmetry matrices into one
    # overdetermined linear system and solve for the Birch coefficients.
    eqm = array([symm(u) for u in ul])
    eqm = reshape(eqm, (eqm.shape[0]*eqm.shape[1], eqm.shape[2]))
    slm = reshape(array(sl), (-1,))
    Bij = lstsq(eqm, slm)
    # Calculate elastic constants from Birch coeff.
    # TODO: Check the sign of the pressure array in the B <=> C relation
    if (symm == orthorombic):
        Cij = Bij[0] - array([-p, -p, -p, p, p, p, -p, -p, -p])
    elif (symm == tetragonal):
        Cij = Bij[0] - array([-p, -p, p, p, -p, -p])
    elif (symm == regular):
        Cij = Bij[0] - array([-p, p, -p])
    elif (symm == trigonal):
        Cij = Bij[0] - array([-p, -p, p, p, -p, p])
    elif (symm == hexagonal):
        Cij = Bij[0] - array([-p, -p, p, p, -p])
    elif (symm == monoclinic):
        # TODO: verify this pressure array
        Cij = Bij[0] - array([-p, -p, -p, p, p, p, -p, -p, -p, p, p, p, p])
    elif (symm == triclinic):
        # TODO: verify this pressure array
        Cij = Bij[0] - array([-p, -p, -p, p, p, p, -p, -p, -p,
                              p, p, p, p, p, p, p, p, p])
    return Cij, Bij
def scan_pressures(cryst, lo, hi, n=5, eos=None):
    '''Scan the pressure axis from lo to hi (inclusive)
    using B-M EOS as the volume predictor.

    Pressure (lo, hi) in GPa.

    :param cryst: Atoms object, reference structure
    :param lo: lower bound of the pressure scan (GPa)
    :param hi: upper bound of the pressure scan (GPa)
    :param n: number of sample points
    :param eos: B-M EOS parameters ``[V_0, B_0, B_0']`` (required)
    :returns: list of uniformly scaled structures
    :raises RuntimeError: when ``eos`` is not provided
    '''
    def invbmeos(b, bp, x):
        # Inverse B-M EOS giving the relative lattice scaling for each
        # pressure in x. Valid only for p > -b/bp; the V0 prefactor is
        # deliberately omitted (relative scaling only).
        return array([pow(b/(bp*xv+b), 1/(3*bp)) for xv in x])

    if eos is None:
        raise RuntimeError('Required EOS data missing')
    # Limit negative pressures to 90% of the singularity value.
    # Beyond this B-M EOS is bound to be wrong anyway.
    lo = max(lo, -0.9*eos[1]/eos[2])
    scale = (eos[0]/cryst.get_volume())*invbmeos(eos[1], eos[2],
                                                linspace(lo, hi, num=n))
    uc = cryst.get_cell()
    systems = [Atoms(cryst) for _ in scale]
    # Loop index renamed from 'n' to 'i': the original shadowed the 'n'
    # parameter, which is confusing and error-prone.
    for i, s in enumerate(scale):
        systems[i].set_cell(s*uc, scale_atoms=True)
    return systems
def scan_volumes(cryst, lo=0.98, hi=1.02, n=5, scale_volumes=True):
    '''Provide a set of crystals along the volume axis from lo to hi (inclusive).

    No volume cell optimization is performed. Bounds are specified as
    fractions (1.10 = 10% increase). If ``scale_volumes`` is False the
    scaling is applied to lattice vectors instead of volumes.

    :param cryst: ASE Atoms object, reference structure
    :param lo: lower bound of the V/V_0 in the scan
    :param hi: upper bound of the V/V_0 in the scan
    :param n: number of volume sample points
    :param scale_volumes: If True scale the unit cell volume or,
                          if False, scale the length of lattice axes.
    :returns: a list of deformed systems
    '''
    scale = linspace(lo, hi, num=n)
    if scale_volumes:
        # Cube root converts a volume ratio into a linear scale factor.
        scale **= (1.0/3.0)
    uc = cryst.get_cell()
    # Use a fresh loop variable instead of shadowing parameter n.
    systems = [Atoms(cryst) for _ in scale]
    for k, s in enumerate(scale):
        systems[k].set_cell(s*uc, scale_atoms=True)
    return systems
def get_vecang_cell(cryst, uc=None):
    '''Compute the A, B, C, alpha, beta, gamma cell parameters.

    The parameters are derived from the unit cell matrix ``uc`` or, when
    ``uc`` is None, from ``cryst``. Angles are in radians.
    '''
    if uc is None:
        uc = cryst.get_cell()
    # Lattice vector lengths and the corresponding unit vectors.
    lengths = [norm(uc[i, :]) for i in range(3)]
    unit = [uc[i, :] / lengths[i] for i in range(3)]
    # Angle i is between the two vectors *other* than vector i.
    angles = [acos(dot(unit[(i + 1) % 3], unit[(i + 2) % 3]))
              for i in range(3)]
    return lengths + angles
def get_deformed_cell(base_cryst, axis=0, size=1):
    '''Return the cell (with atoms) deformed along one cell parameter.

    Axis selects the parameter: 0,1,2 = a,b,c ; 3,4,5 = alpha,beta,gamma.
    The deformation size is in percent (axes) or degrees (angles).

    :param base_cryst: structure to be deformed
    :param axis: deformed cell parameter (0-5)
    :param size: deformation in percent or degrees
    :returns: new, deformed structure
    :raises ValueError: when the deformed angles are geometrically impossible
    '''
    cryst = Atoms(base_cryst)
    uc = base_cryst.get_cell()
    if axis < 3:
        uc[axis, :] = (1+size/100.0)*uc[axis, :]
    else:
        (a, b, c, alp, bet, gam) = get_vecang_cell(cryst)
        d = array([0.0, 0.0, 0.0])
        d[axis-3] = pi*size/180
        (alp, bet, gam) = array((alp, bet, gam))+d
        # The c-axis z component is c*sin(bet)*sqrt(t); t < 0 means the
        # requested angles cannot form a real (non-imaginary) lattice.
        t = 1 - (ctg(bet)*ctg(gam)-cos(alp)*csc(bet)*csc(gam))**2
        if t < 0.0:
            # Attach the diagnostic to the exception instead of printing it
            # and raising a bare ValueError (also fixes the "Cennot" typo).
            raise ValueError(
                'The parameters (alpha,beta,gamma)=(%f,%f,%f) are probably '
                'incorrect and lead to imaginary coordinates. '
                'This range of parameters is unsupported by this program '
                '(and is, let me say, very strange for a crystal). '
                'Cannot continue.' % (alp, bet, gam))
        uc = [[a, 0.0, 0.0],
              [b*cos(gam), b*sin(gam), 0],
              [c*cos(bet),
               c*(cos(alp)/sin(gam) - cos(bet)*ctg(gam)),
               c*sin(bet)*sqrt(t)]]
    cryst.set_cell(uc, scale_atoms=True)
    return cryst
def get_cart_deformed_cell(base_cryst, axis=0, size=1):
    '''Return the cell deformed along one of the cartesian directions.

    Creates a new deformed structure based on ``base_cryst``. The axis is
    specified as follows: 0,1,2 = x,y,z ; sheers: 3,4,5 = yz, xz, xy.
    The size of the deformation is in percent and degrees, respectively.

    :param base_cryst: structure to be deformed
    :param axis: direction of deformation
    :param size: size of the deformation
    :returns: new, deformed structure
    '''
    cryst = Atoms(base_cryst)
    s = size / 100.0
    # Deformation matrix: identity with a single strain entry added.
    defm = diag(ones(3))
    if axis < 3:
        defm[axis, axis] += s
    elif axis == 3:
        defm[1, 2] += s
    elif axis == 4:
        defm[0, 2] += s
    else:
        defm[0, 1] += s
    cryst.set_cell(dot(base_cryst.get_cell(), defm), scale_atoms=True)
    return cryst
def get_strain(cryst, refcell=None):
    '''Calculate the strain tensor in the Voight notation.

    Computes the strain tensor as a conventional 6-vector with respect to
    the reference geometry passed in ``refcell``.

    :param cryst: deformed structure
    :param refcell: reference, undeformed structure (defaults to ``cryst``)
    :returns: 6-vector of strain tensor in the Voight notation
    '''
    if refcell is None:
        refcell = cryst
    ref = refcell.get_cell()
    # Displacement of the lattice vectors relative to the reference.
    du = cryst.get_cell() - ref
    # Symmetrized strain tensor: (u + u^T)/2 with u = ref^-1 . du
    u = dot(inv(ref), du)
    u = (u + u.T) / 2
    return array([u[0, 0], u[1, 1], u[2, 2],
                  u[2, 1], u[2, 0], u[1, 0]])
# Self-test / demo: build an MgO rock-salt crystal with ASE, then print a
# volume scan and the symmetry-reduced elementary deformations.
if __name__ == '__main__':
    from ase.spacegroup import crystal
    # MgO lattice constant (Angstrom), rock-salt structure, spacegroup 225.
    a = 4.194
    cryst = crystal(['Mg', 'O'],
                    [(0, 0, 0), (0.5, 0.5, 0.5)],
                    spacegroup=225,
                    cellpar=[a, a, a, 90, 90, 90])
    # Volume scan: relative volumes around V_0 (see scan_volumes defaults).
    sl = scan_volumes(cryst)
    print('Volumes: ', end='')
    for c in sl:
        print('%.2f (%.1f%%)' % (c.get_volume(),
                                 100*c.get_volume()/cryst.get_volume()),
              end=' ')
    print()
    # Elementary deformations used for the elastic tensor fit.
    sl = get_elementary_deformations(cryst)
    print('Structures: ')
    print(' Vol A B C alph bet gam')
    for n, c in enumerate(sl):
        print('%.4f (%5.1f%%)' % (c.get_volume(),
                                  100*c.get_volume()/cryst.get_volume()),
              end='')
        # NOTE(review): get_cell_lengths_and_angles() is deprecated in newer
        # ASE (cell.cellpar() replaces it) — confirm the targeted ASE version.
        print((3*' %7.4f' + ' ' + 3*' %7.2f') %
              tuple(c.get_cell_lengths_and_angles()))
|
jochym/Elastic | elastic/elastic.py | get_bulk_modulus | python | def get_bulk_modulus(cryst):
'''Calculate bulk modulus using the Birch-Murnaghan equation of state.
The EOS must be previously calculated by get_BM_EOS routine.
The returned bulk modulus is a :math:`B_0` coefficient of the B-M EOS.
The units of the result are defined by ASE. To get the result in
any particular units (e.g. GPa) you need to divide it by
ase.units.<unit name>::
get_bulk_modulus(cryst)/ase.units.GPa
:param cryst: ASE Atoms object
:returns: float, bulk modulus :math:`B_0` in ASE units.
'''
if getattr(cryst, 'bm_eos', None) is None:
raise RuntimeError('Missing B-M EOS data.')
cryst.bulk_modulus = cryst.bm_eos[1]
return cryst.bulk_modulus | Calculate bulk modulus using the Birch-Murnaghan equation of state.
The EOS must be previously calculated by get_BM_EOS routine.
The returned bulk modulus is a :math:`B_0` coefficient of the B-M EOS.
The units of the result are defined by ASE. To get the result in
any particular units (e.g. GPa) you need to divide it by
ase.units.<unit name>::
get_bulk_modulus(cryst)/ase.units.GPa
:param cryst: ASE Atoms object
:returns: float, bulk modulus :math:`B_0` in ASE units. | train | https://github.com/jochym/Elastic/blob/8daae37d0c48aab8dfb1de2839dab02314817f95/elastic/elastic.py#L348-L367 | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 1998-2017 by Paweł T. Jochym <pawel.jochym@ifj.edu.pl>
#
# This file is part of Elastic.
# Elastic is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# Elastic is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with Elastic. If not, see <http://www.gnu.org/licenses/>.
'''
.. _elastic-mod:
Elastic Module
^^^^^^^^^^^^^^
Elastic is a module for calculation of :math:`C_{ij}` components of elastic
tensor from the strain-stress relation.
The strain components here are ordered in standard way which is different
to ordering in previous versions of the code (up to 4.0).
The ordering is: :math:`u_{xx}, u_{yy}, u_{zz}, u_{yz}, u_{xz}, u_{xy}`.
The general ordering of :math:`C_{ij}` components is (except for triclinic
symmetry and taking into account customary names of constants - e.g.
:math:`C_{16} \\rightarrow C_{14}`):
.. math::
C_{11}, C_{22}, C_{33}, C_{12}, C_{13}, C_{23},
C_{44}, C_{55}, C_{66}, C_{16}, C_{26}, C_{36}, C_{45}
The functions with the name of bravais lattices define the symmetry of the
:math:`C_{ij}` matrix. The matrix is N columns by 6 rows where the columns
corespond to independent elastic constants of the given crystal, while the rows
corespond to the canonical deformations of a crystal. The elements are the
second partial derivatives of the free energy formula for the crystal written
down as a quadratic form of the deformations with respect to elastic constant
and deformation.
*Note:*
The elements for deformations :math:`u_{xy}, u_{xz}, u_{yz}`
have to be divided by 2 to properly match the usual definition
of elastic constants.
See: [LL]_ L.D. Landau, E.M. Lifszyc, "Theory of elasticity"
There is some useful summary also at:
`ScienceWorld <http://scienceworld.wolfram.com/physics/Elasticity.html>`_
---------
'''
from __future__ import print_function, division, absolute_import
import re
from ase.atoms import Atoms
try:
# Try new release of spglib
import spglib as spg
except ImportError:
# Old naming scheme
from pyspglib import spglib as spg
from scipy.linalg import norm, lstsq
from scipy import optimize
from numpy.linalg import inv
from numpy import dot, diag, ones, reshape, linspace, array, mean
from math import acos, pi, cos, sin, sqrt
def BMEOS(v, v0, b0, b0p):
    # Birch-Murnaghan equation of state P(V): v0 is the equilibrium volume,
    # b0 the bulk modulus, b0p its pressure derivative.
    compression = pow(v0 / v, b0p)
    return (b0 / b0p) * (compression - 1)
def ctg(x):
    # Cotangent, cos(x)/sin(x); undefined at integer multiples of pi.
    s = sin(x)
    return cos(x) / s
def csc(x):
    # Cosecant, 1/sin(x); undefined at integer multiples of pi.
    return 1.0 / sin(x)
def regular(u):
    '''Equation matrix for the regular (cubic) lattice.

    Constants order: :math:`C_{11}, C_{12}, C_{44}`.

    :param u: vector of deformations:
        [ :math:`u_{xx}, u_{yy}, u_{zz}, u_{yz}, u_{xz}, u_{xy}` ]
    :returns: Symmetry defined stress-strain equation matrix
    '''
    exx, eyy, ezz = u[0], u[1], u[2]
    eyz, exz, exy = u[3], u[4], u[5]
    rows = [[exx, eyy + ezz, 0],
            [eyy, exx + ezz, 0],
            [ezz, exx + eyy, 0],
            [0, 0, 2 * eyz],
            [0, 0, 2 * exz],
            [0, 0, 2 * exy]]
    return array(rows)
def tetragonal(u):
    '''Equation matrix for the tetragonal lattice.

    Constants order: :math:`C_{11}, C_{33}, C_{12}, C_{13}, C_{44}, C_{14}`.

    :param u: vector of deformations:
        [ :math:`u_{xx}, u_{yy}, u_{zz}, u_{yz}, u_{xz}, u_{xy}` ]
    :returns: Symmetry defined stress-strain equation matrix
    '''
    exx, eyy, ezz = u[0], u[1], u[2]
    eyz, exz, exy = u[3], u[4], u[5]
    rows = [[exx, 0, eyy, ezz, 0, 0],
            [eyy, 0, exx, ezz, 0, 0],
            [0, ezz, 0, exx + eyy, 0, 0],
            [0, 0, 0, 0, 0, 2 * exy],
            [0, 0, 0, 0, 2 * exz, 0],
            [0, 0, 0, 0, 2 * eyz, 0]]
    return array(rows)
def orthorombic(u):
    '''Equation matrix for the orthorombic lattice.

    Constants order: :math:`C_{11}, C_{22}, C_{33}, C_{12}, C_{13}, C_{23},
    C_{44}, C_{55}, C_{66}`.

    :param u: vector of deformations:
        [ :math:`u_{xx}, u_{yy}, u_{zz}, u_{yz}, u_{xz}, u_{xy}` ]
    :returns: Symmetry defined stress-strain equation matrix
    '''
    exx, eyy, ezz = u[0], u[1], u[2]
    eyz, exz, exy = u[3], u[4], u[5]
    rows = [[exx, 0, 0, eyy, ezz, 0, 0, 0, 0],
            [0, eyy, 0, exx, 0, ezz, 0, 0, 0],
            [0, 0, ezz, 0, exx, eyy, 0, 0, 0],
            [0, 0, 0, 0, 0, 0, 2 * eyz, 0, 0],
            [0, 0, 0, 0, 0, 0, 0, 2 * exz, 0],
            [0, 0, 0, 0, 0, 0, 0, 0, 2 * exy]]
    return array(rows)
def trigonal(u):
    '''Equation matrix for the trigonal lattice.

    Built following the L&L approach with auxiliary coordinates
    :math:`\\xi=x+iy`, :math:`\\eta=x-iy`. Constants order:
    :math:`C_{11}, C_{33}, C_{12}, C_{13}, C_{44}, C_{14}`.

    TODO: not tested yet; some doubt remains about the C_14 constant.

    :param u: vector of deformations:
        [ :math:`u_{xx}, u_{yy}, u_{zz}, u_{yz}, u_{xz}, u_{xy}` ]
    :returns: Symmetry defined stress-strain equation matrix
    '''
    exx, eyy, ezz = u[0], u[1], u[2]
    eyz, exz, exy = u[3], u[4], u[5]
    rows = [[exx, 0, eyy, ezz, 0, 2 * exz],
            [eyy, 0, exx, ezz, 0, -2 * exz],
            [0, ezz, 0, exx + eyy, 0, 0],
            [0, 0, 0, 0, 2 * eyz, -4 * exy],
            [0, 0, 0, 0, 2 * exz, 2 * (exx - eyy)],
            [2 * exy, 0, -2 * exy, 0, 0, -4 * eyz]]
    return array(rows)
def hexagonal(u):
    '''Equation matrix for the hexagonal lattice.

    Built following the L&L approach with auxiliary coordinates
    :math:`\\xi=x+iy`, :math:`\\eta=x-iy`. Constants order:
    :math:`C_{11}, C_{33}, C_{12}, C_{13}, C_{44}`.

    TODO: still needs good verification.

    :param u: vector of deformations:
        [ :math:`u_{xx}, u_{yy}, u_{zz}, u_{yz}, u_{xz}, u_{xy}` ]
    :returns: Symmetry defined stress-strain equation matrix
    '''
    exx, eyy, ezz = u[0], u[1], u[2]
    eyz, exz, exy = u[3], u[4], u[5]
    rows = [[exx, 0, eyy, ezz, 0],
            [eyy, 0, exx, ezz, 0],
            [0, ezz, 0, exx + eyy, 0],
            [0, 0, 0, 0, 2 * eyz],
            [0, 0, 0, 0, 2 * exz],
            [2 * exy, 0, -2 * exy, 0, 0]]
    return array(rows)
def monoclinic(u):
    '''Equation matrix for the monoclinic lattice.

    Constants order: :math:`C_{11}, C_{22}, C_{33}, C_{12}, C_{13}, C_{23},
    C_{44}, C_{55}, C_{66}, C_{16}, C_{26}, C_{36}, C_{45}`.

    :param u: vector of deformations:
        [ :math:`u_{xx}, u_{yy}, u_{zz}, u_{yz}, u_{xz}, u_{xy}` ]
    :returns: Symmetry defined stress-strain equation matrix
    '''
    exx, eyy, ezz = u[0], u[1], u[2]
    eyz, exz, exy = u[3], u[4], u[5]
    rows = [[exx, 0, 0, eyy, ezz, 0, 0, 0, 0, exy, 0, 0, 0],
            [0, eyy, 0, exx, 0, ezz, 0, 0, 0, 0, exy, 0, 0],
            [0, 0, ezz, 0, exx, eyy, 0, 0, 0, 0, 0, exy, 0],
            [0, 0, 0, 0, 0, 0, 2 * eyz, 0, 0, 0, 0, 0, exz],
            [0, 0, 0, 0, 0, 0, 0, 2 * exz, 0, 0, 0, 0, eyz],
            [0, 0, 0, 0, 0, 0, 0, 0, 2 * exy, exx, eyy, ezz, 0]]
    return array(rows)
def triclinic(u):
    '''Equation matrix for triclinic crystals.

    *Note*: This was never tested on a real case. Beware!
    Based on the monoclinic matrix. Constants order:

    .. math::
       C_{11}, C_{22}, C_{33},
       C_{12}, C_{13}, C_{23},
       C_{44}, C_{55}, C_{66},
       C_{16}, C_{26}, C_{36}, C_{46}, C_{56},
       C_{14}, C_{15}, C_{25}, C_{45}

    :param u: vector of deformations:
        [ :math:`u_{xx}, u_{yy}, u_{zz}, u_{yz}, u_{xz}, u_{xy}` ]
    :returns: Symmetry defined stress-strain equation matrix
    '''
    exx, eyy, ezz = u[0], u[1], u[2]
    eyz, exz, exy = u[3], u[4], u[5]
    rows = [
        [exx, 0, 0, eyy, ezz, 0, 0, 0, 0, exy,
         0, 0, 0, 0, eyz, exz, 0, 0],
        [0, eyy, 0, exx, 0, ezz, 0, 0, 0, 0,
         exy, 0, 0, 0, 0, 0, exz, 0],
        [0, 0, ezz, 0, exx, eyy, 0, 0, 0, 0,
         0, exy, 0, 0, 0, 0, 0, 0],
        [0, 0, 0, 0, 0, 0, 2 * eyz, 0, 0, 0,
         0, 0, exy, 0, exx, 0, 0, exz],
        [0, 0, 0, 0, 0, 0, 0, 2 * exz, 0, 0,
         0, 0, 0, exy, 0, exx, eyy, eyz],
        [0, 0, 0, 0, 0, 0, 0, 0, 2 * exy, exx,
         eyy, ezz, eyz, exz, 0, 0, 0, 0]]
    return array(rows)
def get_cij_order(cryst):
    '''Give the order of elastic constants for the structure.

    :param cryst: ASE Atoms object
    :returns: order of elastic constants as a tuple of strings: C_ij
    '''
    # Triclinic (1), monoclinic (2) and orthorombic (3) share the same
    # leading nine constants; the higher-symmetry lattices use short lists.
    base = ('C_11', 'C_22', 'C_33', 'C_12', 'C_13', 'C_23',
            'C_44', 'C_55', 'C_66')
    orders = {
        1: base + ('C_16', 'C_26', 'C_36', 'C_46', 'C_56',
                   'C_14', 'C_15', 'C_25', 'C_45'),
        2: base + ('C_16', 'C_26', 'C_36', 'C_45'),
        3: base,
        4: ('C_11', 'C_33', 'C_12', 'C_13', 'C_44', 'C_14'),
        5: ('C_11', 'C_33', 'C_12', 'C_13', 'C_44', 'C_14'),
        6: ('C_11', 'C_33', 'C_12', 'C_13', 'C_44'),
        7: ('C_11', 'C_12', 'C_44'),
    }
    return orders[get_lattice_type(cryst)[0]]
def get_lattice_type(cryst):
    '''Find the symmetry of the crystal using the spglib symmetry finder.

    Derives the space group name and number and, from the number, the
    lattice type and Bravais lattice. Lattice type numbers (1-based):

    Triclinic (1), Monoclinic (2), Orthorombic (3),
    Tetragonal (4), Trigonal (5), Hexagonal (6), Cubic (7)

    :param cryst: ASE Atoms object
    :returns: tuple (lattice type number (1-7), lattice name,
              space group name, space group number)
    '''
    # Exclusive upper space-group number for each lattice type, ascending.
    boundaries = (
        (3, "Triclinic"),
        (16, "Monoclinic"),
        (75, "Orthorombic"),
        (143, "Tetragonal"),
        (168, "Trigonal"),
        (195, "Hexagonal"),
        (231, "Cubic"),
    )
    # spglib returns e.g. "Fm-3m (225)"; split name and number.
    sg = spg.get_spacegroup(cryst)
    m = re.match(r'([A-Z].*\b)\s*\(([0-9]*)\)', sg)
    sg_name = m.group(1)
    sg_nr = int(m.group(2))
    for lattype, (limit, bravais) in enumerate(boundaries, start=1):
        if sg_nr < limit:
            break
    return lattype, bravais, sg_name, sg_nr
def get_pressure(s):
    '''Return the *external* isotropic (hydrostatic) pressure in ASE units.

    If the pressure is positive the system is under external pressure.
    This is a convenience function to convert the output of the
    get_stress() method into an external pressure.

    :param s: stress tensor in Voight (vector) notation as returned by
              the get_stress() method.
    :returns: float, external hydrostatic pressure in ASE units.
    '''
    # Pressure is minus the mean of the three diagonal stress components.
    diagonal = s[:3]
    return -mean(diagonal)
def get_BM_EOS(cryst, systems):
    """Calculate the Birch-Murnaghan Equation of State for the crystal.

    The B-M equation of state is defined by:

    .. math::
        P(V)= \\frac{B_0}{B'_0}\\left[
        \\left({\\frac{V}{V_0}}\\right)^{-B'_0} - 1
        \\right]

    Its coefficients are estimated from the single-point structures in
    ``systems`` (e.g. generated by scan_volumes) and fitted by the least
    squares method. The fitted parameters :math:`V_0, B_0, B_0'` are
    returned and also stored, together with the raw p-V data, in the
    ``bm_eos`` and ``pv`` members of ``cryst`` for future reference.
    You have to provide properly optimized structures in cryst and
    the systems list.

    :param cryst: Atoms object, basic structure
    :param systems: a list of calculated structures
    :returns: tuple of EOS parameters :math:`V_0, B_0, B_0'`
    :raises RuntimeError: when the least-squares fit fails
    """
    # Columns: V, p, |a|, |b|, |c| per structure; transposed to rows.
    pvdat = array([[r.get_volume(),
                    get_pressure(r.get_stress()),
                    norm(r.get_cell()[:, 0]),
                    norm(r.get_cell()[:, 1]),
                    norm(r.get_cell()[:, 2])] for r in systems]).T

    # Estimate the initial guess assuming b0p=1, using the limiting
    # volumes; the pressure falls with growing volume.
    v1 = min(pvdat[0])
    v2 = max(pvdat[0])
    p2 = min(pvdat[1])
    p1 = max(pvdat[1])
    b0 = (p1*v1 - p2*v2)/(v2 - v1)
    v0 = v1*(p1 + b0)/b0
    p0 = [v0, b0, 1]

    # Fit the B-M EOS to the computed p(V) points.
    try:
        fit, _ = optimize.curve_fit(BMEOS, pvdat[0], pvdat[1], p0)
    except (ValueError, RuntimeError, optimize.OptimizeWarning) as ex:
        # Chain the original exception so the root cause is not lost.
        raise RuntimeError('Calculation failed') from ex
    cryst.bm_eos = fit
    cryst.pv = pvdat
    return cryst.bm_eos
def get_elementary_deformations(cryst, n=5, d=2):
    '''Generate elementary deformations for elastic tensor calculation.

    The deformations are created based on the symmetry of the crystal and
    are limited to the non-equivalent axes of the crystal.

    :param cryst: Atoms object, basic structure
    :param n: integer, number of deformations per non-equivalent axis
    :param d: float, size of the maximum deformation in percent and degrees
    :returns: list of deformed structures
    '''
    # Deformation look-up table: non-equivalent axes per Bravais lattice.
    # Perhaps the number of deformations for trigonal systems could be
    # reduced to [0, 3], but better safe than sorry.
    deform = {
        "Cubic": [[0, 3], regular],
        "Hexagonal": [[0, 2, 3, 5], hexagonal],
        "Trigonal": [[0, 1, 2, 3, 4, 5], trigonal],
        "Tetragonal": [[0, 2, 3, 5], tetragonal],
        "Orthorombic": [[0, 1, 2, 3, 4, 5], orthorombic],
        "Monoclinic": [[0, 1, 2, 3, 4, 5], monoclinic],
        "Triclinic": [[0, 1, 2, 3, 4, 5], triclinic]
    }
    brav = get_lattice_type(cryst)[1]
    axes = deform[brav][0]
    systems = []
    for ax in axes:
        if ax < 3:
            # Axial deformation: sample symmetrically around zero.
            sizes = linspace(-d, d, n)
        else:
            # Shear deformation: skip the zero angle.
            sizes = linspace(d/10.0, d, n)
        systems.extend(get_cart_deformed_cell(cryst, axis=ax, size=dx)
                       for dx in sizes)
    return systems
def get_elastic_tensor(cryst, systems):
    '''Calculate the elastic tensor of the crystal.

    The elastic tensor is calculated from the stress-strain relation,
    derived by fitting this relation to the set of linear equations built
    from the symmetry of the crystal and the strains and stresses of the
    set of elementary deformations of the unit cell.

    It is assumed that the crystal is converged and optimized under the
    intended pressure/stress. The geometry and stress of ``cryst`` are
    taken as the reference point; no additional optimization is run.
    Structures in ``cryst`` and the ``systems`` list must have calculated
    stresses.

    :param cryst: Atoms object, basic structure
    :param systems: list of Atoms objects with calculated deformed structures
    :returns: tuple(:math:`C_{ij}` float vector,
              tuple(:math:`B_{ij}` float vector, residuals, solution rank,
              singular values) as returned by the least-squares solver)
    '''
    # Deformation look-up table (same as in get_elementary_deformations).
    # Perhaps the number of deformations for trigonal
    # system could be reduced to [0,3] but better safe then sorry
    deform = {
        "Cubic": [[0, 3], regular],
        "Hexagonal": [[0, 2, 3, 5], hexagonal],
        "Trigonal": [[0, 1, 2, 3, 4, 5], trigonal],
        "Tetragonal": [[0, 2, 3, 5], tetragonal],
        "Orthorombic": [[0, 1, 2, 3, 4, 5], orthorombic],
        "Monoclinic": [[0, 1, 2, 3, 4, 5], monoclinic],
        "Triclinic": [[0, 1, 2, 3, 4, 5], triclinic]
    }
    lattyp, brav, sg_name, sg_nr = get_lattice_type(cryst)
    # Decide which deformations should be used
    axis, symm = deform[brav]
    ul = []
    sl = []
    # External hydrostatic pressure on the reference structure.
    p = get_pressure(cryst.get_stress())
    for g in systems:
        ul.append(get_strain(g, refcell=cryst))
        # Remove the ambient pressure from the stress tensor
        sl.append(g.get_stress()-array([p, p, p, 0, 0, 0]))
    # Stack the per-deformation symmetry matrices into one (6*N x M) system
    # and flatten the stresses into the matching right-hand-side vector.
    eqm = array([symm(u) for u in ul])
    eqm = reshape(eqm, (eqm.shape[0]*eqm.shape[1], eqm.shape[2]))
    slm = reshape(array(sl), (-1,))
    # Least-squares solve for the raw Birch coefficients B_ij.
    Bij = lstsq(eqm, slm)
    # Calculate elastic constants from Birch coeff.
    # TODO: Check the sign of the pressure array in the B <=> C relation
    if (symm == orthorombic):
        Cij = Bij[0] - array([-p, -p, -p, p, p, p, -p, -p, -p])
    elif (symm == tetragonal):
        Cij = Bij[0] - array([-p, -p, p, p, -p, -p])
    elif (symm == regular):
        Cij = Bij[0] - array([-p, p, -p])
    elif (symm == trigonal):
        Cij = Bij[0] - array([-p, -p, p, p, -p, p])
    elif (symm == hexagonal):
        Cij = Bij[0] - array([-p, -p, p, p, -p])
    elif (symm == monoclinic):
        # TODO: verify this pressure array
        Cij = Bij[0] - array([-p, -p, -p, p, p, p, -p, -p, -p, p, p, p, p])
    elif (symm == triclinic):
        # TODO: verify this pressure array
        Cij = Bij[0] - array([-p, -p, -p, p, p, p, -p, -p, -p,
                              p, p, p, p, p, p, p, p, p])
    return Cij, Bij
def scan_pressures(cryst, lo, hi, n=5, eos=None):
    '''Scan the pressure axis from lo to hi (inclusive).

    Uses the Birch-Murnaghan EOS as the volume predictor to build a set of
    isotropically scaled copies of ``cryst`` spanning the requested pressure
    range. Pressure (lo, hi) in GPa.

    :param cryst: ASE Atoms object, reference structure
    :param lo: lower pressure bound
    :param hi: upper pressure bound
    :param n: number of pressure sample points
    :param eos: B-M EOS parameters ``[V_0, B_0, B_0']`` (required)
    :returns: list of scaled copies of ``cryst``
    :raises RuntimeError: when ``eos`` is not provided
    '''
    def invbmeos(b, bp, x):
        # Inverse B-M EOS: relative linear scale factor for pressure x.
        # Works only for p > -b/bp; the V0 prefactor is deliberately removed.
        return array([pow(b/(bp*xv+b), 1/(3*bp)) for xv in x])

    if eos is None:
        raise RuntimeError('Required EOS data missing')
    # Limit negative pressures to 90% of the singularity value.
    # Beyond this B-M EOS is bound to be wrong anyway.
    lo = max(lo, -0.9*eos[1]/eos[2])
    # NOTE(review): eos[0]/V is a *volume* ratio multiplying a *linear*
    # scale factor — kept as in the original; confirm the intent.
    scale = (eos[0]/cryst.get_volume())*invbmeos(eos[1], eos[2],
                                                 linspace(lo, hi, num=n))
    uc = cryst.get_cell()
    # Use a fresh loop variable instead of shadowing parameter n.
    systems = [Atoms(cryst) for _ in scale]
    for k, s in enumerate(scale):
        systems[k].set_cell(s*uc, scale_atoms=True)
    return systems
def scan_volumes(cryst, lo=0.98, hi=1.02, n=5, scale_volumes=True):
    '''Provide a set of crystals along the volume axis from lo to hi (inclusive).

    No volume cell optimization is performed. Bounds are specified as
    fractions (1.10 = 10% increase). If ``scale_volumes`` is False the
    scaling is applied to lattice vectors instead of volumes.

    :param cryst: ASE Atoms object, reference structure
    :param lo: lower bound of the V/V_0 in the scan
    :param hi: upper bound of the V/V_0 in the scan
    :param n: number of volume sample points
    :param scale_volumes: If True scale the unit cell volume or,
                          if False, scale the length of lattice axes.
    :returns: a list of deformed systems
    '''
    scale = linspace(lo, hi, num=n)
    if scale_volumes:
        # Cube root converts a volume ratio into a linear scale factor.
        scale **= (1.0/3.0)
    uc = cryst.get_cell()
    # Use a fresh loop variable instead of shadowing parameter n.
    systems = [Atoms(cryst) for _ in scale]
    for k, s in enumerate(scale):
        systems[k].set_cell(s*uc, scale_atoms=True)
    return systems
def get_vecang_cell(cryst, uc=None):
    '''Compute the A, B, C, alpha, beta, gamma cell parameters.

    The parameters are derived from the unit cell matrix ``uc`` or, when
    ``uc`` is None, from ``cryst``. Angles are in radians.
    '''
    if uc is None:
        uc = cryst.get_cell()
    # Lattice vector lengths and the corresponding unit vectors.
    lengths = [norm(uc[i, :]) for i in range(3)]
    unit = [uc[i, :] / lengths[i] for i in range(3)]
    # Angle i is between the two vectors *other* than vector i.
    angles = [acos(dot(unit[(i + 1) % 3], unit[(i + 2) % 3]))
              for i in range(3)]
    return lengths + angles
def get_deformed_cell(base_cryst, axis=0, size=1):
    '''Return the cell (with atoms) deformed along one cell parameter.

    Axis selects the parameter: 0,1,2 = a,b,c ; 3,4,5 = alpha,beta,gamma.
    The deformation size is in percent (axes) or degrees (angles).

    :param base_cryst: structure to be deformed
    :param axis: deformed cell parameter (0-5)
    :param size: deformation in percent or degrees
    :returns: new, deformed structure
    :raises ValueError: when the deformed angles are geometrically impossible
    '''
    cryst = Atoms(base_cryst)
    uc = base_cryst.get_cell()
    if axis < 3:
        uc[axis, :] = (1+size/100.0)*uc[axis, :]
    else:
        (a, b, c, alp, bet, gam) = get_vecang_cell(cryst)
        d = array([0.0, 0.0, 0.0])
        d[axis-3] = pi*size/180
        (alp, bet, gam) = array((alp, bet, gam))+d
        # The c-axis z component is c*sin(bet)*sqrt(t); t < 0 means the
        # requested angles cannot form a real (non-imaginary) lattice.
        t = 1 - (ctg(bet)*ctg(gam)-cos(alp)*csc(bet)*csc(gam))**2
        if t < 0.0:
            # Attach the diagnostic to the exception instead of printing it
            # and raising a bare ValueError (also fixes the "Cennot" typo).
            raise ValueError(
                'The parameters (alpha,beta,gamma)=(%f,%f,%f) are probably '
                'incorrect and lead to imaginary coordinates. '
                'This range of parameters is unsupported by this program '
                '(and is, let me say, very strange for a crystal). '
                'Cannot continue.' % (alp, bet, gam))
        uc = [[a, 0.0, 0.0],
              [b*cos(gam), b*sin(gam), 0],
              [c*cos(bet),
               c*(cos(alp)/sin(gam) - cos(bet)*ctg(gam)),
               c*sin(bet)*sqrt(t)]]
    cryst.set_cell(uc, scale_atoms=True)
    return cryst
def get_cart_deformed_cell(base_cryst, axis=0, size=1):
    '''Return the cell deformed along one of the cartesian directions.

    Creates a new deformed structure based on ``base_cryst``. The axis is
    specified as follows: 0,1,2 = x,y,z ; sheers: 3,4,5 = yz, xz, xy.
    The size of the deformation is in percent and degrees, respectively.

    :param base_cryst: structure to be deformed
    :param axis: direction of deformation
    :param size: size of the deformation
    :returns: new, deformed structure
    '''
    cryst = Atoms(base_cryst)
    s = size / 100.0
    # Deformation matrix: identity with a single strain entry added.
    defm = diag(ones(3))
    if axis < 3:
        defm[axis, axis] += s
    elif axis == 3:
        defm[1, 2] += s
    elif axis == 4:
        defm[0, 2] += s
    else:
        defm[0, 1] += s
    cryst.set_cell(dot(base_cryst.get_cell(), defm), scale_atoms=True)
    return cryst
def get_strain(cryst, refcell=None):
    '''Calculate the strain tensor in the Voight notation.

    Computes the strain tensor as a conventional 6-vector with respect to
    the reference geometry passed in ``refcell``.

    :param cryst: deformed structure
    :param refcell: reference, undeformed structure (defaults to ``cryst``)
    :returns: 6-vector of strain tensor in the Voight notation
    '''
    if refcell is None:
        refcell = cryst
    ref = refcell.get_cell()
    # Displacement of the lattice vectors relative to the reference.
    du = cryst.get_cell() - ref
    # Symmetrized strain tensor: (u + u^T)/2 with u = ref^-1 . du
    u = dot(inv(ref), du)
    u = (u + u.T) / 2
    return array([u[0, 0], u[1, 1], u[2, 2],
                  u[2, 1], u[2, 0], u[1, 0]])
# Self-test / demo: build an MgO rock-salt crystal with ASE, then print a
# volume scan and the symmetry-reduced elementary deformations.
if __name__ == '__main__':
    from ase.spacegroup import crystal
    # MgO lattice constant (Angstrom), rock-salt structure, spacegroup 225.
    a = 4.194
    cryst = crystal(['Mg', 'O'],
                    [(0, 0, 0), (0.5, 0.5, 0.5)],
                    spacegroup=225,
                    cellpar=[a, a, a, 90, 90, 90])
    # Volume scan: relative volumes around V_0 (see scan_volumes defaults).
    sl = scan_volumes(cryst)
    print('Volumes: ', end='')
    for c in sl:
        print('%.2f (%.1f%%)' % (c.get_volume(),
                                 100*c.get_volume()/cryst.get_volume()),
              end=' ')
    print()
    # Elementary deformations used for the elastic tensor fit.
    sl = get_elementary_deformations(cryst)
    print('Structures: ')
    print(' Vol A B C alph bet gam')
    for n, c in enumerate(sl):
        print('%.4f (%5.1f%%)' % (c.get_volume(),
                                  100*c.get_volume()/cryst.get_volume()),
              end='')
        # NOTE(review): get_cell_lengths_and_angles() is deprecated in newer
        # ASE (cell.cellpar() replaces it) — confirm the targeted ASE version.
        print((3*' %7.4f' + ' ' + 3*' %7.2f') %
              tuple(c.get_cell_lengths_and_angles()))
|
jochym/Elastic | elastic/elastic.py | get_BM_EOS | python | def get_BM_EOS(cryst, systems):
pvdat = array([[r.get_volume(),
get_pressure(r.get_stress()),
norm(r.get_cell()[:, 0]),
norm(r.get_cell()[:, 1]),
norm(r.get_cell()[:, 2])] for r in systems]).T
# Estimate the initial guess assuming b0p=1
# Limiting volumes
v1 = min(pvdat[0])
v2 = max(pvdat[0])
# The pressure is falling with the growing volume
p2 = min(pvdat[1])
p1 = max(pvdat[1])
b0 = (p1*v1-p2*v2)/(v2-v1)
v0 = v1*(p1+b0)/b0
# Initial guess
p0 = [v0, b0, 1]
# Fitting
try :
p1, succ = optimize.curve_fit(BMEOS, pvdat[0], pvdat[1], p0)
except (ValueError, RuntimeError, optimize.OptimizeWarning) as ex:
raise RuntimeError('Calculation failed')
cryst.bm_eos = p1
cryst.pv = pvdat
return cryst.bm_eos | Calculate Birch-Murnaghan Equation of State for the crystal.
The B-M equation of state is defined by:
.. math::
P(V)= \\frac{B_0}{B'_0}\\left[
\\left({\\frac{V}{V_0}}\\right)^{-B'_0} - 1
\\right]
Its coefficients are estimated using n single-point structures generated
from the crystal (cryst) by the scan_volumes function between two relative
volumes. The BM EOS is fitted to the computed points by
least squares method. The returned value is a list of fitted
parameters: :math:`V_0, B_0, B_0'` if the fit succeeded.
If the fitting fails the ``RuntimeError('Calculation failed')`` is raised.
The data from the calculation and fit is stored in the bm_eos and pv
members of cryst for future reference. You have to provide properly
optimized structures in cryst and systems list.
:param cryst: Atoms object, basic structure
:param systems: A list of calculated structures
:returns: tuple of EOS parameters :math:`V_0, B_0, B_0'`. | train | https://github.com/jochym/Elastic/blob/8daae37d0c48aab8dfb1de2839dab02314817f95/elastic/elastic.py#L386-L440 | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 1998-2017 by Paweł T. Jochym <pawel.jochym@ifj.edu.pl>
#
# This file is part of Elastic.
# Elastic is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# Elastic is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with Elastic. If not, see <http://www.gnu.org/licenses/>.
'''
.. _elastic-mod:
Elastic Module
^^^^^^^^^^^^^^
Elastic is a module for calculation of :math:`C_{ij}` components of elastic
tensor from the strain-stress relation.
The strain components here are ordered in standard way which is different
to ordering in previous versions of the code (up to 4.0).
The ordering is: :math:`u_{xx}, u_{yy}, u_{zz}, u_{yz}, u_{xz}, u_{xy}`.
The general ordering of :math:`C_{ij}` components is (except for triclinic
symmetry and taking into account customary names of constants - e.g.
:math:`C_{16} \\rightarrow C_{14}`):
.. math::
C_{11}, C_{22}, C_{33}, C_{12}, C_{13}, C_{23},
C_{44}, C_{55}, C_{66}, C_{16}, C_{26}, C_{36}, C_{45}
The functions with the name of bravais lattices define the symmetry of the
:math:`C_{ij}` matrix. The matrix is N columns by 6 rows where the columns
corespond to independent elastic constants of the given crystal, while the rows
corespond to the canonical deformations of a crystal. The elements are the
second partial derivatives of the free energy formula for the crystal written
down as a quadratic form of the deformations with respect to elastic constant
and deformation.
*Note:*
The elements for deformations :math:`u_{xy}, u_{xz}, u_{yz}`
have to be divided by 2 to properly match the usual definition
of elastic constants.
See: [LL]_ L.D. Landau, E.M. Lifszyc, "Theory of elasticity"
There is some useful summary also at:
`ScienceWorld <http://scienceworld.wolfram.com/physics/Elasticity.html>`_
---------
'''
from __future__ import print_function, division, absolute_import
import re
from ase.atoms import Atoms
try:
# Try new release of spglib
import spglib as spg
except ImportError:
# Old naming scheme
from pyspglib import spglib as spg
from scipy.linalg import norm, lstsq
from scipy import optimize
from numpy.linalg import inv
from numpy import dot, diag, ones, reshape, linspace, array, mean
from math import acos, pi, cos, sin, sqrt
def BMEOS(v, v0, b0, b0p):
    # Birch-Murnaghan equation of state P(V): v0 is the equilibrium volume,
    # b0 the bulk modulus, b0p its pressure derivative.
    compression = pow(v0 / v, b0p)
    return (b0 / b0p) * (compression - 1)
def ctg(x):
    '''Cotangent of x (x in radians).'''
    c, s = cos(x), sin(x)
    return c/s
def csc(x):
    '''Cosecant of x (x in radians).'''
    s = sin(x)
    return 1/s
def regular(u):
    '''
    Stress-strain equation matrix for the regular (cubic) lattice.
    Column order of the elastic constants:
    .. math::
       C_{11}, C_{12}, C_{44}
    :param u: vector of deformations:
        [ :math:`u_{xx}, u_{yy}, u_{zz}, u_{yz}, u_{xz}, u_{xy}` ]
    :returns: Symmetry defined stress-strain equation matrix
    '''
    uxx, uyy, uzz, uyz, uxz, uxy = u[:6]
    # Rows follow the stress components: xx, yy, zz, yz, xz, xy.
    rows = [[uxx, uyy + uzz, 0],
            [uyy, uxx + uzz, 0],
            [uzz, uxx + uyy, 0],
            [0, 0, 2*uyz],
            [0, 0, 2*uxz],
            [0, 0, 2*uxy]]
    return array(rows)
def tetragonal(u):
    '''
    Stress-strain equation matrix for the tetragonal lattice.
    Column order of the elastic constants:
    .. math::
       C_{11}, C_{33}, C_{12}, C_{13}, C_{44}, C_{14}
    :param u: vector of deformations:
        [ :math:`u_{xx}, u_{yy}, u_{zz}, u_{yz}, u_{xz}, u_{xy}` ]
    :returns: Symmetry defined stress-strain equation matrix
    '''
    uxx, uyy, uzz, uyz, uxz, uxy = u[:6]
    # Rows follow the stress components: xx, yy, zz, yz, xz, xy.
    rows = [[uxx, 0, uyy, uzz, 0, 0],
            [uyy, 0, uxx, uzz, 0, 0],
            [0, uzz, 0, uxx + uyy, 0, 0],
            [0, 0, 0, 0, 0, 2*uxy],
            [0, 0, 0, 0, 2*uxz, 0],
            [0, 0, 0, 0, 2*uyz, 0]]
    return array(rows)
def orthorombic(u):
    '''
    Stress-strain equation matrix for the orthorombic lattice.
    Column order of the elastic constants:
    .. math::
       C_{11}, C_{22}, C_{33}, C_{12}, C_{13}, C_{23},
       C_{44}, C_{55}, C_{66}
    :param u: vector of deformations:
        [ :math:`u_{xx}, u_{yy}, u_{zz}, u_{yz}, u_{xz}, u_{xy}` ]
    :returns: Symmetry defined stress-strain equation matrix
    '''
    uxx, uyy, uzz, uyz, uxz, uxy = u[:6]
    # Rows follow the stress components: xx, yy, zz, yz, xz, xy.
    rows = [[uxx, 0, 0, uyy, uzz, 0, 0, 0, 0],
            [0, uyy, 0, uxx, 0, uzz, 0, 0, 0],
            [0, 0, uzz, 0, uxx, uyy, 0, 0, 0],
            [0, 0, 0, 0, 0, 0, 2*uyz, 0, 0],
            [0, 0, 0, 0, 0, 0, 0, 2*uxz, 0],
            [0, 0, 0, 0, 0, 0, 0, 0, 2*uxy]]
    return array(rows)
def trigonal(u):
    '''
    Stress-strain equation matrix for the trigonal lattice.
    The matrix is constructed based on the approach from L&L
    using auxiliary coordinates: :math:`\\xi=x+iy`, :math:`\\eta=x-iy`.
    The components are calculated from free energy using formula
    introduced in :ref:`symmetry` with appropriate coordinate changes.
    The order of constants is as follows:
    .. math::
       C_{11}, C_{33}, C_{12}, C_{13}, C_{44}, C_{14}
    :param u: vector of deformations:
        [ :math:`u_{xx}, u_{yy}, u_{zz}, u_{yz}, u_{xz}, u_{xy}` ]
    :returns: Symmetry defined stress-strain equation matrix
    '''
    # TODO: Not tested yet.
    # TODO: There is still some doubt about the :math:`C_{14}` constant.
    uxx, uyy, uzz, uyz, uxz, uxy = u[0], u[1], u[2], u[3], u[4], u[5]
    # Rows follow the stress components (xx, yy, zz, yz, xz, xy);
    # columns are the six independent constants listed above.
    return array(
        [[ uxx, 0, uyy, uzz, 0, 2*uxz ],
         [ uyy, 0, uxx, uzz, 0, -2*uxz ],
         [ 0, uzz, 0, uxx+uyy, 0, 0 ],
         [ 0, 0, 0, 0, 2*uyz, -4*uxy ],
         [ 0, 0, 0, 0, 2*uxz, 2*(uxx-uyy)],
         [ 2*uxy, 0, -2*uxy, 0, 0, -4*uyz ]])
def hexagonal(u):
    '''
    Stress-strain equation matrix for the hexagonal lattice.
    The matrix is constructed based on the approach from L&L
    using auxiliary coordinates: :math:`\\xi=x+iy`, :math:`\\eta=x-iy`.
    The components are calculated from free energy using formula
    introduced in :ref:`symmetry` with appropriate coordinate changes.
    Column order of the elastic constants:
    .. math::
       C_{11}, C_{33}, C_{12}, C_{13}, C_{44}
    :param u: vector of deformations:
        [ :math:`u_{xx}, u_{yy}, u_{zz}, u_{yz}, u_{xz}, u_{xy}` ]
    :returns: Symmetry defined stress-strain equation matrix
    '''
    # TODO: Still needs good verification
    uxx, uyy, uzz, uyz, uxz, uxy = u[:6]
    # Rows follow the stress components: xx, yy, zz, yz, xz, xy.
    rows = [[uxx, 0, uyy, uzz, 0],
            [uyy, 0, uxx, uzz, 0],
            [0, uzz, 0, uxx + uyy, 0],
            [0, 0, 0, 0, 2*uyz],
            [0, 0, 0, 0, 2*uxz],
            [2*uxy, 0, -2*uxy, 0, 0]]
    return array(rows)
def monoclinic(u):
    '''Stress-strain equation matrix for the monoclinic group.
    The ordering of constants is:
    .. math::
       C_{11}, C_{22}, C_{33}, C_{12}, C_{13}, C_{23},
       C_{44}, C_{55}, C_{66}, C_{16}, C_{26}, C_{36}, C_{45}
    :param u: vector of deformations:
        [ :math:`u_{xx}, u_{yy}, u_{zz}, u_{yz}, u_{xz}, u_{xy}` ]
    :returns: Symmetry defined stress-strain equation matrix
    '''
    uxx, uyy, uzz, uyz, uxz, uxy = u[0], u[1], u[2], u[3], u[4], u[5]
    # Rows follow the stress components (xx, yy, zz, yz, xz, xy);
    # columns are the thirteen independent constants listed above.
    return array(
        [[uxx, 0, 0,uyy,uzz, 0, 0, 0, 0,uxy, 0, 0, 0],
         [ 0,uyy, 0,uxx, 0,uzz, 0, 0, 0, 0,uxy, 0, 0],
         [ 0, 0,uzz, 0,uxx,uyy, 0, 0, 0, 0, 0,uxy, 0],
         [ 0, 0, 0, 0, 0, 0,2*uyz, 0, 0, 0, 0, 0,uxz],
         [ 0, 0, 0, 0, 0, 0, 0,2*uxz, 0, 0, 0, 0,uyz],
         [ 0, 0, 0, 0, 0, 0, 0, 0,2*uxy,uxx,uyy,uzz, 0]])
def triclinic(u):
    '''Stress-strain equation matrix for triclinic crystals.
    *Note*: This was never tested on the real case. Beware!
    The ordering of constants is:
    .. math::
       C_{11}, C_{22}, C_{33},
       C_{12}, C_{13}, C_{23},
       C_{44}, C_{55}, C_{66},
       C_{16}, C_{26}, C_{36}, C_{46}, C_{56},
       C_{14}, C_{15}, C_{25}, C_{45}
    :param u: vector of deformations:
        [ :math:`u_{xx}, u_{yy}, u_{zz}, u_{yz}, u_{xz}, u_{xy}` ]
    :returns: Symmetry defined stress-strain equation matrix
    '''
    # Based on the monoclinic matrix and not tested on real case.
    # If you have test cases for this symmetry send them to the author.
    uxx, uyy, uzz, uyz, uxz, uxy = u[0], u[1], u[2], u[3], u[4], u[5]
    # Rows follow the stress components (xx, yy, zz, yz, xz, xy);
    # columns are the eighteen independent constants listed above.
    return array(
        [[uxx, 0, 0,uyy,uzz, 0, 0, 0, 0,uxy, 0, 0, 0, 0,uyz,uxz, 0, 0],
         [ 0,uyy, 0,uxx, 0,uzz, 0, 0, 0, 0,uxy, 0, 0, 0, 0, 0,uxz, 0],
         [ 0, 0,uzz, 0,uxx,uyy, 0, 0, 0, 0, 0,uxy, 0, 0, 0, 0, 0, 0],
         [ 0, 0, 0, 0, 0, 0,2*uyz, 0, 0, 0, 0, 0,uxy, 0,uxx, 0, 0,uxz],
         [ 0, 0, 0, 0, 0, 0, 0,2*uxz, 0, 0, 0, 0, 0,uxy, 0,uxx,uyy,uyz],
         [ 0, 0, 0, 0, 0, 0, 0, 0,2*uxy,uxx,uyy,uzz,uyz,uxz, 0, 0, 0, 0]])
def get_cij_order(cryst):
    '''Give order of elastic constants for the structure.
    :param cryst: ASE Atoms object
    :returns: Order of elastic constants as a tuple of strings: C_ij
    '''
    # Full triclinic ordering; lower-symmetry lattices use prefixes of it.
    tric = ('C_11', 'C_22', 'C_33', 'C_12', 'C_13', 'C_23',
            'C_44', 'C_55', 'C_66', 'C_16', 'C_26', 'C_36',
            'C_46', 'C_56', 'C_14', 'C_15', 'C_25', 'C_45')
    hexa_like = ('C_11', 'C_33', 'C_12', 'C_13', 'C_44')
    orders = {
        1: tric,                          # triclinic
        2: tric[:12] + ('C_45',),         # monoclinic
        3: tric[:9],                      # orthorombic
        4: hexa_like + ('C_14',),         # tetragonal
        5: hexa_like + ('C_14',),         # trigonal
        6: hexa_like,                     # hexagonal
        7: ('C_11', 'C_12', 'C_44'),      # cubic
    }
    lattype = get_lattice_type(cryst)[0]
    return orders[lattype]
def get_lattice_type(cryst):
    '''Find the symmetry of the crystal using spglib symmetry finder.
    Derive name of the space group and its number extracted from the result.
    Based on the group number identify also the lattice type and the Bravais
    lattice of the crystal. The lattice type numbers are
    (the numbering starts from 1):
    Triclinic (1), Monoclinic (2), Orthorombic (3),
    Tetragonal (4), Trigonal (5), Hexagonal (6), Cubic (7)
    :param cryst: ASE Atoms object
    :returns: tuple (lattice type number (1-7), lattice name, space group
              name, space group number)
    :raises ValueError: if the spglib spacegroup string cannot be parsed
        or the space group number is out of the 1-230 range
    '''
    # Upper bounds (exclusive) of space group numbers for each lattice type.
    # See get_lattice_type docstring for the precise definition.
    lattice_types = [
        (3, "Triclinic"),
        (16, "Monoclinic"),
        (75, "Orthorombic"),
        (143, "Tetragonal"),
        (168, "Trigonal"),
        (195, "Hexagonal"),
        (231, "Cubic"),
    ]
    # spglib returns e.g. 'Fm-3m (225)'; extract name and number.
    sg = spg.get_spacegroup(cryst)
    m = re.match(r'([A-Z].*\b)\s*\(([0-9]*)\)', sg)
    if m is None:
        # Guard against spglib output format changes: fail with a clear
        # message instead of an AttributeError on m.group().
        raise ValueError('Cannot parse spacegroup string: %r' % sg)
    sg_name = m.group(1)
    sg_nr = int(m.group(2))
    for n, (limit, bravais) in enumerate(lattice_types):
        if sg_nr < limit:
            return n + 1, bravais, sg_name, sg_nr
    # Space group numbers are 1-230, so the loop should always return;
    # previously an out-of-range number left the result variables unbound.
    raise ValueError('Invalid space group number: %d' % sg_nr)
def get_bulk_modulus(cryst):
    '''Calculate bulk modulus using the Birch-Murnaghan equation of state.
    The EOS must be previously calculated by get_BM_EOS routine.
    The returned bulk modulus is a :math:`B_0` coefficient of the B-M EOS.
    The units of the result are defined by ASE. To get the result in
    any particular units (e.g. GPa) you need to divide it by
    ase.units.<unit name>::
        get_bulk_modulus(cryst)/ase.units.GPa
    :param cryst: ASE Atoms object
    :returns: float, bulk modulus :math:`B_0` in ASE units.
    :raises RuntimeError: if the B-M EOS was not calculated beforehand
    '''
    eos = getattr(cryst, 'bm_eos', None)
    if eos is None:
        raise RuntimeError('Missing B-M EOS data.')
    # B_0 is the second fitted parameter of the B-M EOS (V_0, B_0, B_0').
    cryst.bulk_modulus = eos[1]
    return cryst.bulk_modulus
def get_pressure(s):
    '''Return *external* isotropic (hydrostatic) pressure in ASE units.
    If the pressure is positive the system is under external pressure.
    This is a convenience function to convert output of get_stress function
    into external pressure.
    :param s: stress tensor in Voight (vector) notation as returned by
        the get_stress() method.
    :returns: float, external hydrostatic pressure in ASE units.
    '''
    # Mean of the three normal (diagonal) stress components, sign flipped.
    normal = s[:3]
    return -mean(normal)
def get_elementary_deformations(cryst, n=5, d=2):
    '''Generate elementary deformations for elastic tensor calculation.
    The deformations are created based on the symmetry of the crystal and
    are limited to the non-equivalent axes of the crystal.
    :param cryst: Atoms object, basic structure
    :param n: integer, number of deformations per non-equivalent axis
    :param d: float, size of the maximum deformation in percent and degrees
    :returns: list of deformed structures
    '''
    # Deformation look-up table: non-equivalent axes and the symmetry
    # matrix builder for every Bravais lattice.
    # Perhaps the number of deformations for trigonal
    # system could be reduced to [0,3] but better safe than sorry
    deform = {
        "Cubic": [[0, 3], regular],
        "Hexagonal": [[0, 2, 3, 5], hexagonal],
        "Trigonal": [[0, 1, 2, 3, 4, 5], trigonal],
        "Tetragonal": [[0, 2, 3, 5], tetragonal],
        "Orthorombic": [[0, 1, 2, 3, 4, 5], orthorombic],
        "Monoclinic": [[0, 1, 2, 3, 4, 5], monoclinic],
        "Triclinic": [[0, 1, 2, 3, 4, 5], triclinic]
    }
    brav = get_lattice_type(cryst)[1]
    # Only the list of axes is needed here; the matrix builder is used
    # later, in get_elastic_tensor.
    axis = deform[brav][0]
    systems = []
    for a in axis:
        if a > 5:
            continue
        # Axial deformations (0-2) scan symmetrically around zero;
        # shear deformations (3-5) skip the zero angle.
        sizes = linspace(-d, d, n) if a < 3 else linspace(d/10.0, d, n)
        systems.extend(get_cart_deformed_cell(cryst, axis=a, size=dx)
                       for dx in sizes)
    return systems
def get_elastic_tensor(cryst, systems):
    '''Calculate elastic tensor of the crystal.
    The elastic tensor is calculated from the stress-strain relation
    and derived by fitting this relation to the set of linear equations
    built from the symmetry of the crystal and strains and stresses
    of the set of elementary deformations of the unit cell.
    It is assumed that the crystal is converged and optimized
    under intended pressure/stress. The geometry and stress on the
    cryst is taken as the reference point. No additional optimization
    will be run. Structures in cryst and systems list must have calculated
    stresses. The function returns tuple of :math:`C_{ij}` elastic tensor,
    raw Birch coefficients :math:`B_{ij}` and fitting results: residuals,
    solution rank, singular values returned by numpy.linalg.lstsq.
    :param cryst: Atoms object, basic structure
    :param systems: list of Atoms object with calculated deformed structures
    :returns: tuple(:math:`C_{ij}` float vector,
        tuple(:math:`B_{ij}` float vector, residuals, solution rank,
        singular values))
    '''
    # Deformation look-up table
    # Perhaps the number of deformations for trigonal
    # system could be reduced to [0,3] but better safe than sorry
    deform = {
        "Cubic": [[0, 3], regular],
        "Hexagonal": [[0, 2, 3, 5], hexagonal],
        "Trigonal": [[0, 1, 2, 3, 4, 5], trigonal],
        "Tetragonal": [[0, 2, 3, 5], tetragonal],
        "Orthorombic": [[0, 1, 2, 3, 4, 5], orthorombic],
        "Monoclinic": [[0, 1, 2, 3, 4, 5], monoclinic],
        "Triclinic": [[0, 1, 2, 3, 4, 5], triclinic]
    }
    lattyp, brav, sg_name, sg_nr = get_lattice_type(cryst)
    # Decide which deformations should be used
    axis, symm = deform[brav]
    # ul: strain vectors, sl: stress vectors (one pair per deformed system).
    ul = []
    sl = []
    # Reference (ambient) pressure of the undeformed structure.
    p = get_pressure(cryst.get_stress())
    for g in systems:
        ul.append(get_strain(g, refcell=cryst))
        # Remove the ambient pressure from the stress tensor
        sl.append(g.get_stress()-array([p, p, p, 0, 0, 0]))
    # print(symm, ul)
    # Stack the symmetry equation matrices of all deformations into one
    # overdetermined linear system and solve it by least squares.
    eqm = array([symm(u) for u in ul])
    # print(eqm)
    # print(eqm[0].shape, eqm.shape)
    eqm = reshape(eqm, (eqm.shape[0]*eqm.shape[1], eqm.shape[2]))
    # print(eqm)
    slm = reshape(array(sl), (-1,))
    # print(eqm.shape, slm.shape)
    # print(slm)
    Bij = lstsq(eqm, slm)
    # print(Bij[0] / units.GPa)
    # Calculate elastic constants from Birch coeff.
    # The correction terms convert raw Birch coefficients B_ij into
    # elastic constants C_ij under the ambient pressure p; the sign
    # pattern depends on the symmetry-specific constant ordering.
    # TODO: Check the sign of the pressure array in the B <=> C relation
    if (symm == orthorombic):
        Cij = Bij[0] - array([-p, -p, -p, p, p, p, -p, -p, -p])
    elif (symm == tetragonal):
        Cij = Bij[0] - array([-p, -p, p, p, -p, -p])
    elif (symm == regular):
        Cij = Bij[0] - array([-p, p, -p])
    elif (symm == trigonal):
        Cij = Bij[0] - array([-p, -p, p, p, -p, p])
    elif (symm == hexagonal):
        Cij = Bij[0] - array([-p, -p, p, p, -p])
    elif (symm == monoclinic):
        # TODO: verify this pressure array
        Cij = Bij[0] - array([-p, -p, -p, p, p, p, -p, -p, -p, p, p, p, p])
    elif (symm == triclinic):
        # TODO: verify this pressure array
        Cij = Bij[0] - array([-p, -p, -p, p, p, p, -p, -p, -p,
                              p, p, p, p, p, p, p, p, p])
    return Cij, Bij
def scan_pressures(cryst, lo, hi, n=5, eos=None):
    '''
    Scan the pressure axis from lo to hi (inclusive)
    using B-M EOS as the volume predictor.
    Pressure (lo, hi) in GPa
    :param cryst: Atoms object, basic structure
    :param lo: lower bound of the pressure scan (GPa)
    :param hi: upper bound of the pressure scan (GPa)
    :param n: number of pressure sample points
    :param eos: B-M EOS parameters [V_0, B_0, B_0'] (required)
    :returns: list of rescaled structures covering the pressure range
    :raises RuntimeError: if eos is None
    '''
    # Inverse B-M EOS to get volumes from pressures
    # This will work only in limited pressure range p>-B/B'.
    # Warning! Relative, the V0 prefactor is removed.
    def invbmeos(b, bp, x):
        # Relative lattice scaling factors for the pressures in x.
        return array([pow(b/(bp*xv+b), 1/(3*bp)) for xv in x])
    if eos is None:
        raise RuntimeError('Required EOS data missing')
    # Limit negative pressures to 90% of the singularity value.
    # Beyond this B-M EOS is bound to be wrong anyway.
    lo = max(lo, -0.9*eos[1]/eos[2])
    # NOTE(review): eos[0]/V is a volume ratio multiplying a linear
    # scaling factor from invbmeos - confirm the intended prefactor.
    scale = (eos[0]/cryst.get_volume())*invbmeos(eos[1], eos[2],
                                                 linspace(lo, hi, num=n))
    # print(scale)
    uc = cryst.get_cell()
    systems = [Atoms(cryst) for s in scale]
    for n, s in enumerate(scale):
        systems[n].set_cell(s*uc, scale_atoms=True)
    return systems
def scan_volumes(cryst, lo=0.98, hi=1.02, n=5, scale_volumes=True):
    '''
    Provide set of crystals along volume axis from lo to hi (inclusive).
    No volume cell optimization is performed. Bounds are specified as
    fractions (1.10 = 10% increase). If scale_volumes==False the scalling
    is applied to lattice vectors instead of volumes.
    :param lo: lower bound of the V/V_0 in the scan
    :param hi: upper bound of the V/V_0 in the scan
    :param n: number of volume sample points
    :param scale_volumes: If True scale the unit cell volume or,
        if False, scale the length of lattice axes.
    :returns: a list of deformed systems
    '''
    factors = linspace(lo, hi, num=n)
    if scale_volumes:
        # Convert volume scaling into linear scaling of the lattice vectors.
        factors = factors**(1.0/3.0)
    uc = cryst.get_cell()
    systems = []
    for f in factors:
        c = Atoms(cryst)
        c.set_cell(f*uc, scale_atoms=True)
        systems.append(c)
    return systems
def get_vecang_cell(cryst, uc=None):
    '''
    Compute A, B, C, alpha, beta, gamma cell params
    from the unit cell matrix (uc) or cryst.
    Angles in radians.
    :param cryst: ASE Atoms object (used only when uc is None)
    :param uc: optional 3x3 unit cell matrix overriding cryst
    :returns: list [A, B, C, alpha, beta, gamma]
    '''
    if uc is None:
        uc = cryst.get_cell()
    lengths = [norm(uc[i, :]) for i in range(3)]
    # Unit direction vectors of the three lattice axes.
    versors = [uc[i, :]/lengths[i] for i in range(3)]
    # The angle at index i is the one between the other two axes.
    angles = [acos(dot(versors[(i+1) % 3], versors[(i+2) % 3]))
              for i in range(3)]
    return lengths + angles
def get_deformed_cell(base_cryst, axis=0, size=1):
    '''
    Return the cell (with atoms) deformed along one
    cell parameter (0,1,2 = a,b,c ; 3,4,5 = alpha,beta,gamma) by
    size percent or size degrees (axis/angles).
    :param base_cryst: structure to be deformed (not modified)
    :param axis: cell parameter to change (0-2 lengths, 3-5 angles)
    :param size: deformation size in percent (lengths) or degrees (angles)
    :returns: new, deformed structure
    :raises ValueError: if the resulting angles describe an impossible cell
    '''
    cryst = Atoms(base_cryst)
    uc = base_cryst.get_cell()
    if axis < 3:
        # Axis deformation: scale one lattice vector by (1 + size/100).
        uc[axis, :] = (1+size/100.0)*uc[axis, :]
    else:
        # Angle deformation: rebuild the cell from lengths and angles
        # with one angle shifted by size degrees (converted to radians).
        (a, b, c, alp, bet, gam) = get_vecang_cell(cryst)
        d = array([0.0, 0.0, 0.0])
        d[axis-3] = pi*size/180
        (alp, bet, gam) = array((alp, bet, gam))+d
        # t < 0 means the three angles cannot form a real 3D cell
        # (the z-component of the third vector would be imaginary).
        t = 1 - (ctg(bet)*ctg(gam)-cos(alp)*csc(bet)*csc(gam))**2
        if t < 0.0:
            print('''
            The parameters (alpha,beta,gamma)=(%f,%f,%f) are probably
            incorrect and lead to imaginary coordinates.
            This range of parameters is unsupported by this program
            (and is, let me say, very strange for a crystal).
            Cennot continue, bye.''' % (alp, bet, gam))
            raise ValueError
        else:
            # Standard cell construction: a along x, b in the xy plane.
            uc = [[a, 0.0, 0.0],
                  [b*cos(gam), b*sin(gam), 0],
                  [c*cos(bet),
                   c*(cos(alp)/sin(gam) - cos(bet)*ctg(gam)),
                   c*sin(bet)*sqrt(t)]]
    cryst.set_cell(uc, scale_atoms=True)
    # print(cryst.get_cell())
    # print(uc)
    return cryst
def get_cart_deformed_cell(base_cryst, axis=0, size=1):
    '''Return the cell deformed along one of the cartesian directions.
    Creates new deformed structure. The deformation is based on the
    base structure and is performed along single axis. The axis is
    specified as follows: 0,1,2 = x,y,z ; sheers: 3,4,5 = yz, xz, xy.
    The size of the deformation is in percent and degrees, respectively.
    :param base_cryst: structure to be deformed
    :param axis: direction of deformation
    :param size: size of the deformation
    :returns: new, deformed structure
    '''
    cryst = Atoms(base_cryst)
    s = size/100.0
    # Deformation gradient: identity plus one perturbed element.
    L = diag(ones(3))
    if axis < 3:
        # Axial strain along x, y or z.
        L[axis, axis] += s
    else:
        # Shear strain: 3 -> yz, 4 -> xz, otherwise xy.
        if axis == 3:
            pos = (1, 2)
        elif axis == 4:
            pos = (0, 2)
        else:
            pos = (0, 1)
        L[pos] += s
    deformed_uc = dot(base_cryst.get_cell(), L)
    cryst.set_cell(deformed_uc, scale_atoms=True)
    return cryst
def get_strain(cryst, refcell=None):
    '''Calculate strain tensor in the Voight notation.
    Computes the strain tensor in the Voight notation as a conventional
    6-vector. The calculation is done with respect to the crystal
    geometry passed in refcell parameter.
    :param cryst: deformed structure
    :param refcell: reference, undeformed structure
    :returns: 6-vector of strain tensor in the Voight notation
    '''
    if refcell is None:
        refcell = cryst
    ref = refcell.get_cell()
    # Displacement gradient relative to the reference cell...
    u = dot(inv(ref), cryst.get_cell() - ref)
    # ...symmetrized to remove the rotational part.
    u = (u + u.T)/2
    # Voight order: xx, yy, zz, yz, xz, xy.
    return array([u[0, 0], u[1, 1], u[2, 2], u[2, 1], u[2, 0], u[1, 0]])
if __name__ == '__main__':
    # Self-test / demo: build an MgO rock-salt crystal and print the
    # volume scan and the elementary deformations generated for it.
    from ase.spacegroup import crystal
    a = 4.194  # MgO lattice constant (Angstrom)
    cryst = crystal(['Mg', 'O'],
                    [(0, 0, 0), (0.5, 0.5, 0.5)],
                    spacegroup=225,
                    cellpar=[a, a, a, 90, 90, 90])
    # Volume scan around the equilibrium cell.
    sl = scan_volumes(cryst)
    print('Volumes: ', end='')
    for c in sl:
        print('%.2f (%.1f%%)' % (c.get_volume(),
                                 100*c.get_volume()/cryst.get_volume()),
              end=' ')
    print()
    # Symmetry-derived elementary deformations for the elastic tensor.
    sl = get_elementary_deformations(cryst)
    print('Structures: ')
    print(' Vol A B C alph bet gam')
    for n, c in enumerate(sl):
        print('%.4f (%5.1f%%)' % (c.get_volume(),
                                  100*c.get_volume()/cryst.get_volume()),
              end='')
        print((3*' %7.4f' + ' ' + 3*' %7.2f') %
              tuple(c.get_cell_lengths_and_angles()))
|
jochym/Elastic | elastic/elastic.py | get_elementary_deformations | python | def get_elementary_deformations(cryst, n=5, d=2):
'''Generate elementary deformations for elastic tensor calculation.
The deformations are created based on the symmetry of the crystal and
are limited to the non-equivalet axes of the crystal.
:param cryst: Atoms object, basic structure
:param n: integer, number of deformations per non-equivalent axis
:param d: float, size of the maximum deformation in percent and degrees
:returns: list of deformed structures
'''
# Deformation look-up table
# Perhaps the number of deformations for trigonal
# system could be reduced to [0,3] but better safe then sorry
deform = {
"Cubic": [[0, 3], regular],
"Hexagonal": [[0, 2, 3, 5], hexagonal],
"Trigonal": [[0, 1, 2, 3, 4, 5], trigonal],
"Tetragonal": [[0, 2, 3, 5], tetragonal],
"Orthorombic": [[0, 1, 2, 3, 4, 5], orthorombic],
"Monoclinic": [[0, 1, 2, 3, 4, 5], monoclinic],
"Triclinic": [[0, 1, 2, 3, 4, 5], triclinic]
}
lattyp, brav, sg_name, sg_nr = get_lattice_type(cryst)
# Decide which deformations should be used
axis, symm = deform[brav]
systems = []
for a in axis:
if a < 3: # tetragonal deformation
for dx in linspace(-d, d, n):
systems.append(
get_cart_deformed_cell(cryst, axis=a, size=dx))
elif a < 6: # sheer deformation (skip the zero angle)
for dx in linspace(d/10.0, d, n):
systems.append(
get_cart_deformed_cell(cryst, axis=a, size=dx))
return systems | Generate elementary deformations for elastic tensor calculation.
The deformations are created based on the symmetry of the crystal and
are limited to the non-equivalet axes of the crystal.
:param cryst: Atoms object, basic structure
:param n: integer, number of deformations per non-equivalent axis
:param d: float, size of the maximum deformation in percent and degrees
:returns: list of deformed structures | train | https://github.com/jochym/Elastic/blob/8daae37d0c48aab8dfb1de2839dab02314817f95/elastic/elastic.py#L443-L482 | [
"def get_lattice_type(cryst):\n '''Find the symmetry of the crystal using spglib symmetry finder.\n\n Derive name of the space group and its number extracted from the result.\n Based on the group number identify also the lattice type and the Bravais\n lattice of the crystal. The lattice type numbers are... | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 1998-2017 by Paweł T. Jochym <pawel.jochym@ifj.edu.pl>
#
# This file is part of Elastic.
# Elastic is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# Elastic is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with Elastic. If not, see <http://www.gnu.org/licenses/>.
'''
.. _elastic-mod:
Elastic Module
^^^^^^^^^^^^^^
Elastic is a module for calculation of :math:`C_{ij}` components of elastic
tensor from the strain-stress relation.
The strain components here are ordered in standard way which is different
to ordering in previous versions of the code (up to 4.0).
The ordering is: :math:`u_{xx}, u_{yy}, u_{zz}, u_{yz}, u_{xz}, u_{xy}`.
The general ordering of :math:`C_{ij}` components is (except for triclinic
symmetry and taking into account customary names of constants - e.g.
:math:`C_{16} \\rightarrow C_{14}`):
.. math::
C_{11}, C_{22}, C_{33}, C_{12}, C_{13}, C_{23},
C_{44}, C_{55}, C_{66}, C_{16}, C_{26}, C_{36}, C_{45}
The functions with the name of bravais lattices define the symmetry of the
:math:`C_{ij}` matrix. The matrix is N columns by 6 rows where the columns
corespond to independent elastic constants of the given crystal, while the rows
corespond to the canonical deformations of a crystal. The elements are the
second partial derivatives of the free energy formula for the crystal written
down as a quadratic form of the deformations with respect to elastic constant
and deformation.
*Note:*
The elements for deformations :math:`u_{xy}, u_{xz}, u_{yz}`
have to be divided by 2 to properly match the usual definition
of elastic constants.
See: [LL]_ L.D. Landau, E.M. Lifszyc, "Theory of elasticity"
There is some usefull summary also at:
`ScienceWorld <http://scienceworld.wolfram.com/physics/Elasticity.html>`_
---------
'''
from __future__ import print_function, division, absolute_import
import re
from ase.atoms import Atoms
try:
# Try new release of spglib
import spglib as spg
except ImportError:
# Old naming scheme
from pyspglib import spglib as spg
from scipy.linalg import norm, lstsq
from scipy import optimize
from numpy.linalg import inv
from numpy import dot, diag, ones, reshape, linspace, array, mean
from math import acos, pi, cos, sin, sqrt
def BMEOS(v, v0, b0, b0p):
return (b0/b0p)*(pow(v0/v, b0p) - 1)
def ctg(x):
return cos(x)/sin(x)
def csc(x):
return 1/sin(x)
def regular(u):
'''
Equation matrix generation for the regular (cubic) lattice.
The order of constants is as follows:
.. math::
C_{11}, C_{12}, C_{44}
:param u: vector of deformations:
[ :math:`u_{xx}, u_{yy}, u_{zz}, u_{yz}, u_{xz}, u_{xy}` ]
:returns: Symmetry defined stress-strain equation matrix
'''
uxx, uyy, uzz, uyz, uxz, uxy = u[0], u[1], u[2], u[3], u[4], u[5]
return array(
[[uxx, uyy + uzz, 0],
[uyy, uxx + uzz, 0],
[uzz, uxx + uyy, 0],
[0, 0, 2*uyz],
[0, 0, 2*uxz],
[0, 0, 2*uxy]])
def tetragonal(u):
'''
Equation matrix generation for the tetragonal lattice.
The order of constants is as follows:
.. math::
C_{11}, C_{33}, C_{12}, C_{13}, C_{44}, C_{14}
:param u: vector of deformations:
[ :math:`u_{xx}, u_{yy}, u_{zz}, u_{yz}, u_{xz}, u_{xy}` ]
:returns: Symmetry defined stress-strain equation matrix
'''
uxx, uyy, uzz, uyz, uxz, uxy = u[0], u[1], u[2], u[3], u[4], u[5]
return array(
[[uxx, 0, uyy, uzz, 0, 0],
[uyy, 0, uxx, uzz, 0, 0],
[0, uzz, 0, uxx+uyy, 0, 0],
[0, 0, 0, 0, 0, 2*uxy],
[0, 0, 0, 0, 2*uxz, 0],
[0, 0, 0, 0, 2*uyz, 0]])
def orthorombic(u):
'''
Equation matrix generation for the orthorombic lattice.
The order of constants is as follows:
.. math::
C_{11}, C_{22}, C_{33}, C_{12}, C_{13}, C_{23},
C_{44}, C_{55}, C_{66}
:param u: vector of deformations:
[ :math:`u_{xx}, u_{yy}, u_{zz}, u_{yz}, u_{xz}, u_{xy}` ]
:returns: Symmetry defined stress-strain equation matrix
'''
uxx, uyy, uzz, uyz, uxz, uxy = u[0], u[1], u[2], u[3], u[4], u[5]
return array(
[[uxx, 0, 0, uyy, uzz, 0, 0, 0, 0],
[0, uyy, 0, uxx, 0, uzz, 0, 0, 0],
[0, 0, uzz, 0, uxx, uyy, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 2*uyz, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 2*uxz, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 2*uxy]])
def trigonal(u):
'''
The matrix is constructed based on the approach from L&L
using auxiliary coordinates: :math:`\\xi=x+iy`, :math:`\\eta=x-iy`.
The components are calculated from free energy using formula
introduced in :ref:`symmetry` with appropriate coordinate changes.
The order of constants is as follows:
.. math::
C_{11}, C_{33}, C_{12}, C_{13}, C_{44}, C_{14}
:param u: vector of deformations:
[ :math:`u_{xx}, u_{yy}, u_{zz}, u_{yz}, u_{xz}, u_{xy}` ]
:returns: Symmetry defined stress-strain equation matrix
'''
# TODO: Not tested yet.
# TODO: There is still some doubt about the :math:`C_{14}` constant.
uxx, uyy, uzz, uyz, uxz, uxy = u[0], u[1], u[2], u[3], u[4], u[5]
return array(
[[ uxx, 0, uyy, uzz, 0, 2*uxz ],
[ uyy, 0, uxx, uzz, 0, -2*uxz ],
[ 0, uzz, 0, uxx+uyy, 0, 0 ],
[ 0, 0, 0, 0, 2*uyz, -4*uxy ],
[ 0, 0, 0, 0, 2*uxz, 2*(uxx-uyy)],
[ 2*uxy, 0, -2*uxy, 0, 0, -4*uyz ]])
def hexagonal(u):
'''
The matrix is constructed based on the approach from L&L
using auxiliary coordinates: :math:`\\xi=x+iy`, :math:`\\eta=x-iy`.
The components are calculated from free energy using formula
introduced in :ref:`symmetry` with appropriate coordinate changes.
The order of constants is as follows:
.. math::
C_{11}, C_{33}, C_{12}, C_{13}, C_{44}
:param u: vector of deformations:
[ :math:`u_{xx}, u_{yy}, u_{zz}, u_{yz}, u_{xz}, u_{xy}` ]
:returns: Symmetry defined stress-strain equation matrix
'''
# TODO: Still needs good verification
uxx, uyy, uzz, uyz, uxz, uxy = u[0], u[1], u[2], u[3], u[4], u[5]
return array(
[[ uxx, 0, uyy, uzz, 0 ],
[ uyy, 0, uxx, uzz, 0 ],
[ 0, uzz, 0, uxx+uyy, 0 ],
[ 0, 0, 0, 0, 2*uyz ],
[ 0, 0, 0, 0, 2*uxz ],
[ 2*uxy, 0, -2*uxy, 0, 0 ]])
def monoclinic(u):
'''Monoclinic group,
The ordering of constants is:
.. math::
C_{11}, C_{22}, C_{33}, C_{12}, C_{13}, C_{23},
C_{44}, C_{55}, C_{66}, C_{16}, C_{26}, C_{36}, C_{45}
:param u: vector of deformations:
[ :math:`u_{xx}, u_{yy}, u_{zz}, u_{yz}, u_{xz}, u_{xy}` ]
:returns: Symmetry defined stress-strain equation matrix
'''
uxx, uyy, uzz, uyz, uxz, uxy = u[0], u[1], u[2], u[3], u[4], u[5]
return array(
[[uxx, 0, 0,uyy,uzz, 0, 0, 0, 0,uxy, 0, 0, 0],
[ 0,uyy, 0,uxx, 0,uzz, 0, 0, 0, 0,uxy, 0, 0],
[ 0, 0,uzz, 0,uxx,uyy, 0, 0, 0, 0, 0,uxy, 0],
[ 0, 0, 0, 0, 0, 0,2*uyz, 0, 0, 0, 0, 0,uxz],
[ 0, 0, 0, 0, 0, 0, 0,2*uxz, 0, 0, 0, 0,uyz],
[ 0, 0, 0, 0, 0, 0, 0, 0,2*uxy,uxx,uyy,uzz, 0]])
def triclinic(u):
'''Triclinic crystals.
*Note*: This was never tested on the real case. Beware!
The ordering of constants is:
.. math::
C_{11}, C_{22}, C_{33},
C_{12}, C_{13}, C_{23},
C_{44}, C_{55}, C_{66},
C_{16}, C_{26}, C_{36}, C_{46}, C_{56},
C_{14}, C_{15}, C_{25}, C_{45}
:param u: vector of deformations:
[ :math:`u_{xx}, u_{yy}, u_{zz}, u_{yz}, u_{xz}, u_{xy}` ]
:returns: Symmetry defined stress-strain equation matrix
'''
# Based on the monoclinic matrix and not tested on real case.
# If you have test cases for this symmetry send them to the author.
uxx, uyy, uzz, uyz, uxz, uxy = u[0], u[1], u[2], u[3], u[4], u[5]
return array(
[[uxx, 0, 0,uyy,uzz, 0, 0, 0, 0,uxy, 0, 0, 0, 0,uyz,uxz, 0, 0],
[ 0,uyy, 0,uxx, 0,uzz, 0, 0, 0, 0,uxy, 0, 0, 0, 0, 0,uxz, 0],
[ 0, 0,uzz, 0,uxx,uyy, 0, 0, 0, 0, 0,uxy, 0, 0, 0, 0, 0, 0],
[ 0, 0, 0, 0, 0, 0,2*uyz, 0, 0, 0, 0, 0,uxy, 0,uxx, 0, 0,uxz],
[ 0, 0, 0, 0, 0, 0, 0,2*uxz, 0, 0, 0, 0, 0,uxy, 0,uxx,uyy,uyz],
[ 0, 0, 0, 0, 0, 0, 0, 0,2*uxy,uxx,uyy,uzz,uyz,uxz, 0, 0, 0, 0]])
def get_cij_order(cryst):
'''Give order of of elastic constants for the structure
:param cryst: ASE Atoms object
:returns: Order of elastic constants as a tuple of strings: C_ij
'''
orders = {
1: ('C_11', 'C_22', 'C_33', 'C_12', 'C_13', 'C_23',
'C_44', 'C_55', 'C_66', 'C_16', 'C_26', 'C_36',
'C_46', 'C_56', 'C_14', 'C_15', 'C_25', 'C_45'),
2: ('C_11', 'C_22', 'C_33', 'C_12', 'C_13', 'C_23',
'C_44', 'C_55', 'C_66', 'C_16', 'C_26', 'C_36', 'C_45'),
3: ('C_11', 'C_22', 'C_33', 'C_12', 'C_13', 'C_23', 'C_44',
'C_55', 'C_66'),
4: ('C_11', 'C_33', 'C_12', 'C_13', 'C_44', 'C_14'),
5: ('C_11', 'C_33', 'C_12', 'C_13', 'C_44', 'C_14'),
6: ('C_11', 'C_33', 'C_12', 'C_13', 'C_44'),
7: ('C_11', 'C_12', 'C_44'),
}
return orders[get_lattice_type(cryst)[0]]
def get_lattice_type(cryst):
'''Find the symmetry of the crystal using spglib symmetry finder.
Derive name of the space group and its number extracted from the result.
Based on the group number identify also the lattice type and the Bravais
lattice of the crystal. The lattice type numbers are
(the numbering starts from 1):
Triclinic (1), Monoclinic (2), Orthorombic (3),
Tetragonal (4), Trigonal (5), Hexagonal (6), Cubic (7)
:param cryst: ASE Atoms object
:returns: tuple (lattice type number (1-7), lattice name, space group
name, space group number)
'''
# Table of lattice types and correcponding group numbers dividing
# the ranges. See get_lattice_type method for precise definition.
lattice_types = [
[3, "Triclinic"],
[16, "Monoclinic"],
[75, "Orthorombic"],
[143, "Tetragonal"],
[168, "Trigonal"],
[195, "Hexagonal"],
[231, "Cubic"]
]
sg = spg.get_spacegroup(cryst)
m = re.match(r'([A-Z].*\b)\s*\(([0-9]*)\)', sg)
sg_name = m.group(1)
sg_nr = int(m.group(2))
for n, l in enumerate(lattice_types):
if sg_nr < l[0]:
bravais = l[1]
lattype = n+1
break
return lattype, bravais, sg_name, sg_nr
def get_bulk_modulus(cryst):
'''Calculate bulk modulus using the Birch-Murnaghan equation of state.
The EOS must be previously calculated by get_BM_EOS routine.
The returned bulk modulus is a :math:`B_0` coefficient of the B-M EOS.
The units of the result are defined by ASE. To get the result in
any particular units (e.g. GPa) you need to divide it by
ase.units.<unit name>::
get_bulk_modulus(cryst)/ase.units.GPa
:param cryst: ASE Atoms object
:returns: float, bulk modulus :math:`B_0` in ASE units.
'''
if getattr(cryst, 'bm_eos', None) is None:
raise RuntimeError('Missing B-M EOS data.')
cryst.bulk_modulus = cryst.bm_eos[1]
return cryst.bulk_modulus
def get_pressure(s):
'''Return *external* isotropic (hydrostatic) pressure in ASE units.
If the pressure is positive the system is under external pressure.
This is a convenience function to convert output of get_stress function
into external pressure.
:param cryst: stress tensor in Voight (vector) notation as returned by
the get_stress() method.
:returns: float, external hydrostatic pressure in ASE units.
'''
return -mean(s[:3])
def get_BM_EOS(cryst, systems):
"""Calculate Birch-Murnaghan Equation of State for the crystal.
The B-M equation of state is defined by:
.. math::
P(V)= \\frac{B_0}{B'_0}\\left[
\\left({\\frac{V}{V_0}}\\right)^{-B'_0} - 1
\\right]
It's coefficients are estimated using n single-point structures ganerated
from the crystal (cryst) by the scan_volumes function between two relative
volumes. The BM EOS is fitted to the computed points by
least squares method. The returned value is a list of fitted
parameters: :math:`V_0, B_0, B_0'` if the fit succeded.
If the fitting fails the ``RuntimeError('Calculation failed')`` is raised.
The data from the calculation and fit is stored in the bm_eos and pv
members of cryst for future reference. You have to provide properly
optimized structures in cryst and systems list.
:param cryst: Atoms object, basic structure
:param systems: A list of calculated structures
:returns: tuple of EOS parameters :math:`V_0, B_0, B_0'`.
"""
pvdat = array([[r.get_volume(),
get_pressure(r.get_stress()),
norm(r.get_cell()[:, 0]),
norm(r.get_cell()[:, 1]),
norm(r.get_cell()[:, 2])] for r in systems]).T
# Estimate the initial guess assuming b0p=1
# Limiting volumes
v1 = min(pvdat[0])
v2 = max(pvdat[0])
# The pressure is falling with the growing volume
p2 = min(pvdat[1])
p1 = max(pvdat[1])
b0 = (p1*v1-p2*v2)/(v2-v1)
v0 = v1*(p1+b0)/b0
# Initial guess
p0 = [v0, b0, 1]
# Fitting
try :
p1, succ = optimize.curve_fit(BMEOS, pvdat[0], pvdat[1], p0)
except (ValueError, RuntimeError, optimize.OptimizeWarning) as ex:
raise RuntimeError('Calculation failed')
cryst.bm_eos = p1
cryst.pv = pvdat
return cryst.bm_eos
def get_elastic_tensor(cryst, systems):
    '''Calculate elastic tensor of the crystal.

    The elastic tensor is calculated from the stress-strain relation
    and derived by fitting this relation to the set of linear equations
    build from the symmetry of the crystal and strains and stresses
    of the set of elementary deformations of the unit cell.

    It is assumed that the crystal is converged and optimized
    under intended pressure/stress. The geometry and stress on the
    cryst is taken as the reference point. No additional optimization
    will be run. Structures in cryst and systems list must have calculated
    stresses. The function returns tuple of :math:`C_{ij}` elastic tensor,
    raw Birch coefficients :math:`B_{ij}` and fitting results: residuals,
    solution rank, singular values returned by numpy.linalg.lstsq.

    :param cryst: Atoms object, basic structure
    :param systems: list of Atoms object with calculated deformed structures
    :returns: tuple(:math:`C_{ij}` float vector,
              tuple(:math:`B_{ij}` float vector, residuals, solution rank,
              singular values))
    '''
    # Deformation look-up table: Bravais lattice name -> (list of
    # non-equivalent deformation axes, symmetry equation builder).
    # Perhaps the number of deformations for trigonal
    # system could be reduced to [0,3] but better safe then sorry
    deform = {
        "Cubic": [[0, 3], regular],
        "Hexagonal": [[0, 2, 3, 5], hexagonal],
        "Trigonal": [[0, 1, 2, 3, 4, 5], trigonal],
        "Tetragonal": [[0, 2, 3, 5], tetragonal],
        "Orthorombic": [[0, 1, 2, 3, 4, 5], orthorombic],
        "Monoclinic": [[0, 1, 2, 3, 4, 5], monoclinic],
        "Triclinic": [[0, 1, 2, 3, 4, 5], triclinic]
    }
    lattyp, brav, sg_name, sg_nr = get_lattice_type(cryst)
    # Decide which deformations should be used
    axis, symm = deform[brav]

    # Collect strains (ul) and stresses (sl) of all deformed systems.
    ul = []
    sl = []
    # Reference (ambient) hydrostatic pressure of the base structure.
    p = get_pressure(cryst.get_stress())
    for g in systems:
        ul.append(get_strain(g, refcell=cryst))
        # Remove the ambient pressure from the stress tensor
        sl.append(g.get_stress()-array([p, p, p, 0, 0, 0]))
    # Build one 6xN symmetry equation block per deformed system ...
    eqm = array([symm(u) for u in ul])
    # ... and stack them into a single (6*n)xN matrix.
    eqm = reshape(eqm, (eqm.shape[0]*eqm.shape[1], eqm.shape[2]))
    # Flatten the stresses into the matching right-hand-side vector.
    slm = reshape(array(sl), (-1,))
    # Least-squares solution; Bij[0] holds the raw Birch coefficients.
    Bij = lstsq(eqm, slm)
    # Calculate elastic constants from Birch coeff.
    # TODO: Check the sign of the pressure array in the B <=> C relation
    if (symm == orthorombic):
        Cij = Bij[0] - array([-p, -p, -p, p, p, p, -p, -p, -p])
    elif (symm == tetragonal):
        Cij = Bij[0] - array([-p, -p, p, p, -p, -p])
    elif (symm == regular):
        Cij = Bij[0] - array([-p, p, -p])
    elif (symm == trigonal):
        Cij = Bij[0] - array([-p, -p, p, p, -p, p])
    elif (symm == hexagonal):
        Cij = Bij[0] - array([-p, -p, p, p, -p])
    elif (symm == monoclinic):
        # TODO: verify this pressure array
        Cij = Bij[0] - array([-p, -p, -p, p, p, p, -p, -p, -p, p, p, p, p])
    elif (symm == triclinic):
        # TODO: verify this pressure array
        Cij = Bij[0] - array([-p, -p, -p, p, p, p, -p, -p, -p,
                              p, p, p, p, p, p, p, p, p])
    return Cij, Bij
def scan_pressures(cryst, lo, hi, n=5, eos=None):
    '''
    Scan the pressure axis from lo to hi (inclusive)
    using B-M EOS as the volume predictor.
    Pressure (lo, hi) in GPa

    :param cryst: Atoms object, reference structure
    :param lo: lower bound of the pressure scan
    :param hi: upper bound of the pressure scan
    :param n: number of sample points
    :param eos: B-M EOS parameters [V0, B0, B0'] (required)
    :raises RuntimeError: if eos is None
    :returns: list of scaled structures
    '''
    # Inverse B-M EOS to get volumes from pressures
    # This will work only in limited pressure range p>-B/B'.
    # Warning! Relative, the V0 prefactor is removed.
    def invbmeos(b, bp, x):
        return array([pow(b/(bp*xv+b), 1/(3*bp)) for xv in x])

    if eos is None:
        raise RuntimeError('Required EOS data missing')

    # Limit negative pressures to 90% of the singularity value.
    # Beyond this B-M EOS is bound to be wrong anyway.
    lo = max(lo, -0.9*eos[1]/eos[2])

    # Linear scaling factors of the lattice predicted for each pressure.
    scale = (eos[0]/cryst.get_volume())*invbmeos(eos[1], eos[2],
                                                 linspace(lo, hi, num=n))
    uc = cryst.get_cell()
    systems = [Atoms(cryst) for s in scale]
    # Bugfix: use a dedicated loop index; the original reused the
    # parameter name `n`, clobbering it inside the function.
    for i, s in enumerate(scale):
        systems[i].set_cell(s*uc, scale_atoms=True)
    return systems
def scan_volumes(cryst, lo=0.98, hi=1.02, n=5, scale_volumes=True):
    '''
    Provide set of crystals along volume axis from lo to hi (inclusive).

    No volume cell optimization is performed. Bounds are specified as
    fractions (1.10 = 10% increase). If scale_volumes==False the scalling
    is applied to lattice vectors instead of volumes.

    :param lo: lower bound of the V/V_0 in the scan
    :param hi: upper bound of the V/V_0 in the scan
    :param n: number of volume sample points
    :param scale_volumes: If True scale the unit cell volume or,
                          if False, scale the length of lattice axes.
    :returns: a list of deformed systems
    '''
    scale = linspace(lo, hi, num=n)
    if scale_volumes:
        # Convert volume ratios into linear (lattice vector) ratios.
        scale **= (1.0/3.0)
    uc = cryst.get_cell()
    systems = [Atoms(cryst) for s in scale]
    # Bugfix: use a dedicated loop index; the original reused the
    # parameter name `n` as the loop variable, silently clobbering it.
    for i, s in enumerate(scale):
        systems[i].set_cell(s*uc, scale_atoms=True)
    return systems
def get_vecang_cell(cryst, uc=None):
    '''
    Compute the A, B, C, alpha, beta, gamma cell parameters
    from the unit cell matrix (uc) or, when uc is None, from cryst.
    Angles are returned in radians.
    '''
    if uc is None:
        uc = cryst.get_cell()
    # Vector lengths and the corresponding unit vectors (versors).
    lengths = []
    versors = []
    for i in range(3):
        row = uc[i, :]
        lengths.append(norm(row))
        versors.append(row / lengths[-1])
    # Each cell angle is the angle between the *other two* vectors.
    angles = []
    for i in range(3):
        cosine = dot(versors[(i + 1) % 3], versors[(i + 2) % 3])
        angles.append(acos(cosine))
    return lengths + angles
def get_deformed_cell(base_cryst, axis=0, size=1):
    '''
    Return the cell (with atoms) deformed along one
    cell parameter (0,1,2 = a,b,c ; 3,4,5 = alpha,beta,gamma) by
    size percent or size degrees (axis/angles).
    '''
    # Work on a copy; the base structure is never modified.
    cryst = Atoms(base_cryst)
    uc = base_cryst.get_cell()
    if axis < 3:
        # Axis deformation: stretch one lattice vector by `size` percent.
        uc[axis, :] = (1+size/100.0)*uc[axis, :]
    else:
        # Angle deformation: change one of alpha/beta/gamma by `size`
        # degrees and rebuild the cell matrix in standard orientation.
        (a, b, c, alp, bet, gam) = get_vecang_cell(cryst)
        d = array([0.0, 0.0, 0.0])
        # Convert size from degrees to radians for the selected angle.
        d[axis-3] = pi*size/180
        (alp, bet, gam) = array((alp, bet, gam))+d
        # t < 0 means the requested angles cannot form a real cell
        # (the c-vector z-component would become imaginary).
        t = 1 - (ctg(bet)*ctg(gam)-cos(alp)*csc(bet)*csc(gam))**2
        if t < 0.0:
            print('''
The parameters (alpha,beta,gamma)=(%f,%f,%f) are probably
incorrect and lead to imaginary coordinates.
This range of parameters is unsupported by this program
(and is, let me say, very strange for a crystal).
Cennot continue, bye.''' % (alp, bet, gam))
            raise ValueError
        else:
            # Standard triangular cell matrix: a along x, b in the
            # x-y plane, c determined by the three angles.
            uc = [[a, 0.0, 0.0],
                  [b*cos(gam), b*sin(gam), 0],
                  [c*cos(bet),
                   c*(cos(alp)/sin(gam) - cos(bet)*ctg(gam)),
                   c*sin(bet)*sqrt(t)]]
    cryst.set_cell(uc, scale_atoms=True)
    return cryst
def get_cart_deformed_cell(base_cryst, axis=0, size=1):
    '''Return the cell deformed along one of the cartesian directions

    Creates new deformed structure. The deformation is based on the
    base structure and is performed along single axis. The axis is
    specified as follows: 0,1,2 = x,y,z ; sheers: 3,4,5 = yz, xz, xy.
    The size of the deformation is in percent and degrees, respectively.

    :param base_cryst: structure to be deformed
    :param axis: direction of deformation
    :param size: size of the deformation
    :returns: new, deformed structure
    '''
    cryst = Atoms(base_cryst)
    uc = base_cryst.get_cell()
    s = size/100.0
    # Start from the identity matrix and add the strain component.
    L = diag(ones(3))
    if axis < 3:
        # Axial strain along x, y or z.
        L[axis, axis] += s
    else:
        # Shear strain: select the off-diagonal element for the plane
        # (yz, xz or xy, defaulting to xy as the original code did).
        row, col = {3: (1, 2), 4: (0, 2)}.get(axis, (0, 1))
        L[row, col] += s
    cryst.set_cell(dot(uc, L), scale_atoms=True)
    return cryst
def get_strain(cryst, refcell=None):
    '''Calculate strain tensor in the Voight notation

    Computes the strain tensor in the Voight notation as a conventional
    6-vector. The calculation is done with respect to the crystal
    geometry passed in refcell parameter.

    :param cryst: deformed structure
    :param refcell: reference, undeformed structure
    :returns: 6-vector of strain tensor in the Voight notation
    '''
    if refcell is None:
        refcell = cryst
    ref = refcell.get_cell()
    # Displacement of the lattice vectors relative to the reference.
    delta = cryst.get_cell() - ref
    # Strain matrix, symmetrized to remove the rotational part.
    eps = dot(inv(ref), delta)
    eps = (eps + eps.T) / 2
    return array([eps[0, 0], eps[1, 1], eps[2, 2],
                  eps[2, 1], eps[2, 0], eps[1, 0]])
if __name__ == '__main__':
    # Smoke-test: build an MgO rock-salt crystal and report the
    # volume-scan and elementary-deformation structures.
    from ase.spacegroup import crystal
    a = 4.194
    cryst = crystal(['Mg', 'O'],
                    [(0, 0, 0), (0.5, 0.5, 0.5)],
                    spacegroup=225,
                    cellpar=[a, a, a, 90, 90, 90])
    # Volumes of the structures generated along the volume axis.
    sl = scan_volumes(cryst)
    print('Volumes: ', end='')
    for c in sl:
        print('%.2f (%.1f%%)' % (c.get_volume(),
                                 100*c.get_volume()/cryst.get_volume()),
              end=' ')
    print()
    # Geometry (volume, lattice lengths and angles) of the elementary
    # deformations used for the elastic tensor fit.
    sl = get_elementary_deformations(cryst)
    print('Structures: ')
    print(' Vol A B C alph bet gam')
    for n, c in enumerate(sl):
        print('%.4f (%5.1f%%)' % (c.get_volume(),
                                  100*c.get_volume()/cryst.get_volume()),
              end='')
        print((3*' %7.4f' + ' ' + 3*' %7.2f') %
              tuple(c.get_cell_lengths_and_angles()))
|
jochym/Elastic | elastic/elastic.py | get_elastic_tensor | python | def get_elastic_tensor(cryst, systems):
'''Calculate elastic tensor of the crystal.
The elastic tensor is calculated from the stress-strain relation
and derived by fitting this relation to the set of linear equations
build from the symmetry of the crystal and strains and stresses
of the set of elementary deformations of the unit cell.
It is assumed that the crystal is converged and optimized
under intended pressure/stress. The geometry and stress on the
cryst is taken as the reference point. No additional optimization
will be run. Structures in cryst and systems list must have calculated
stresses. The function returns tuple of :math:`C_{ij}` elastic tensor,
raw Birch coefficients :math:`B_{ij}` and fitting results: residuals,
solution rank, singular values returned by numpy.linalg.lstsq.
:param cryst: Atoms object, basic structure
:param systems: list of Atoms object with calculated deformed structures
:returns: tuple(:math:`C_{ij}` float vector,
tuple(:math:`B_{ij}` float vector, residuals, solution rank, singular values))
'''
# Deformation look-up table
# Perhaps the number of deformations for trigonal
# system could be reduced to [0,3] but better safe then sorry
deform = {
"Cubic": [[0, 3], regular],
"Hexagonal": [[0, 2, 3, 5], hexagonal],
"Trigonal": [[0, 1, 2, 3, 4, 5], trigonal],
"Tetragonal": [[0, 2, 3, 5], tetragonal],
"Orthorombic": [[0, 1, 2, 3, 4, 5], orthorombic],
"Monoclinic": [[0, 1, 2, 3, 4, 5], monoclinic],
"Triclinic": [[0, 1, 2, 3, 4, 5], triclinic]
}
lattyp, brav, sg_name, sg_nr = get_lattice_type(cryst)
# Decide which deformations should be used
axis, symm = deform[brav]
ul = []
sl = []
p = get_pressure(cryst.get_stress())
for g in systems:
ul.append(get_strain(g, refcell=cryst))
# Remove the ambient pressure from the stress tensor
sl.append(g.get_stress()-array([p, p, p, 0, 0, 0]))
# print(symm, ul)
eqm = array([symm(u) for u in ul])
# print(eqm)
# print(eqm[0].shape, eqm.shape)
eqm = reshape(eqm, (eqm.shape[0]*eqm.shape[1], eqm.shape[2]))
# print(eqm)
slm = reshape(array(sl), (-1,))
# print(eqm.shape, slm.shape)
# print(slm)
Bij = lstsq(eqm, slm)
# print(Bij[0] / units.GPa)
# Calculate elastic constants from Birch coeff.
# TODO: Check the sign of the pressure array in the B <=> C relation
if (symm == orthorombic):
Cij = Bij[0] - array([-p, -p, -p, p, p, p, -p, -p, -p])
elif (symm == tetragonal):
Cij = Bij[0] - array([-p, -p, p, p, -p, -p])
elif (symm == regular):
Cij = Bij[0] - array([-p, p, -p])
elif (symm == trigonal):
Cij = Bij[0] - array([-p, -p, p, p, -p, p])
elif (symm == hexagonal):
Cij = Bij[0] - array([-p, -p, p, p, -p])
elif (symm == monoclinic):
# TODO: verify this pressure array
Cij = Bij[0] - array([-p, -p, -p, p, p, p, -p, -p, -p, p, p, p, p])
elif (symm == triclinic):
# TODO: verify this pressure array
Cij = Bij[0] - array([-p, -p, -p, p, p, p, -p, -p, -p,
p, p, p, p, p, p, p, p, p])
return Cij, Bij | Calculate elastic tensor of the crystal.
The elastic tensor is calculated from the stress-strain relation
and derived by fitting this relation to the set of linear equations
build from the symmetry of the crystal and strains and stresses
of the set of elementary deformations of the unit cell.
It is assumed that the crystal is converged and optimized
under intended pressure/stress. The geometry and stress on the
cryst is taken as the reference point. No additional optimization
will be run. Structures in cryst and systems list must have calculated
stresses. The function returns tuple of :math:`C_{ij}` elastic tensor,
raw Birch coefficients :math:`B_{ij}` and fitting results: residuals,
solution rank, singular values returned by numpy.linalg.lstsq.
:param cryst: Atoms object, basic structure
:param systems: list of Atoms object with calculated deformed structures
:returns: tuple(:math:`C_{ij}` float vector,
tuple(:math:`B_{ij}` float vector, residuals, solution rank, singular values)) | train | https://github.com/jochym/Elastic/blob/8daae37d0c48aab8dfb1de2839dab02314817f95/elastic/elastic.py#L485-L562 | [
"def get_pressure(s):\n '''Return *external* isotropic (hydrostatic) pressure in ASE units.\n\n If the pressure is positive the system is under external pressure.\n This is a convenience function to convert output of get_stress function\n into external pressure.\n\n :param cryst: stress tensor in Voi... | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 1998-2017 by Paweł T. Jochym <pawel.jochym@ifj.edu.pl>
#
# This file is part of Elastic.
# Elastic is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# Elastic is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with Elastic. If not, see <http://www.gnu.org/licenses/>.
'''
.. _elastic-mod:
Elastic Module
^^^^^^^^^^^^^^
Elastic is a module for calculation of :math:`C_{ij}` components of elastic
tensor from the strain-stress relation.
The strain components here are ordered in standard way which is different
to ordering in previous versions of the code (up to 4.0).
The ordering is: :math:`u_{xx}, u_{yy}, u_{zz}, u_{yz}, u_{xz}, u_{xy}`.
The general ordering of :math:`C_{ij}` components is (except for triclinic
symmetry and taking into account customary names of constants - e.g.
:math:`C_{16} \\rightarrow C_{14}`):
.. math::
C_{11}, C_{22}, C_{33}, C_{12}, C_{13}, C_{23},
C_{44}, C_{55}, C_{66}, C_{16}, C_{26}, C_{36}, C_{45}
The functions with the name of bravais lattices define the symmetry of the
:math:`C_{ij}` matrix. The matrix is N columns by 6 rows where the columns
corespond to independent elastic constants of the given crystal, while the rows
corespond to the canonical deformations of a crystal. The elements are the
second partial derivatives of the free energy formula for the crystal written
down as a quadratic form of the deformations with respect to elastic constant
and deformation.
*Note:*
The elements for deformations :math:`u_{xy}, u_{xz}, u_{yz}`
have to be divided by 2 to properly match the usual definition
of elastic constants.
See: [LL]_ L.D. Landau, E.M. Lifszyc, "Theory of elasticity"
There is some usefull summary also at:
`ScienceWorld <http://scienceworld.wolfram.com/physics/Elasticity.html>`_
---------
'''
from __future__ import print_function, division, absolute_import
import re
from ase.atoms import Atoms
try:
# Try new release of spglib
import spglib as spg
except ImportError:
# Old naming scheme
from pyspglib import spglib as spg
from scipy.linalg import norm, lstsq
from scipy import optimize
from numpy.linalg import inv
from numpy import dot, diag, ones, reshape, linspace, array, mean
from math import acos, pi, cos, sin, sqrt
def BMEOS(v, v0, b0, b0p):
    """Birch-Murnaghan P(V) equation of state.

    :param v: volume
    :param v0: equilibrium volume :math:`V_0`
    :param b0: bulk modulus :math:`B_0`
    :param b0p: pressure derivative of the bulk modulus :math:`B_0'`
    :returns: pressure at volume v
    """
    ratio = pow(v0/v, b0p)
    return (b0/b0p)*(ratio - 1)
def ctg(x):
    """Cotangent of *x* (x in radians)."""
    sine = sin(x)
    return cos(x) / sine
def csc(x):
    """Cosecant of *x* (x in radians)."""
    return 1 / sin(x)
def regular(u):
    '''
    Equation matrix generation for the regular (cubic) lattice.
    The order of constants is as follows:

    .. math::
       C_{11}, C_{12}, C_{44}

    :param u: vector of deformations:
        [ :math:`u_{xx}, u_{yy}, u_{zz}, u_{yz}, u_{xz}, u_{xy}` ]
    :returns: Symmetry defined stress-strain equation matrix
    '''
    uxx, uyy, uzz, uyz, uxz, uxy = u[:6]
    # One row per stress component; one column per elastic constant.
    rows = [
        [uxx, uyy + uzz, 0],
        [uyy, uxx + uzz, 0],
        [uzz, uxx + uyy, 0],
        [0, 0, 2 * uyz],
        [0, 0, 2 * uxz],
        [0, 0, 2 * uxy],
    ]
    return array(rows)
def tetragonal(u):
    '''
    Equation matrix generation for the tetragonal lattice.
    The order of constants is as follows:

    .. math::
       C_{11}, C_{33}, C_{12}, C_{13}, C_{44}, C_{14}

    :param u: vector of deformations:
        [ :math:`u_{xx}, u_{yy}, u_{zz}, u_{yz}, u_{xz}, u_{xy}` ]
    :returns: Symmetry defined stress-strain equation matrix
    '''
    uxx, uyy, uzz, uyz, uxz, uxy = u[:6]
    # One row per stress component; one column per elastic constant.
    rows = [
        [uxx, 0, uyy, uzz, 0, 0],
        [uyy, 0, uxx, uzz, 0, 0],
        [0, uzz, 0, uxx + uyy, 0, 0],
        [0, 0, 0, 0, 0, 2 * uxy],
        [0, 0, 0, 0, 2 * uxz, 0],
        [0, 0, 0, 0, 2 * uyz, 0],
    ]
    return array(rows)
def orthorombic(u):
    '''
    Equation matrix generation for the orthorombic lattice.
    The order of constants is as follows:

    .. math::
       C_{11}, C_{22}, C_{33}, C_{12}, C_{13}, C_{23},
       C_{44}, C_{55}, C_{66}

    :param u: vector of deformations:
        [ :math:`u_{xx}, u_{yy}, u_{zz}, u_{yz}, u_{xz}, u_{xy}` ]
    :returns: Symmetry defined stress-strain equation matrix
    '''
    uxx, uyy, uzz, uyz, uxz, uxy = u[:6]
    # One row per stress component; one column per elastic constant.
    rows = [
        [uxx, 0, 0, uyy, uzz, 0, 0, 0, 0],
        [0, uyy, 0, uxx, 0, uzz, 0, 0, 0],
        [0, 0, uzz, 0, uxx, uyy, 0, 0, 0],
        [0, 0, 0, 0, 0, 0, 2 * uyz, 0, 0],
        [0, 0, 0, 0, 0, 0, 0, 2 * uxz, 0],
        [0, 0, 0, 0, 0, 0, 0, 0, 2 * uxy],
    ]
    return array(rows)
def trigonal(u):
    '''
    Equation matrix generation for the trigonal lattice.

    The matrix is constructed based on the approach from L&L
    using auxiliary coordinates: :math:`\\xi=x+iy`, :math:`\\eta=x-iy`.
    The components are calculated from free energy using formula
    introduced in :ref:`symmetry` with appropriate coordinate changes.
    The order of constants is as follows:

    .. math::
       C_{11}, C_{33}, C_{12}, C_{13}, C_{44}, C_{14}

    :param u: vector of deformations:
        [ :math:`u_{xx}, u_{yy}, u_{zz}, u_{yz}, u_{xz}, u_{xy}` ]
    :returns: Symmetry defined stress-strain equation matrix
    '''
    # TODO: Not tested yet.
    # TODO: There is still some doubt about the :math:`C_{14}` constant.
    uxx, uyy, uzz, uyz, uxz, uxy = u[0], u[1], u[2], u[3], u[4], u[5]
    return array(
        [[uxx, 0, uyy, uzz, 0, 2*uxz],
         [uyy, 0, uxx, uzz, 0, -2*uxz],
         [0, uzz, 0, uxx+uyy, 0, 0],
         [0, 0, 0, 0, 2*uyz, -4*uxy],
         [0, 0, 0, 0, 2*uxz, 2*(uxx-uyy)],
         [2*uxy, 0, -2*uxy, 0, 0, -4*uyz]])
def hexagonal(u):
    '''
    Equation matrix generation for the hexagonal lattice.

    The matrix is constructed based on the approach from L&L
    using auxiliary coordinates: :math:`\\xi=x+iy`, :math:`\\eta=x-iy`.
    The components are calculated from free energy using formula
    introduced in :ref:`symmetry` with appropriate coordinate changes.
    The order of constants is as follows:

    .. math::
       C_{11}, C_{33}, C_{12}, C_{13}, C_{44}

    :param u: vector of deformations:
        [ :math:`u_{xx}, u_{yy}, u_{zz}, u_{yz}, u_{xz}, u_{xy}` ]
    :returns: Symmetry defined stress-strain equation matrix
    '''
    # TODO: Still needs good verification
    uxx, uyy, uzz, uyz, uxz, uxy = u[0], u[1], u[2], u[3], u[4], u[5]
    return array(
        [[uxx, 0, uyy, uzz, 0],
         [uyy, 0, uxx, uzz, 0],
         [0, uzz, 0, uxx+uyy, 0],
         [0, 0, 0, 0, 2*uyz],
         [0, 0, 0, 0, 2*uxz],
         [2*uxy, 0, -2*uxy, 0, 0]])
def monoclinic(u):
    '''Monoclinic group,

    The ordering of constants is:

    .. math::
       C_{11}, C_{22}, C_{33}, C_{12}, C_{13}, C_{23},
       C_{44}, C_{55}, C_{66}, C_{16}, C_{26}, C_{36}, C_{45}

    :param u: vector of deformations:
        [ :math:`u_{xx}, u_{yy}, u_{zz}, u_{yz}, u_{xz}, u_{xy}` ]
    :returns: Symmetry defined stress-strain equation matrix
    '''
    uxx, uyy, uzz, uyz, uxz, uxy = u[0], u[1], u[2], u[3], u[4], u[5]
    # Columns 10-13 couple the shear strain uxy (and uxz/uyz) to the
    # normal stresses -- the extra C_16, C_26, C_36, C_45 constants.
    return array(
        [[uxx, 0, 0, uyy, uzz, 0, 0, 0, 0, uxy, 0, 0, 0],
         [0, uyy, 0, uxx, 0, uzz, 0, 0, 0, 0, uxy, 0, 0],
         [0, 0, uzz, 0, uxx, uyy, 0, 0, 0, 0, 0, uxy, 0],
         [0, 0, 0, 0, 0, 0, 2*uyz, 0, 0, 0, 0, 0, uxz],
         [0, 0, 0, 0, 0, 0, 0, 2*uxz, 0, 0, 0, 0, uyz],
         [0, 0, 0, 0, 0, 0, 0, 0, 2*uxy, uxx, uyy, uzz, 0]])
def triclinic(u):
    '''Triclinic crystals.

    *Note*: This was never tested on the real case. Beware!

    The ordering of constants is:

    .. math::
       C_{11}, C_{22}, C_{33},
       C_{12}, C_{13}, C_{23},
       C_{44}, C_{55}, C_{66},
       C_{16}, C_{26}, C_{36}, C_{46}, C_{56},
       C_{14}, C_{15}, C_{25}, C_{45}

    :param u: vector of deformations:
        [ :math:`u_{xx}, u_{yy}, u_{zz}, u_{yz}, u_{xz}, u_{xy}` ]
    :returns: Symmetry defined stress-strain equation matrix
    '''
    # Based on the monoclinic matrix and not tested on real case.
    # If you have test cases for this symmetry send them to the author.
    uxx, uyy, uzz, uyz, uxz, uxy = u[0], u[1], u[2], u[3], u[4], u[5]
    return array(
        [[uxx, 0, 0, uyy, uzz, 0, 0, 0, 0, uxy, 0, 0, 0, 0, uyz, uxz, 0, 0],
         [0, uyy, 0, uxx, 0, uzz, 0, 0, 0, 0, uxy, 0, 0, 0, 0, 0, uxz, 0],
         [0, 0, uzz, 0, uxx, uyy, 0, 0, 0, 0, 0, uxy, 0, 0, 0, 0, 0, 0],
         [0, 0, 0, 0, 0, 0, 2*uyz, 0, 0, 0, 0, 0, uxy, 0, uxx, 0, 0, uxz],
         [0, 0, 0, 0, 0, 0, 0, 2*uxz, 0, 0, 0, 0, 0, uxy, 0, uxx, uyy, uyz],
         [0, 0, 0, 0, 0, 0, 0, 0, 2*uxy, uxx, uyy, uzz, uyz, uxz, 0, 0, 0, 0]])
def get_cij_order(cryst):
    '''Give order of of elastic constants for the structure

    :param cryst: ASE Atoms object
    :returns: Order of elastic constants as a tuple of strings: C_ij
    '''
    # Keyed by the lattice type number returned by get_lattice_type():
    # 1=Triclinic ... 7=Cubic.  Types 4 (Tetragonal) and 5 (Trigonal)
    # share the same set of constants.
    orders = {
        1: ('C_11', 'C_22', 'C_33', 'C_12', 'C_13', 'C_23',
            'C_44', 'C_55', 'C_66', 'C_16', 'C_26', 'C_36',
            'C_46', 'C_56', 'C_14', 'C_15', 'C_25', 'C_45'),
        2: ('C_11', 'C_22', 'C_33', 'C_12', 'C_13', 'C_23',
            'C_44', 'C_55', 'C_66', 'C_16', 'C_26', 'C_36', 'C_45'),
        3: ('C_11', 'C_22', 'C_33', 'C_12', 'C_13', 'C_23', 'C_44',
            'C_55', 'C_66'),
        4: ('C_11', 'C_33', 'C_12', 'C_13', 'C_44', 'C_14'),
        5: ('C_11', 'C_33', 'C_12', 'C_13', 'C_44', 'C_14'),
        6: ('C_11', 'C_33', 'C_12', 'C_13', 'C_44'),
        7: ('C_11', 'C_12', 'C_44'),
    }
    return orders[get_lattice_type(cryst)[0]]
def get_lattice_type(cryst):
    '''Find the symmetry of the crystal using spglib symmetry finder.

    Derive name of the space group and its number extracted from the result.
    Based on the group number identify also the lattice type and the Bravais
    lattice of the crystal. The lattice type numbers are
    (the numbering starts from 1):

    Triclinic (1), Monoclinic (2), Orthorombic (3),
    Tetragonal (4), Trigonal (5), Hexagonal (6), Cubic (7)

    :param cryst: ASE Atoms object
    :returns: tuple (lattice type number (1-7), lattice name, space group
              name, space group number)
    '''
    # Table of lattice types and correcponding group numbers dividing
    # the ranges.  Each entry is the *exclusive upper bound* of space
    # group numbers belonging to the named Bravais lattice
    # (e.g. Triclinic covers groups 1-2, Cubic covers 195-230).
    lattice_types = [
        [3, "Triclinic"],
        [16, "Monoclinic"],
        [75, "Orthorombic"],
        [143, "Tetragonal"],
        [168, "Trigonal"],
        [195, "Hexagonal"],
        [231, "Cubic"]
    ]
    # spglib returns e.g. 'Fm-3m (225)'; split into name and number.
    sg = spg.get_spacegroup(cryst)
    m = re.match(r'([A-Z].*\b)\s*\(([0-9]*)\)', sg)
    # NOTE(review): m is assumed non-None; a change in the spglib output
    # format would raise AttributeError here -- confirm upstream format.
    sg_name = m.group(1)
    sg_nr = int(m.group(2))
    # Map the space group number onto a lattice type via the table.
    for n, l in enumerate(lattice_types):
        if sg_nr < l[0]:
            bravais = l[1]
            lattype = n+1
            break
    return lattype, bravais, sg_name, sg_nr
def get_bulk_modulus(cryst):
    '''Calculate bulk modulus using the Birch-Murnaghan equation of state.

    The EOS must be previously calculated by the get_BM_EOS routine.
    The returned bulk modulus is the :math:`B_0` coefficient of the
    B-M EOS.  The units of the result are defined by ASE. To get the
    result in any particular units (e.g. GPa) divide it by
    ase.units.<unit name>::

        get_bulk_modulus(cryst)/ase.units.GPa

    :param cryst: ASE Atoms object
    :raises RuntimeError: if the EOS data has not been calculated yet
    :returns: float, bulk modulus :math:`B_0` in ASE units.
    '''
    eos = getattr(cryst, 'bm_eos', None)
    if eos is None:
        raise RuntimeError('Missing B-M EOS data.')
    # Cache B0 on the structure and return it.
    cryst.bulk_modulus = eos[1]
    return cryst.bulk_modulus
def get_pressure(s):
    '''Return *external* isotropic (hydrostatic) pressure in ASE units.

    If the pressure is positive the system is under external pressure.
    This is a convenience function converting the output of the
    get_stress() method into external pressure.

    :param s: stress tensor in Voight (vector) notation as returned by
              the get_stress() method.
    :returns: float, external hydrostatic pressure in ASE units.
    '''
    # Hydrostatic pressure is minus the mean of the diagonal stresses.
    diagonal = s[:3]
    return -mean(diagonal)
def get_BM_EOS(cryst, systems):
"""Calculate Birch-Murnaghan Equation of State for the crystal.
The B-M equation of state is defined by:
.. math::
P(V)= \\frac{B_0}{B'_0}\\left[
\\left({\\frac{V}{V_0}}\\right)^{-B'_0} - 1
\\right]
It's coefficients are estimated using n single-point structures ganerated
from the crystal (cryst) by the scan_volumes function between two relative
volumes. The BM EOS is fitted to the computed points by
least squares method. The returned value is a list of fitted
parameters: :math:`V_0, B_0, B_0'` if the fit succeded.
If the fitting fails the ``RuntimeError('Calculation failed')`` is raised.
The data from the calculation and fit is stored in the bm_eos and pv
members of cryst for future reference. You have to provide properly
optimized structures in cryst and systems list.
:param cryst: Atoms object, basic structure
:param systems: A list of calculated structures
:returns: tuple of EOS parameters :math:`V_0, B_0, B_0'`.
"""
pvdat = array([[r.get_volume(),
get_pressure(r.get_stress()),
norm(r.get_cell()[:, 0]),
norm(r.get_cell()[:, 1]),
norm(r.get_cell()[:, 2])] for r in systems]).T
# Estimate the initial guess assuming b0p=1
# Limiting volumes
v1 = min(pvdat[0])
v2 = max(pvdat[0])
# The pressure is falling with the growing volume
p2 = min(pvdat[1])
p1 = max(pvdat[1])
b0 = (p1*v1-p2*v2)/(v2-v1)
v0 = v1*(p1+b0)/b0
# Initial guess
p0 = [v0, b0, 1]
# Fitting
try :
p1, succ = optimize.curve_fit(BMEOS, pvdat[0], pvdat[1], p0)
except (ValueError, RuntimeError, optimize.OptimizeWarning) as ex:
raise RuntimeError('Calculation failed')
cryst.bm_eos = p1
cryst.pv = pvdat
return cryst.bm_eos
def get_elementary_deformations(cryst, n=5, d=2):
    '''Generate elementary deformations for elastic tensor calculation.

    The deformations are created based on the symmetry of the crystal and
    are limited to the non-equivalet axes of the crystal.

    :param cryst: Atoms object, basic structure
    :param n: integer, number of deformations per non-equivalent axis
    :param d: float, size of the maximum deformation in percent and degrees
    :returns: list of deformed structures
    '''
    # Deformation look-up table: Bravais lattice name -> (list of
    # non-equivalent deformation axes, symmetry equation builder).
    # Perhaps the number of deformations for trigonal
    # system could be reduced to [0,3] but better safe then sorry
    deform = {
        "Cubic": [[0, 3], regular],
        "Hexagonal": [[0, 2, 3, 5], hexagonal],
        "Trigonal": [[0, 1, 2, 3, 4, 5], trigonal],
        "Tetragonal": [[0, 2, 3, 5], tetragonal],
        "Orthorombic": [[0, 1, 2, 3, 4, 5], orthorombic],
        "Monoclinic": [[0, 1, 2, 3, 4, 5], monoclinic],
        "Triclinic": [[0, 1, 2, 3, 4, 5], triclinic]
    }
    lattyp, brav, sg_name, sg_nr = get_lattice_type(cryst)
    # Decide which deformations should be used
    axis, symm = deform[brav]

    systems = []
    for a in axis:
        if a < 3:  # tetragonal deformation
            # Symmetric range of axial strains, including negative ones.
            for dx in linspace(-d, d, n):
                systems.append(
                    get_cart_deformed_cell(cryst, axis=a, size=dx))
        elif a < 6:  # sheer deformation (skip the zero angle)
            for dx in linspace(d/10.0, d, n):
                systems.append(
                    get_cart_deformed_cell(cryst, axis=a, size=dx))
    return systems
def scan_pressures(cryst, lo, hi, n=5, eos=None):
'''
Scan the pressure axis from lo to hi (inclusive)
using B-M EOS as the volume predictor.
Pressure (lo, hi) in GPa
'''
# Inverse B-M EOS to get volumes from pressures
# This will work only in limited pressure range p>-B/B'.
# Warning! Relative, the V0 prefactor is removed.
def invbmeos(b, bp, x):
return array([pow(b/(bp*xv+b), 1/(3*bp)) for xv in x])
if eos is None:
raise RuntimeError('Required EOS data missing')
# Limit negative pressures to 90% of the singularity value.
# Beyond this B-M EOS is bound to be wrong anyway.
lo = max(lo, -0.9*eos[1]/eos[2])
scale = (eos[0]/cryst.get_volume())*invbmeos(eos[1], eos[2],
linspace(lo, hi, num=n))
# print(scale)
uc = cryst.get_cell()
systems = [Atoms(cryst) for s in scale]
for n, s in enumerate(scale):
systems[n].set_cell(s*uc, scale_atoms=True)
return systems
def scan_volumes(cryst, lo=0.98, hi=1.02, n=5, scale_volumes=True):
'''
Provide set of crystals along volume axis from lo to hi (inclusive).
No volume cell optimization is performed. Bounds are specified as
fractions (1.10 = 10% increase). If scale_volumes==False the scalling
is applied to lattice vectors instead of volumes.
:param lo: lower bound of the V/V_0 in the scan
:param hi: upper bound of the V/V_0 in the scan
:param n: number of volume sample points
:param scale_volumes: If True scale the unit cell volume or,
if False, scale the length of lattice axes.
:returns: a list of deformed systems
'''
scale = linspace(lo, hi, num=n)
if scale_volumes:
scale **= (1.0/3.0)
uc = cryst.get_cell()
systems = [Atoms(cryst) for s in scale]
for n, s in enumerate(scale):
systems[n].set_cell(s*uc, scale_atoms=True)
return systems
def get_vecang_cell(cryst, uc=None):
'''
Compute A,B,C, alpha,beta,gamma cell params
from the unit cell matrix (uc) or cryst.
Angles in radians.
'''
if uc is None:
uc = cryst.get_cell()
ucv = [uc[i, :]/norm(uc[i, :]) for i in range(3)]
uca = [acos(dot(ucv[(i+1) % 3], ucv[(i+2) % 3])) for i in range(3)]
return [norm(uc[i, :]) for i in range(3)] + uca
def get_deformed_cell(base_cryst, axis=0, size=1):
'''
Return the cell (with atoms) deformed along one
cell parameter (0,1,2 = a,b,c ; 3,4,5 = alpha,beta,gamma) by
size percent or size degrees (axis/angles).
'''
cryst = Atoms(base_cryst)
uc = base_cryst.get_cell()
if axis < 3:
uc[axis, :] = (1+size/100.0)*uc[axis, :]
else:
(a, b, c, alp, bet, gam) = get_vecang_cell(cryst)
d = array([0.0, 0.0, 0.0])
d[axis-3] = pi*size/180
(alp, bet, gam) = array((alp, bet, gam))+d
t = 1 - (ctg(bet)*ctg(gam)-cos(alp)*csc(bet)*csc(gam))**2
if t < 0.0:
print('''
The parameters (alpha,beta,gamma)=(%f,%f,%f) are probably
incorrect and lead to imaginary coordinates.
This range of parameters is unsupported by this program
(and is, let me say, very strange for a crystal).
Cennot continue, bye.''' % (alp, bet, gam))
raise ValueError
else:
uc = [[a, 0.0, 0.0],
[b*cos(gam), b*sin(gam), 0],
[c*cos(bet),
c*(cos(alp)/sin(gam) - cos(bet)*ctg(gam)),
c*sin(bet)*sqrt(t)]]
cryst.set_cell(uc, scale_atoms=True)
# print(cryst.get_cell())
# print(uc)
return cryst
def get_cart_deformed_cell(base_cryst, axis=0, size=1):
'''Return the cell deformed along one of the cartesian directions
Creates new deformed structure. The deformation is based on the
base structure and is performed along single axis. The axis is
specified as follows: 0,1,2 = x,y,z ; sheers: 3,4,5 = yz, xz, xy.
The size of the deformation is in percent and degrees, respectively.
:param base_cryst: structure to be deformed
:param axis: direction of deformation
:param size: size of the deformation
:returns: new, deformed structure
'''
cryst = Atoms(base_cryst)
uc = base_cryst.get_cell()
s = size/100.0
L = diag(ones(3))
if axis < 3:
L[axis, axis] += s
else:
if axis == 3:
L[1, 2] += s
elif axis == 4:
L[0, 2] += s
else:
L[0, 1] += s
uc = dot(uc, L)
cryst.set_cell(uc, scale_atoms=True)
# print(cryst.get_cell())
# print(uc)
return cryst
def get_strain(cryst, refcell=None):
'''Calculate strain tensor in the Voight notation
Computes the strain tensor in the Voight notation as a conventional
6-vector. The calculation is done with respect to the crystal
geometry passed in refcell parameter.
:param cryst: deformed structure
:param refcell: reference, undeformed structure
:returns: 6-vector of strain tensor in the Voight notation
'''
if refcell is None:
refcell = cryst
du = cryst.get_cell()-refcell.get_cell()
m = refcell.get_cell()
m = inv(m)
u = dot(m, du)
u = (u+u.T)/2
return array([u[0, 0], u[1, 1], u[2, 2], u[2, 1], u[2, 0], u[1, 0]])
if __name__ == '__main__':
from ase.spacegroup import crystal
a = 4.194
cryst = crystal(['Mg', 'O'],
[(0, 0, 0), (0.5, 0.5, 0.5)],
spacegroup=225,
cellpar=[a, a, a, 90, 90, 90])
sl = scan_volumes(cryst)
print('Volumes: ', end='')
for c in sl:
print('%.2f (%.1f%%)' % (c.get_volume(),
100*c.get_volume()/cryst.get_volume()),
end=' ')
print()
sl = get_elementary_deformations(cryst)
print('Structures: ')
print(' Vol A B C alph bet gam')
for n, c in enumerate(sl):
print('%.4f (%5.1f%%)' % (c.get_volume(),
100*c.get_volume()/cryst.get_volume()),
end='')
print((3*' %7.4f' + ' ' + 3*' %7.2f') %
tuple(c.get_cell_lengths_and_angles()))
|
jochym/Elastic | elastic/elastic.py | scan_pressures | python | def scan_pressures(cryst, lo, hi, n=5, eos=None):
'''
Scan the pressure axis from lo to hi (inclusive)
using B-M EOS as the volume predictor.
Pressure (lo, hi) in GPa
'''
# Inverse B-M EOS to get volumes from pressures
# This will work only in limited pressure range p>-B/B'.
# Warning! Relative, the V0 prefactor is removed.
def invbmeos(b, bp, x):
return array([pow(b/(bp*xv+b), 1/(3*bp)) for xv in x])
if eos is None:
raise RuntimeError('Required EOS data missing')
# Limit negative pressures to 90% of the singularity value.
# Beyond this B-M EOS is bound to be wrong anyway.
lo = max(lo, -0.9*eos[1]/eos[2])
scale = (eos[0]/cryst.get_volume())*invbmeos(eos[1], eos[2],
linspace(lo, hi, num=n))
# print(scale)
uc = cryst.get_cell()
systems = [Atoms(cryst) for s in scale]
for n, s in enumerate(scale):
systems[n].set_cell(s*uc, scale_atoms=True)
return systems | Scan the pressure axis from lo to hi (inclusive)
using B-M EOS as the volume predictor.
Pressure (lo, hi) in GPa | train | https://github.com/jochym/Elastic/blob/8daae37d0c48aab8dfb1de2839dab02314817f95/elastic/elastic.py#L565-L592 | [
"def invbmeos(b, bp, x):\n return array([pow(b/(bp*xv+b), 1/(3*bp)) for xv in x])\n"
] | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 1998-2017 by Paweł T. Jochym <pawel.jochym@ifj.edu.pl>
#
# This file is part of Elastic.
# Elastic is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# Elastic is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with Elastic. If not, see <http://www.gnu.org/licenses/>.
'''
.. _elastic-mod:
Elastic Module
^^^^^^^^^^^^^^
Elastic is a module for calculation of :math:`C_{ij}` components of elastic
tensor from the strain-stress relation.
The strain components here are ordered in standard way which is different
to ordering in previous versions of the code (up to 4.0).
The ordering is: :math:`u_{xx}, u_{yy}, u_{zz}, u_{yz}, u_{xz}, u_{xy}`.
The general ordering of :math:`C_{ij}` components is (except for triclinic
symmetry and taking into account customary names of constants - e.g.
:math:`C_{16} \\rightarrow C_{14}`):
.. math::
C_{11}, C_{22}, C_{33}, C_{12}, C_{13}, C_{23},
C_{44}, C_{55}, C_{66}, C_{16}, C_{26}, C_{36}, C_{45}
The functions with the name of bravais lattices define the symmetry of the
:math:`C_{ij}` matrix. The matrix is N columns by 6 rows where the columns
corespond to independent elastic constants of the given crystal, while the rows
corespond to the canonical deformations of a crystal. The elements are the
second partial derivatives of the free energy formula for the crystal written
down as a quadratic form of the deformations with respect to elastic constant
and deformation.
*Note:*
The elements for deformations :math:`u_{xy}, u_{xz}, u_{yz}`
have to be divided by 2 to properly match the usual definition
of elastic constants.
See: [LL]_ L.D. Landau, E.M. Lifszyc, "Theory of elasticity"
There is some usefull summary also at:
`ScienceWorld <http://scienceworld.wolfram.com/physics/Elasticity.html>`_
---------
'''
from __future__ import print_function, division, absolute_import
import re
from ase.atoms import Atoms
try:
# Try new release of spglib
import spglib as spg
except ImportError:
# Old naming scheme
from pyspglib import spglib as spg
from scipy.linalg import norm, lstsq
from scipy import optimize
from numpy.linalg import inv
from numpy import dot, diag, ones, reshape, linspace, array, mean
from math import acos, pi, cos, sin, sqrt
def BMEOS(v, v0, b0, b0p):
return (b0/b0p)*(pow(v0/v, b0p) - 1)
def ctg(x):
return cos(x)/sin(x)
def csc(x):
return 1/sin(x)
def regular(u):
'''
Equation matrix generation for the regular (cubic) lattice.
The order of constants is as follows:
.. math::
C_{11}, C_{12}, C_{44}
:param u: vector of deformations:
[ :math:`u_{xx}, u_{yy}, u_{zz}, u_{yz}, u_{xz}, u_{xy}` ]
:returns: Symmetry defined stress-strain equation matrix
'''
uxx, uyy, uzz, uyz, uxz, uxy = u[0], u[1], u[2], u[3], u[4], u[5]
return array(
[[uxx, uyy + uzz, 0],
[uyy, uxx + uzz, 0],
[uzz, uxx + uyy, 0],
[0, 0, 2*uyz],
[0, 0, 2*uxz],
[0, 0, 2*uxy]])
def tetragonal(u):
'''
Equation matrix generation for the tetragonal lattice.
The order of constants is as follows:
.. math::
C_{11}, C_{33}, C_{12}, C_{13}, C_{44}, C_{14}
:param u: vector of deformations:
[ :math:`u_{xx}, u_{yy}, u_{zz}, u_{yz}, u_{xz}, u_{xy}` ]
:returns: Symmetry defined stress-strain equation matrix
'''
uxx, uyy, uzz, uyz, uxz, uxy = u[0], u[1], u[2], u[3], u[4], u[5]
return array(
[[uxx, 0, uyy, uzz, 0, 0],
[uyy, 0, uxx, uzz, 0, 0],
[0, uzz, 0, uxx+uyy, 0, 0],
[0, 0, 0, 0, 0, 2*uxy],
[0, 0, 0, 0, 2*uxz, 0],
[0, 0, 0, 0, 2*uyz, 0]])
def orthorombic(u):
'''
Equation matrix generation for the orthorombic lattice.
The order of constants is as follows:
.. math::
C_{11}, C_{22}, C_{33}, C_{12}, C_{13}, C_{23},
C_{44}, C_{55}, C_{66}
:param u: vector of deformations:
[ :math:`u_{xx}, u_{yy}, u_{zz}, u_{yz}, u_{xz}, u_{xy}` ]
:returns: Symmetry defined stress-strain equation matrix
'''
uxx, uyy, uzz, uyz, uxz, uxy = u[0], u[1], u[2], u[3], u[4], u[5]
return array(
[[uxx, 0, 0, uyy, uzz, 0, 0, 0, 0],
[0, uyy, 0, uxx, 0, uzz, 0, 0, 0],
[0, 0, uzz, 0, uxx, uyy, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 2*uyz, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 2*uxz, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 2*uxy]])
def trigonal(u):
'''
The matrix is constructed based on the approach from L&L
using auxiliary coordinates: :math:`\\xi=x+iy`, :math:`\\eta=x-iy`.
The components are calculated from free energy using formula
introduced in :ref:`symmetry` with appropriate coordinate changes.
The order of constants is as follows:
.. math::
C_{11}, C_{33}, C_{12}, C_{13}, C_{44}, C_{14}
:param u: vector of deformations:
[ :math:`u_{xx}, u_{yy}, u_{zz}, u_{yz}, u_{xz}, u_{xy}` ]
:returns: Symmetry defined stress-strain equation matrix
'''
# TODO: Not tested yet.
# TODO: There is still some doubt about the :math:`C_{14}` constant.
uxx, uyy, uzz, uyz, uxz, uxy = u[0], u[1], u[2], u[3], u[4], u[5]
return array(
[[ uxx, 0, uyy, uzz, 0, 2*uxz ],
[ uyy, 0, uxx, uzz, 0, -2*uxz ],
[ 0, uzz, 0, uxx+uyy, 0, 0 ],
[ 0, 0, 0, 0, 2*uyz, -4*uxy ],
[ 0, 0, 0, 0, 2*uxz, 2*(uxx-uyy)],
[ 2*uxy, 0, -2*uxy, 0, 0, -4*uyz ]])
def hexagonal(u):
'''
The matrix is constructed based on the approach from L&L
using auxiliary coordinates: :math:`\\xi=x+iy`, :math:`\\eta=x-iy`.
The components are calculated from free energy using formula
introduced in :ref:`symmetry` with appropriate coordinate changes.
The order of constants is as follows:
.. math::
C_{11}, C_{33}, C_{12}, C_{13}, C_{44}
:param u: vector of deformations:
[ :math:`u_{xx}, u_{yy}, u_{zz}, u_{yz}, u_{xz}, u_{xy}` ]
:returns: Symmetry defined stress-strain equation matrix
'''
# TODO: Still needs good verification
uxx, uyy, uzz, uyz, uxz, uxy = u[0], u[1], u[2], u[3], u[4], u[5]
return array(
[[ uxx, 0, uyy, uzz, 0 ],
[ uyy, 0, uxx, uzz, 0 ],
[ 0, uzz, 0, uxx+uyy, 0 ],
[ 0, 0, 0, 0, 2*uyz ],
[ 0, 0, 0, 0, 2*uxz ],
[ 2*uxy, 0, -2*uxy, 0, 0 ]])
def monoclinic(u):
'''Monoclinic group,
The ordering of constants is:
.. math::
C_{11}, C_{22}, C_{33}, C_{12}, C_{13}, C_{23},
C_{44}, C_{55}, C_{66}, C_{16}, C_{26}, C_{36}, C_{45}
:param u: vector of deformations:
[ :math:`u_{xx}, u_{yy}, u_{zz}, u_{yz}, u_{xz}, u_{xy}` ]
:returns: Symmetry defined stress-strain equation matrix
'''
uxx, uyy, uzz, uyz, uxz, uxy = u[0], u[1], u[2], u[3], u[4], u[5]
return array(
[[uxx, 0, 0,uyy,uzz, 0, 0, 0, 0,uxy, 0, 0, 0],
[ 0,uyy, 0,uxx, 0,uzz, 0, 0, 0, 0,uxy, 0, 0],
[ 0, 0,uzz, 0,uxx,uyy, 0, 0, 0, 0, 0,uxy, 0],
[ 0, 0, 0, 0, 0, 0,2*uyz, 0, 0, 0, 0, 0,uxz],
[ 0, 0, 0, 0, 0, 0, 0,2*uxz, 0, 0, 0, 0,uyz],
[ 0, 0, 0, 0, 0, 0, 0, 0,2*uxy,uxx,uyy,uzz, 0]])
def triclinic(u):
'''Triclinic crystals.
*Note*: This was never tested on the real case. Beware!
The ordering of constants is:
.. math::
C_{11}, C_{22}, C_{33},
C_{12}, C_{13}, C_{23},
C_{44}, C_{55}, C_{66},
C_{16}, C_{26}, C_{36}, C_{46}, C_{56},
C_{14}, C_{15}, C_{25}, C_{45}
:param u: vector of deformations:
[ :math:`u_{xx}, u_{yy}, u_{zz}, u_{yz}, u_{xz}, u_{xy}` ]
:returns: Symmetry defined stress-strain equation matrix
'''
# Based on the monoclinic matrix and not tested on real case.
# If you have test cases for this symmetry send them to the author.
uxx, uyy, uzz, uyz, uxz, uxy = u[0], u[1], u[2], u[3], u[4], u[5]
return array(
[[uxx, 0, 0,uyy,uzz, 0, 0, 0, 0,uxy, 0, 0, 0, 0,uyz,uxz, 0, 0],
[ 0,uyy, 0,uxx, 0,uzz, 0, 0, 0, 0,uxy, 0, 0, 0, 0, 0,uxz, 0],
[ 0, 0,uzz, 0,uxx,uyy, 0, 0, 0, 0, 0,uxy, 0, 0, 0, 0, 0, 0],
[ 0, 0, 0, 0, 0, 0,2*uyz, 0, 0, 0, 0, 0,uxy, 0,uxx, 0, 0,uxz],
[ 0, 0, 0, 0, 0, 0, 0,2*uxz, 0, 0, 0, 0, 0,uxy, 0,uxx,uyy,uyz],
[ 0, 0, 0, 0, 0, 0, 0, 0,2*uxy,uxx,uyy,uzz,uyz,uxz, 0, 0, 0, 0]])
def get_cij_order(cryst):
'''Give order of of elastic constants for the structure
:param cryst: ASE Atoms object
:returns: Order of elastic constants as a tuple of strings: C_ij
'''
orders = {
1: ('C_11', 'C_22', 'C_33', 'C_12', 'C_13', 'C_23',
'C_44', 'C_55', 'C_66', 'C_16', 'C_26', 'C_36',
'C_46', 'C_56', 'C_14', 'C_15', 'C_25', 'C_45'),
2: ('C_11', 'C_22', 'C_33', 'C_12', 'C_13', 'C_23',
'C_44', 'C_55', 'C_66', 'C_16', 'C_26', 'C_36', 'C_45'),
3: ('C_11', 'C_22', 'C_33', 'C_12', 'C_13', 'C_23', 'C_44',
'C_55', 'C_66'),
4: ('C_11', 'C_33', 'C_12', 'C_13', 'C_44', 'C_14'),
5: ('C_11', 'C_33', 'C_12', 'C_13', 'C_44', 'C_14'),
6: ('C_11', 'C_33', 'C_12', 'C_13', 'C_44'),
7: ('C_11', 'C_12', 'C_44'),
}
return orders[get_lattice_type(cryst)[0]]
def get_lattice_type(cryst):
'''Find the symmetry of the crystal using spglib symmetry finder.
Derive name of the space group and its number extracted from the result.
Based on the group number identify also the lattice type and the Bravais
lattice of the crystal. The lattice type numbers are
(the numbering starts from 1):
Triclinic (1), Monoclinic (2), Orthorombic (3),
Tetragonal (4), Trigonal (5), Hexagonal (6), Cubic (7)
:param cryst: ASE Atoms object
:returns: tuple (lattice type number (1-7), lattice name, space group
name, space group number)
'''
# Table of lattice types and correcponding group numbers dividing
# the ranges. See get_lattice_type method for precise definition.
lattice_types = [
[3, "Triclinic"],
[16, "Monoclinic"],
[75, "Orthorombic"],
[143, "Tetragonal"],
[168, "Trigonal"],
[195, "Hexagonal"],
[231, "Cubic"]
]
sg = spg.get_spacegroup(cryst)
m = re.match(r'([A-Z].*\b)\s*\(([0-9]*)\)', sg)
sg_name = m.group(1)
sg_nr = int(m.group(2))
for n, l in enumerate(lattice_types):
if sg_nr < l[0]:
bravais = l[1]
lattype = n+1
break
return lattype, bravais, sg_name, sg_nr
def get_bulk_modulus(cryst):
'''Calculate bulk modulus using the Birch-Murnaghan equation of state.
The EOS must be previously calculated by get_BM_EOS routine.
The returned bulk modulus is a :math:`B_0` coefficient of the B-M EOS.
The units of the result are defined by ASE. To get the result in
any particular units (e.g. GPa) you need to divide it by
ase.units.<unit name>::
get_bulk_modulus(cryst)/ase.units.GPa
:param cryst: ASE Atoms object
:returns: float, bulk modulus :math:`B_0` in ASE units.
'''
if getattr(cryst, 'bm_eos', None) is None:
raise RuntimeError('Missing B-M EOS data.')
cryst.bulk_modulus = cryst.bm_eos[1]
return cryst.bulk_modulus
def get_pressure(s):
'''Return *external* isotropic (hydrostatic) pressure in ASE units.
If the pressure is positive the system is under external pressure.
This is a convenience function to convert output of get_stress function
into external pressure.
:param cryst: stress tensor in Voight (vector) notation as returned by
the get_stress() method.
:returns: float, external hydrostatic pressure in ASE units.
'''
return -mean(s[:3])
def get_BM_EOS(cryst, systems):
"""Calculate Birch-Murnaghan Equation of State for the crystal.
The B-M equation of state is defined by:
.. math::
P(V)= \\frac{B_0}{B'_0}\\left[
\\left({\\frac{V}{V_0}}\\right)^{-B'_0} - 1
\\right]
It's coefficients are estimated using n single-point structures ganerated
from the crystal (cryst) by the scan_volumes function between two relative
volumes. The BM EOS is fitted to the computed points by
least squares method. The returned value is a list of fitted
parameters: :math:`V_0, B_0, B_0'` if the fit succeded.
If the fitting fails the ``RuntimeError('Calculation failed')`` is raised.
The data from the calculation and fit is stored in the bm_eos and pv
members of cryst for future reference. You have to provide properly
optimized structures in cryst and systems list.
:param cryst: Atoms object, basic structure
:param systems: A list of calculated structures
:returns: tuple of EOS parameters :math:`V_0, B_0, B_0'`.
"""
pvdat = array([[r.get_volume(),
get_pressure(r.get_stress()),
norm(r.get_cell()[:, 0]),
norm(r.get_cell()[:, 1]),
norm(r.get_cell()[:, 2])] for r in systems]).T
# Estimate the initial guess assuming b0p=1
# Limiting volumes
v1 = min(pvdat[0])
v2 = max(pvdat[0])
# The pressure is falling with the growing volume
p2 = min(pvdat[1])
p1 = max(pvdat[1])
b0 = (p1*v1-p2*v2)/(v2-v1)
v0 = v1*(p1+b0)/b0
# Initial guess
p0 = [v0, b0, 1]
# Fitting
try :
p1, succ = optimize.curve_fit(BMEOS, pvdat[0], pvdat[1], p0)
except (ValueError, RuntimeError, optimize.OptimizeWarning) as ex:
raise RuntimeError('Calculation failed')
cryst.bm_eos = p1
cryst.pv = pvdat
return cryst.bm_eos
def get_elementary_deformations(cryst, n=5, d=2):
'''Generate elementary deformations for elastic tensor calculation.
The deformations are created based on the symmetry of the crystal and
are limited to the non-equivalet axes of the crystal.
:param cryst: Atoms object, basic structure
:param n: integer, number of deformations per non-equivalent axis
:param d: float, size of the maximum deformation in percent and degrees
:returns: list of deformed structures
'''
# Deformation look-up table
# Perhaps the number of deformations for trigonal
# system could be reduced to [0,3] but better safe then sorry
deform = {
"Cubic": [[0, 3], regular],
"Hexagonal": [[0, 2, 3, 5], hexagonal],
"Trigonal": [[0, 1, 2, 3, 4, 5], trigonal],
"Tetragonal": [[0, 2, 3, 5], tetragonal],
"Orthorombic": [[0, 1, 2, 3, 4, 5], orthorombic],
"Monoclinic": [[0, 1, 2, 3, 4, 5], monoclinic],
"Triclinic": [[0, 1, 2, 3, 4, 5], triclinic]
}
lattyp, brav, sg_name, sg_nr = get_lattice_type(cryst)
# Decide which deformations should be used
axis, symm = deform[brav]
systems = []
for a in axis:
if a < 3: # tetragonal deformation
for dx in linspace(-d, d, n):
systems.append(
get_cart_deformed_cell(cryst, axis=a, size=dx))
elif a < 6: # sheer deformation (skip the zero angle)
for dx in linspace(d/10.0, d, n):
systems.append(
get_cart_deformed_cell(cryst, axis=a, size=dx))
return systems
def get_elastic_tensor(cryst, systems):
'''Calculate elastic tensor of the crystal.
The elastic tensor is calculated from the stress-strain relation
and derived by fitting this relation to the set of linear equations
build from the symmetry of the crystal and strains and stresses
of the set of elementary deformations of the unit cell.
It is assumed that the crystal is converged and optimized
under intended pressure/stress. The geometry and stress on the
cryst is taken as the reference point. No additional optimization
will be run. Structures in cryst and systems list must have calculated
stresses. The function returns tuple of :math:`C_{ij}` elastic tensor,
raw Birch coefficients :math:`B_{ij}` and fitting results: residuals,
solution rank, singular values returned by numpy.linalg.lstsq.
:param cryst: Atoms object, basic structure
:param systems: list of Atoms object with calculated deformed structures
:returns: tuple(:math:`C_{ij}` float vector,
tuple(:math:`B_{ij}` float vector, residuals, solution rank, singular values))
'''
# Deformation look-up table
# Perhaps the number of deformations for trigonal
# system could be reduced to [0,3] but better safe then sorry
deform = {
"Cubic": [[0, 3], regular],
"Hexagonal": [[0, 2, 3, 5], hexagonal],
"Trigonal": [[0, 1, 2, 3, 4, 5], trigonal],
"Tetragonal": [[0, 2, 3, 5], tetragonal],
"Orthorombic": [[0, 1, 2, 3, 4, 5], orthorombic],
"Monoclinic": [[0, 1, 2, 3, 4, 5], monoclinic],
"Triclinic": [[0, 1, 2, 3, 4, 5], triclinic]
}
lattyp, brav, sg_name, sg_nr = get_lattice_type(cryst)
# Decide which deformations should be used
axis, symm = deform[brav]
ul = []
sl = []
p = get_pressure(cryst.get_stress())
for g in systems:
ul.append(get_strain(g, refcell=cryst))
# Remove the ambient pressure from the stress tensor
sl.append(g.get_stress()-array([p, p, p, 0, 0, 0]))
# print(symm, ul)
eqm = array([symm(u) for u in ul])
# print(eqm)
# print(eqm[0].shape, eqm.shape)
eqm = reshape(eqm, (eqm.shape[0]*eqm.shape[1], eqm.shape[2]))
# print(eqm)
slm = reshape(array(sl), (-1,))
# print(eqm.shape, slm.shape)
# print(slm)
Bij = lstsq(eqm, slm)
# print(Bij[0] / units.GPa)
# Calculate elastic constants from Birch coeff.
# TODO: Check the sign of the pressure array in the B <=> C relation
if (symm == orthorombic):
Cij = Bij[0] - array([-p, -p, -p, p, p, p, -p, -p, -p])
elif (symm == tetragonal):
Cij = Bij[0] - array([-p, -p, p, p, -p, -p])
elif (symm == regular):
Cij = Bij[0] - array([-p, p, -p])
elif (symm == trigonal):
Cij = Bij[0] - array([-p, -p, p, p, -p, p])
elif (symm == hexagonal):
Cij = Bij[0] - array([-p, -p, p, p, -p])
elif (symm == monoclinic):
# TODO: verify this pressure array
Cij = Bij[0] - array([-p, -p, -p, p, p, p, -p, -p, -p, p, p, p, p])
elif (symm == triclinic):
# TODO: verify this pressure array
Cij = Bij[0] - array([-p, -p, -p, p, p, p, -p, -p, -p,
p, p, p, p, p, p, p, p, p])
return Cij, Bij
def scan_volumes(cryst, lo=0.98, hi=1.02, n=5, scale_volumes=True):
'''
Provide set of crystals along volume axis from lo to hi (inclusive).
No volume cell optimization is performed. Bounds are specified as
fractions (1.10 = 10% increase). If scale_volumes==False the scalling
is applied to lattice vectors instead of volumes.
:param lo: lower bound of the V/V_0 in the scan
:param hi: upper bound of the V/V_0 in the scan
:param n: number of volume sample points
:param scale_volumes: If True scale the unit cell volume or,
if False, scale the length of lattice axes.
:returns: a list of deformed systems
'''
scale = linspace(lo, hi, num=n)
if scale_volumes:
scale **= (1.0/3.0)
uc = cryst.get_cell()
systems = [Atoms(cryst) for s in scale]
for n, s in enumerate(scale):
systems[n].set_cell(s*uc, scale_atoms=True)
return systems
def get_vecang_cell(cryst, uc=None):
'''
Compute A,B,C, alpha,beta,gamma cell params
from the unit cell matrix (uc) or cryst.
Angles in radians.
'''
if uc is None:
uc = cryst.get_cell()
ucv = [uc[i, :]/norm(uc[i, :]) for i in range(3)]
uca = [acos(dot(ucv[(i+1) % 3], ucv[(i+2) % 3])) for i in range(3)]
return [norm(uc[i, :]) for i in range(3)] + uca
def get_deformed_cell(base_cryst, axis=0, size=1):
'''
Return the cell (with atoms) deformed along one
cell parameter (0,1,2 = a,b,c ; 3,4,5 = alpha,beta,gamma) by
size percent or size degrees (axis/angles).
'''
cryst = Atoms(base_cryst)
uc = base_cryst.get_cell()
if axis < 3:
uc[axis, :] = (1+size/100.0)*uc[axis, :]
else:
(a, b, c, alp, bet, gam) = get_vecang_cell(cryst)
d = array([0.0, 0.0, 0.0])
d[axis-3] = pi*size/180
(alp, bet, gam) = array((alp, bet, gam))+d
t = 1 - (ctg(bet)*ctg(gam)-cos(alp)*csc(bet)*csc(gam))**2
if t < 0.0:
print('''
The parameters (alpha,beta,gamma)=(%f,%f,%f) are probably
incorrect and lead to imaginary coordinates.
This range of parameters is unsupported by this program
(and is, let me say, very strange for a crystal).
Cennot continue, bye.''' % (alp, bet, gam))
raise ValueError
else:
uc = [[a, 0.0, 0.0],
[b*cos(gam), b*sin(gam), 0],
[c*cos(bet),
c*(cos(alp)/sin(gam) - cos(bet)*ctg(gam)),
c*sin(bet)*sqrt(t)]]
cryst.set_cell(uc, scale_atoms=True)
# print(cryst.get_cell())
# print(uc)
return cryst
def get_cart_deformed_cell(base_cryst, axis=0, size=1):
'''Return the cell deformed along one of the cartesian directions
Creates new deformed structure. The deformation is based on the
base structure and is performed along single axis. The axis is
specified as follows: 0,1,2 = x,y,z ; sheers: 3,4,5 = yz, xz, xy.
The size of the deformation is in percent and degrees, respectively.
:param base_cryst: structure to be deformed
:param axis: direction of deformation
:param size: size of the deformation
:returns: new, deformed structure
'''
cryst = Atoms(base_cryst)
uc = base_cryst.get_cell()
s = size/100.0
L = diag(ones(3))
if axis < 3:
L[axis, axis] += s
else:
if axis == 3:
L[1, 2] += s
elif axis == 4:
L[0, 2] += s
else:
L[0, 1] += s
uc = dot(uc, L)
cryst.set_cell(uc, scale_atoms=True)
# print(cryst.get_cell())
# print(uc)
return cryst
def get_strain(cryst, refcell=None):
'''Calculate strain tensor in the Voight notation
Computes the strain tensor in the Voight notation as a conventional
6-vector. The calculation is done with respect to the crystal
geometry passed in refcell parameter.
:param cryst: deformed structure
:param refcell: reference, undeformed structure
:returns: 6-vector of strain tensor in the Voight notation
'''
if refcell is None:
refcell = cryst
du = cryst.get_cell()-refcell.get_cell()
m = refcell.get_cell()
m = inv(m)
u = dot(m, du)
u = (u+u.T)/2
return array([u[0, 0], u[1, 1], u[2, 2], u[2, 1], u[2, 0], u[1, 0]])
if __name__ == '__main__':
from ase.spacegroup import crystal
a = 4.194
cryst = crystal(['Mg', 'O'],
[(0, 0, 0), (0.5, 0.5, 0.5)],
spacegroup=225,
cellpar=[a, a, a, 90, 90, 90])
sl = scan_volumes(cryst)
print('Volumes: ', end='')
for c in sl:
print('%.2f (%.1f%%)' % (c.get_volume(),
100*c.get_volume()/cryst.get_volume()),
end=' ')
print()
sl = get_elementary_deformations(cryst)
print('Structures: ')
print(' Vol A B C alph bet gam')
for n, c in enumerate(sl):
print('%.4f (%5.1f%%)' % (c.get_volume(),
100*c.get_volume()/cryst.get_volume()),
end='')
print((3*' %7.4f' + ' ' + 3*' %7.2f') %
tuple(c.get_cell_lengths_and_angles()))
|
jochym/Elastic | elastic/elastic.py | scan_volumes | python | def scan_volumes(cryst, lo=0.98, hi=1.02, n=5, scale_volumes=True):
'''
Provide set of crystals along volume axis from lo to hi (inclusive).
No volume cell optimization is performed. Bounds are specified as
fractions (1.10 = 10% increase). If scale_volumes==False the scalling
is applied to lattice vectors instead of volumes.
:param lo: lower bound of the V/V_0 in the scan
:param hi: upper bound of the V/V_0 in the scan
:param n: number of volume sample points
:param scale_volumes: If True scale the unit cell volume or,
if False, scale the length of lattice axes.
:returns: a list of deformed systems
'''
scale = linspace(lo, hi, num=n)
if scale_volumes:
scale **= (1.0/3.0)
uc = cryst.get_cell()
systems = [Atoms(cryst) for s in scale]
for n, s in enumerate(scale):
systems[n].set_cell(s*uc, scale_atoms=True)
return systems | Provide set of crystals along volume axis from lo to hi (inclusive).
No volume cell optimization is performed. Bounds are specified as
fractions (1.10 = 10% increase). If scale_volumes==False the scalling
is applied to lattice vectors instead of volumes.
:param lo: lower bound of the V/V_0 in the scan
:param hi: upper bound of the V/V_0 in the scan
:param n: number of volume sample points
:param scale_volumes: If True scale the unit cell volume or,
if False, scale the length of lattice axes.
:returns: a list of deformed systems | train | https://github.com/jochym/Elastic/blob/8daae37d0c48aab8dfb1de2839dab02314817f95/elastic/elastic.py#L595-L617 | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 1998-2017 by Paweł T. Jochym <pawel.jochym@ifj.edu.pl>
#
# This file is part of Elastic.
# Elastic is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# Elastic is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with Elastic. If not, see <http://www.gnu.org/licenses/>.
'''
.. _elastic-mod:
Elastic Module
^^^^^^^^^^^^^^
Elastic is a module for calculation of :math:`C_{ij}` components of elastic
tensor from the strain-stress relation.
The strain components here are ordered in standard way which is different
to ordering in previous versions of the code (up to 4.0).
The ordering is: :math:`u_{xx}, u_{yy}, u_{zz}, u_{yz}, u_{xz}, u_{xy}`.
The general ordering of :math:`C_{ij}` components is (except for triclinic
symmetry and taking into account customary names of constants - e.g.
:math:`C_{16} \\rightarrow C_{14}`):
.. math::
C_{11}, C_{22}, C_{33}, C_{12}, C_{13}, C_{23},
C_{44}, C_{55}, C_{66}, C_{16}, C_{26}, C_{36}, C_{45}
The functions with the name of bravais lattices define the symmetry of the
:math:`C_{ij}` matrix. The matrix is N columns by 6 rows where the columns
corespond to independent elastic constants of the given crystal, while the rows
corespond to the canonical deformations of a crystal. The elements are the
second partial derivatives of the free energy formula for the crystal written
down as a quadratic form of the deformations with respect to elastic constant
and deformation.
*Note:*
The elements for deformations :math:`u_{xy}, u_{xz}, u_{yz}`
have to be divided by 2 to properly match the usual definition
of elastic constants.
See: [LL]_ L.D. Landau, E.M. Lifszyc, "Theory of elasticity"
There is some usefull summary also at:
`ScienceWorld <http://scienceworld.wolfram.com/physics/Elasticity.html>`_
---------
'''
from __future__ import print_function, division, absolute_import
import re
from ase.atoms import Atoms
try:
# Try new release of spglib
import spglib as spg
except ImportError:
# Old naming scheme
from pyspglib import spglib as spg
from scipy.linalg import norm, lstsq
from scipy import optimize
from numpy.linalg import inv
from numpy import dot, diag, ones, reshape, linspace, array, mean
from math import acos, pi, cos, sin, sqrt
def BMEOS(v, v0, b0, b0p):
return (b0/b0p)*(pow(v0/v, b0p) - 1)
def ctg(x):
return cos(x)/sin(x)
def csc(x):
return 1/sin(x)
def regular(u):
'''
Equation matrix generation for the regular (cubic) lattice.
The order of constants is as follows:
.. math::
C_{11}, C_{12}, C_{44}
:param u: vector of deformations:
[ :math:`u_{xx}, u_{yy}, u_{zz}, u_{yz}, u_{xz}, u_{xy}` ]
:returns: Symmetry defined stress-strain equation matrix
'''
uxx, uyy, uzz, uyz, uxz, uxy = u[0], u[1], u[2], u[3], u[4], u[5]
return array(
[[uxx, uyy + uzz, 0],
[uyy, uxx + uzz, 0],
[uzz, uxx + uyy, 0],
[0, 0, 2*uyz],
[0, 0, 2*uxz],
[0, 0, 2*uxy]])
def tetragonal(u):
'''
Equation matrix generation for the tetragonal lattice.
The order of constants is as follows:
.. math::
C_{11}, C_{33}, C_{12}, C_{13}, C_{44}, C_{14}
:param u: vector of deformations:
[ :math:`u_{xx}, u_{yy}, u_{zz}, u_{yz}, u_{xz}, u_{xy}` ]
:returns: Symmetry defined stress-strain equation matrix
'''
uxx, uyy, uzz, uyz, uxz, uxy = u[0], u[1], u[2], u[3], u[4], u[5]
return array(
[[uxx, 0, uyy, uzz, 0, 0],
[uyy, 0, uxx, uzz, 0, 0],
[0, uzz, 0, uxx+uyy, 0, 0],
[0, 0, 0, 0, 0, 2*uxy],
[0, 0, 0, 0, 2*uxz, 0],
[0, 0, 0, 0, 2*uyz, 0]])
def orthorombic(u):
'''
Equation matrix generation for the orthorombic lattice.
The order of constants is as follows:
.. math::
C_{11}, C_{22}, C_{33}, C_{12}, C_{13}, C_{23},
C_{44}, C_{55}, C_{66}
:param u: vector of deformations:
[ :math:`u_{xx}, u_{yy}, u_{zz}, u_{yz}, u_{xz}, u_{xy}` ]
:returns: Symmetry defined stress-strain equation matrix
'''
uxx, uyy, uzz, uyz, uxz, uxy = u[0], u[1], u[2], u[3], u[4], u[5]
return array(
[[uxx, 0, 0, uyy, uzz, 0, 0, 0, 0],
[0, uyy, 0, uxx, 0, uzz, 0, 0, 0],
[0, 0, uzz, 0, uxx, uyy, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 2*uyz, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 2*uxz, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 2*uxy]])
def trigonal(u):
'''
The matrix is constructed based on the approach from L&L
using auxiliary coordinates: :math:`\\xi=x+iy`, :math:`\\eta=x-iy`.
The components are calculated from free energy using formula
introduced in :ref:`symmetry` with appropriate coordinate changes.
The order of constants is as follows:
.. math::
C_{11}, C_{33}, C_{12}, C_{13}, C_{44}, C_{14}
:param u: vector of deformations:
[ :math:`u_{xx}, u_{yy}, u_{zz}, u_{yz}, u_{xz}, u_{xy}` ]
:returns: Symmetry defined stress-strain equation matrix
'''
# TODO: Not tested yet.
# TODO: There is still some doubt about the :math:`C_{14}` constant.
uxx, uyy, uzz, uyz, uxz, uxy = u[0], u[1], u[2], u[3], u[4], u[5]
return array(
[[ uxx, 0, uyy, uzz, 0, 2*uxz ],
[ uyy, 0, uxx, uzz, 0, -2*uxz ],
[ 0, uzz, 0, uxx+uyy, 0, 0 ],
[ 0, 0, 0, 0, 2*uyz, -4*uxy ],
[ 0, 0, 0, 0, 2*uxz, 2*(uxx-uyy)],
[ 2*uxy, 0, -2*uxy, 0, 0, -4*uyz ]])
def hexagonal(u):
'''
The matrix is constructed based on the approach from L&L
using auxiliary coordinates: :math:`\\xi=x+iy`, :math:`\\eta=x-iy`.
The components are calculated from free energy using formula
introduced in :ref:`symmetry` with appropriate coordinate changes.
The order of constants is as follows:
.. math::
C_{11}, C_{33}, C_{12}, C_{13}, C_{44}
:param u: vector of deformations:
[ :math:`u_{xx}, u_{yy}, u_{zz}, u_{yz}, u_{xz}, u_{xy}` ]
:returns: Symmetry defined stress-strain equation matrix
'''
# TODO: Still needs good verification
uxx, uyy, uzz, uyz, uxz, uxy = u[0], u[1], u[2], u[3], u[4], u[5]
return array(
[[ uxx, 0, uyy, uzz, 0 ],
[ uyy, 0, uxx, uzz, 0 ],
[ 0, uzz, 0, uxx+uyy, 0 ],
[ 0, 0, 0, 0, 2*uyz ],
[ 0, 0, 0, 0, 2*uxz ],
[ 2*uxy, 0, -2*uxy, 0, 0 ]])
def monoclinic(u):
'''Monoclinic group,
The ordering of constants is:
.. math::
C_{11}, C_{22}, C_{33}, C_{12}, C_{13}, C_{23},
C_{44}, C_{55}, C_{66}, C_{16}, C_{26}, C_{36}, C_{45}
:param u: vector of deformations:
[ :math:`u_{xx}, u_{yy}, u_{zz}, u_{yz}, u_{xz}, u_{xy}` ]
:returns: Symmetry defined stress-strain equation matrix
'''
uxx, uyy, uzz, uyz, uxz, uxy = u[0], u[1], u[2], u[3], u[4], u[5]
return array(
[[uxx, 0, 0,uyy,uzz, 0, 0, 0, 0,uxy, 0, 0, 0],
[ 0,uyy, 0,uxx, 0,uzz, 0, 0, 0, 0,uxy, 0, 0],
[ 0, 0,uzz, 0,uxx,uyy, 0, 0, 0, 0, 0,uxy, 0],
[ 0, 0, 0, 0, 0, 0,2*uyz, 0, 0, 0, 0, 0,uxz],
[ 0, 0, 0, 0, 0, 0, 0,2*uxz, 0, 0, 0, 0,uyz],
[ 0, 0, 0, 0, 0, 0, 0, 0,2*uxy,uxx,uyy,uzz, 0]])
def triclinic(u):
'''Triclinic crystals.
*Note*: This was never tested on the real case. Beware!
The ordering of constants is:
.. math::
C_{11}, C_{22}, C_{33},
C_{12}, C_{13}, C_{23},
C_{44}, C_{55}, C_{66},
C_{16}, C_{26}, C_{36}, C_{46}, C_{56},
C_{14}, C_{15}, C_{25}, C_{45}
:param u: vector of deformations:
[ :math:`u_{xx}, u_{yy}, u_{zz}, u_{yz}, u_{xz}, u_{xy}` ]
:returns: Symmetry defined stress-strain equation matrix
'''
# Based on the monoclinic matrix and not tested on real case.
# If you have test cases for this symmetry send them to the author.
uxx, uyy, uzz, uyz, uxz, uxy = u[0], u[1], u[2], u[3], u[4], u[5]
return array(
[[uxx, 0, 0,uyy,uzz, 0, 0, 0, 0,uxy, 0, 0, 0, 0,uyz,uxz, 0, 0],
[ 0,uyy, 0,uxx, 0,uzz, 0, 0, 0, 0,uxy, 0, 0, 0, 0, 0,uxz, 0],
[ 0, 0,uzz, 0,uxx,uyy, 0, 0, 0, 0, 0,uxy, 0, 0, 0, 0, 0, 0],
[ 0, 0, 0, 0, 0, 0,2*uyz, 0, 0, 0, 0, 0,uxy, 0,uxx, 0, 0,uxz],
[ 0, 0, 0, 0, 0, 0, 0,2*uxz, 0, 0, 0, 0, 0,uxy, 0,uxx,uyy,uyz],
[ 0, 0, 0, 0, 0, 0, 0, 0,2*uxy,uxx,uyy,uzz,uyz,uxz, 0, 0, 0, 0]])
def get_cij_order(cryst):
'''Give order of of elastic constants for the structure
:param cryst: ASE Atoms object
:returns: Order of elastic constants as a tuple of strings: C_ij
'''
orders = {
1: ('C_11', 'C_22', 'C_33', 'C_12', 'C_13', 'C_23',
'C_44', 'C_55', 'C_66', 'C_16', 'C_26', 'C_36',
'C_46', 'C_56', 'C_14', 'C_15', 'C_25', 'C_45'),
2: ('C_11', 'C_22', 'C_33', 'C_12', 'C_13', 'C_23',
'C_44', 'C_55', 'C_66', 'C_16', 'C_26', 'C_36', 'C_45'),
3: ('C_11', 'C_22', 'C_33', 'C_12', 'C_13', 'C_23', 'C_44',
'C_55', 'C_66'),
4: ('C_11', 'C_33', 'C_12', 'C_13', 'C_44', 'C_14'),
5: ('C_11', 'C_33', 'C_12', 'C_13', 'C_44', 'C_14'),
6: ('C_11', 'C_33', 'C_12', 'C_13', 'C_44'),
7: ('C_11', 'C_12', 'C_44'),
}
return orders[get_lattice_type(cryst)[0]]
def get_lattice_type(cryst):
    '''Find the symmetry of the crystal using spglib symmetry finder.
    Derive name of the space group and its number extracted from the result.
    Based on the group number identify also the lattice type and the Bravais
    lattice of the crystal. The lattice type numbers are
    (the numbering starts from 1):
    Triclinic (1), Monoclinic (2), Orthorombic (3),
    Tetragonal (4), Trigonal (5), Hexagonal (6), Cubic (7)
    :param cryst: ASE Atoms object
    :returns: tuple (lattice type number (1-7), lattice name, space group
        name, space group number)
    '''
    # Table of lattice types and corresponding group numbers dividing
    # the ranges: a space-group number below an entry's first element
    # belongs to that lattice (1-2 Triclinic, 3-15 Monoclinic, ...,
    # 195-230 Cubic).
    lattice_types = [
        [3, "Triclinic"],
        [16, "Monoclinic"],
        [75, "Orthorombic"],
        [143, "Tetragonal"],
        [168, "Trigonal"],
        [195, "Hexagonal"],
        [231, "Cubic"]
    ]
    # Parse e.g. 'Fm-3m (225)' into name and number.
    # NOTE(review): assumes spglib always returns this pattern;
    # m would be None otherwise - confirm against spglib version used.
    sg = spg.get_spacegroup(cryst)
    m = re.match(r'([A-Z].*\b)\s*\(([0-9]*)\)', sg)
    sg_name = m.group(1)
    sg_nr = int(m.group(2))
    for n, l in enumerate(lattice_types):
        if sg_nr < l[0]:
            bravais = l[1]
            lattype = n+1
            break
    return lattype, bravais, sg_name, sg_nr
def get_bulk_modulus(cryst):
    '''Return the bulk modulus :math:`B_0` from a fitted B-M EOS.
    The EOS parameters must have been computed beforehand by the
    get_BM_EOS routine (stored as ``cryst.bm_eos``). The value is in
    ASE units; divide by e.g. ``ase.units.GPa`` to convert::
        get_bulk_modulus(cryst)/ase.units.GPa
    :param cryst: ASE Atoms object
    :returns: float, bulk modulus :math:`B_0` in ASE units.
    :raises RuntimeError: when no B-M EOS data is present on cryst.
    '''
    eos = getattr(cryst, 'bm_eos', None)
    if eos is None:
        raise RuntimeError('Missing B-M EOS data.')
    # Cache the result on the structure, matching the original API.
    cryst.bulk_modulus = eos[1]
    return cryst.bulk_modulus
def get_pressure(s):
    '''Return *external* isotropic (hydrostatic) pressure in ASE units.
    A positive value means the system is under external pressure. This
    is a convenience converter from the get_stress() Voight vector.
    :param s: stress tensor in Voight (vector) notation as returned by
        the get_stress() method.
    :returns: float, external hydrostatic pressure in ASE units.
    '''
    # Pressure is minus the average of the three diagonal stresses.
    sxx, syy, szz = s[0], s[1], s[2]
    return -(sxx + syy + szz) / 3
def get_BM_EOS(cryst, systems):
    """Calculate Birch-Murnaghan Equation of State for the crystal.
    The B-M equation of state is defined by:
    .. math::
        P(V)= \\frac{B_0}{B'_0}\\left[
        \\left({\\frac{V}{V_0}}\\right)^{-B'_0} - 1
        \\right]
    Its coefficients are estimated from single-point structures generated
    from the crystal (cryst) by the scan_volumes function between two
    relative volumes. The BM EOS is fitted to the computed points by the
    least squares method. The returned value is a list of fitted
    parameters: :math:`V_0, B_0, B_0'` if the fit succeeded.
    If the fitting fails ``RuntimeError('Calculation failed')`` is raised
    (chained to the underlying fitting exception).
    The data from the calculation and fit is stored in the bm_eos and pv
    members of cryst for future reference. You have to provide properly
    optimized structures in cryst and systems list.
    :param cryst: Atoms object, basic structure
    :param systems: A list of calculated structures
    :returns: tuple of EOS parameters :math:`V_0, B_0, B_0'`.
    """
    # Rows: volume, external pressure, and the three cell-vector lengths.
    pvdat = array([[r.get_volume(),
                    get_pressure(r.get_stress()),
                    norm(r.get_cell()[:, 0]),
                    norm(r.get_cell()[:, 1]),
                    norm(r.get_cell()[:, 2])] for r in systems]).T
    # Estimate the initial guess assuming b0p=1
    # Limiting volumes
    v1 = min(pvdat[0])
    v2 = max(pvdat[0])
    # The pressure is falling with the growing volume
    p2 = min(pvdat[1])
    p1 = max(pvdat[1])
    b0 = (p1*v1-p2*v2)/(v2-v1)
    v0 = v1*(p1+b0)/b0
    # Initial guess
    p0 = [v0, b0, 1]
    # Fitting
    try:
        # curve_fit returns (optimal params, covariance matrix)
        popt, pcov = optimize.curve_fit(BMEOS, pvdat[0], pvdat[1], p0)
    except (ValueError, RuntimeError, optimize.OptimizeWarning) as ex:
        # Chain the original exception so the failure cause is visible.
        raise RuntimeError('Calculation failed') from ex
    cryst.bm_eos = popt
    cryst.pv = pvdat
    return cryst.bm_eos
def get_elementary_deformations(cryst, n=5, d=2):
    '''Generate elementary deformations for elastic tensor calculation.
    The deformations are created based on the symmetry of the crystal and
    are limited to the non-equivalent axes of the crystal.
    :param cryst: Atoms object, basic structure
    :param n: integer, number of deformations per non-equivalent axis
    :param d: float, size of the maximum deformation in percent and degrees
    :returns: list of deformed structures
    '''
    # Deformation look-up table: Bravais lattice -> (axes to deform,
    # symmetry matrix function). Perhaps the number of deformations for
    # trigonal system could be reduced to [0,3] but better safe than sorry.
    deform = {
        "Cubic": [[0, 3], regular],
        "Hexagonal": [[0, 2, 3, 5], hexagonal],
        "Trigonal": [[0, 1, 2, 3, 4, 5], trigonal],
        "Tetragonal": [[0, 2, 3, 5], tetragonal],
        "Orthorombic": [[0, 1, 2, 3, 4, 5], orthorombic],
        "Monoclinic": [[0, 1, 2, 3, 4, 5], monoclinic],
        "Triclinic": [[0, 1, 2, 3, 4, 5], triclinic]
    }
    lattyp, brav, sg_name, sg_nr = get_lattice_type(cryst)
    # Decide which deformations should be used
    axis, symm = deform[brav]
    systems = []
    for a in axis:
        if a < 3:  # tetragonal deformation: stretch, symmetric around 0
            for dx in linspace(-d, d, n):
                systems.append(
                    get_cart_deformed_cell(cryst, axis=a, size=dx))
        elif a < 6:  # sheer deformation (skip the zero angle)
            for dx in linspace(d/10.0, d, n):
                systems.append(
                    get_cart_deformed_cell(cryst, axis=a, size=dx))
    return systems
def get_elastic_tensor(cryst, systems):
    '''Calculate elastic tensor of the crystal.
    The elastic tensor is calculated from the stress-strain relation
    and derived by fitting this relation to the set of linear equations
    build from the symmetry of the crystal and strains and stresses
    of the set of elementary deformations of the unit cell.
    It is assumed that the crystal is converged and optimized
    under intended pressure/stress. The geometry and stress on the
    cryst is taken as the reference point. No additional optimization
    will be run. Structures in cryst and systems list must have calculated
    stresses. The function returns tuple of :math:`C_{ij}` elastic tensor,
    raw Birch coefficients :math:`B_{ij}` and fitting results: residuals,
    solution rank, singular values returned by the lstsq solver.
    :param cryst: Atoms object, basic structure
    :param systems: list of Atoms object with calculated deformed structures
    :returns: tuple(:math:`C_{ij}` float vector,
        tuple(:math:`B_{ij}` float vector, residuals, solution rank,
        singular values))
    '''
    # Deformation look-up table (same as in get_elementary_deformations):
    # Bravais lattice -> (axes to deform, symmetry matrix function).
    deform = {
        "Cubic": [[0, 3], regular],
        "Hexagonal": [[0, 2, 3, 5], hexagonal],
        "Trigonal": [[0, 1, 2, 3, 4, 5], trigonal],
        "Tetragonal": [[0, 2, 3, 5], tetragonal],
        "Orthorombic": [[0, 1, 2, 3, 4, 5], orthorombic],
        "Monoclinic": [[0, 1, 2, 3, 4, 5], monoclinic],
        "Triclinic": [[0, 1, 2, 3, 4, 5], triclinic]
    }
    lattyp, brav, sg_name, sg_nr = get_lattice_type(cryst)
    # Decide which deformations should be used
    axis, symm = deform[brav]
    ul = []
    sl = []
    p = get_pressure(cryst.get_stress())
    for g in systems:
        ul.append(get_strain(g, refcell=cryst))
        # Remove the ambient pressure from the stress tensor
        sl.append(g.get_stress()-array([p, p, p, 0, 0, 0]))
    # Stack the per-system symmetry matrices into a single overdetermined
    # linear system: eqm @ Bij = slm.
    eqm = array([symm(u) for u in ul])
    eqm = reshape(eqm, (eqm.shape[0]*eqm.shape[1], eqm.shape[2]))
    slm = reshape(array(sl), (-1,))
    # NOTE(review): lstsq here is scipy.linalg.lstsq (module import),
    # not numpy.linalg.lstsq as the docstring originally suggested.
    Bij = lstsq(eqm, slm)
    # Calculate elastic constants from Birch coeff.
    # TODO: Check the sign of the pressure array in the B <=> C relation
    if (symm == orthorombic):
        Cij = Bij[0] - array([-p, -p, -p, p, p, p, -p, -p, -p])
    elif (symm == tetragonal):
        Cij = Bij[0] - array([-p, -p, p, p, -p, -p])
    elif (symm == regular):
        Cij = Bij[0] - array([-p, p, -p])
    elif (symm == trigonal):
        Cij = Bij[0] - array([-p, -p, p, p, -p, p])
    elif (symm == hexagonal):
        Cij = Bij[0] - array([-p, -p, p, p, -p])
    elif (symm == monoclinic):
        # TODO: verify this pressure array
        Cij = Bij[0] - array([-p, -p, -p, p, p, p, -p, -p, -p, p, p, p, p])
    elif (symm == triclinic):
        # TODO: verify this pressure array
        Cij = Bij[0] - array([-p, -p, -p, p, p, p, -p, -p, -p,
                              p, p, p, p, p, p, p, p, p])
    return Cij, Bij
def scan_pressures(cryst, lo, hi, n=5, eos=None):
    '''
    Scan the pressure axis from lo to hi (inclusive)
    using B-M EOS as the volume predictor.
    Pressure (lo, hi) in GPa
    :param cryst: Atoms object, basic structure
    :param lo: lower bound of the pressure scan (GPa)
    :param hi: upper bound of the pressure scan (GPa)
    :param n: number of sample points
    :param eos: B-M EOS parameters [V_0, B_0, B_0'] (required)
    :returns: list of scaled structures
    :raises RuntimeError: when eos is not provided
    '''
    def invbmeos(b, bp, x):
        # Inverse B-M EOS to get relative volumes from pressures.
        # This will work only in limited pressure range p > -B/B'.
        # Warning! Relative - the V0 prefactor is removed.
        return array([pow(b/(bp*xv+b), 1/(3*bp)) for xv in x])
    if eos is None:
        raise RuntimeError('Required EOS data missing')
    # Limit negative pressures to 90% of the singularity value.
    # Beyond this B-M EOS is bound to be wrong anyway.
    lo = max(lo, -0.9*eos[1]/eos[2])
    scale = (eos[0]/cryst.get_volume())*invbmeos(eos[1], eos[2],
                                                 linspace(lo, hi, num=n))
    uc = cryst.get_cell()
    systems = [Atoms(cryst) for _ in scale]
    # BUGFIX: use a distinct loop variable - the original shadowed the
    # parameter `n` here.
    for i, s in enumerate(scale):
        systems[i].set_cell(s*uc, scale_atoms=True)
    return systems
def get_vecang_cell(cryst, uc=None):
    '''
    Compute the A,B,C, alpha,beta,gamma cell parameters from the unit
    cell matrix (uc) or, when uc is None, from cryst.get_cell().
    Angles are returned in radians.
    '''
    if uc is None:
        uc = cryst.get_cell()
    # Vector lengths first, then the angles between the unit vectors
    # of the other two axes (alpha is between b and c, etc.).
    lengths = [norm(uc[i, :]) for i in range(3)]
    versors = [uc[i, :] / lengths[i] for i in range(3)]
    angles = [acos(dot(versors[(i + 1) % 3], versors[(i + 2) % 3]))
              for i in range(3)]
    return lengths + angles
def get_deformed_cell(base_cryst, axis=0, size=1):
    '''
    Return the cell (with atoms) deformed along one
    cell parameter (0,1,2 = a,b,c ; 3,4,5 = alpha,beta,gamma) by
    size percent or size degrees (axis/angles).
    :param base_cryst: structure to be deformed
    :param axis: deformed cell parameter (0-5)
    :param size: deformation size (percent for lengths, degrees for angles)
    :returns: new, deformed structure
    :raises ValueError: when the requested angle change leads to an
        impossible (imaginary-coordinate) cell geometry
    '''
    cryst = Atoms(base_cryst)
    uc = base_cryst.get_cell()
    if axis < 3:
        # Stretch one lattice vector by size percent.
        uc[axis, :] = (1+size/100.0)*uc[axis, :]
    else:
        # Change one of the cell angles by size degrees and rebuild the
        # cell matrix in the standard orientation.
        (a, b, c, alp, bet, gam) = get_vecang_cell(cryst)
        d = array([0.0, 0.0, 0.0])
        d[axis-3] = pi*size/180
        (alp, bet, gam) = array((alp, bet, gam))+d
        t = 1 - (ctg(bet)*ctg(gam)-cos(alp)*csc(bet)*csc(gam))**2
        if t < 0.0:
            # BUGFIX: carry the diagnostic in the exception instead of
            # printing to stdout and raising a bare ValueError.
            raise ValueError(
                'The parameters (alpha,beta,gamma)=(%f,%f,%f) are '
                'probably incorrect and lead to imaginary coordinates. '
                'This range of parameters is unsupported by this '
                'program.' % (alp, bet, gam))
        uc = [[a, 0.0, 0.0],
              [b*cos(gam), b*sin(gam), 0],
              [c*cos(bet),
               c*(cos(alp)/sin(gam) - cos(bet)*ctg(gam)),
               c*sin(bet)*sqrt(t)]]
    cryst.set_cell(uc, scale_atoms=True)
    return cryst
def get_cart_deformed_cell(base_cryst, axis=0, size=1):
    '''Return the cell deformed along one of the cartesian directions.
    Creates a new deformed structure from the base structure along a
    single axis: 0,1,2 = x,y,z stretches; sheers: 3,4,5 = yz, xz, xy.
    The size of the deformation is in percent and degrees, respectively.
    :param base_cryst: structure to be deformed
    :param axis: direction of deformation
    :param size: size of the deformation
    :returns: new, deformed structure
    '''
    deformed = Atoms(base_cryst)
    s = size / 100.0
    # Build the deformation matrix: identity plus a single perturbed
    # element (diagonal for stretches, off-diagonal for sheers).
    L = diag(ones(3))
    if axis < 3:
        L[axis, axis] += s
    elif axis == 3:
        L[1, 2] += s
    elif axis == 4:
        L[0, 2] += s
    else:
        L[0, 1] += s
    deformed.set_cell(dot(base_cryst.get_cell(), L), scale_atoms=True)
    return deformed
def get_strain(cryst, refcell=None):
    '''Calculate the strain tensor in the Voight notation.
    The strain is computed as a conventional 6-vector relative to the
    geometry passed in the refcell parameter (or cryst itself when
    refcell is None, giving zero strain).
    :param cryst: deformed structure
    :param refcell: reference, undeformed structure
    :returns: 6-vector of strain tensor in the Voight notation
    '''
    if refcell is None:
        refcell = cryst
    ref = refcell.get_cell()
    du = cryst.get_cell() - ref
    # Symmetrize the displacement gradient to obtain the strain tensor.
    u = dot(inv(ref), du)
    u = (u + u.T) / 2
    return array([u[0, 0], u[1, 1], u[2, 2],
                  u[2, 1], u[2, 0], u[1, 0]])
if __name__ == '__main__':
    # Self-test / demo: build an MgO rock-salt crystal, then print the
    # volume scan and the elementary deformation geometries.
    from ase.spacegroup import crystal
    a = 4.194
    cryst = crystal(['Mg', 'O'],
                    [(0, 0, 0), (0.5, 0.5, 0.5)],
                    spacegroup=225,
                    cellpar=[a, a, a, 90, 90, 90])
    sl = scan_volumes(cryst)
    print('Volumes: ', end='')
    for c in sl:
        print('%.2f (%.1f%%)' % (c.get_volume(),
                                 100*c.get_volume()/cryst.get_volume()),
              end=' ')
    print()
    sl = get_elementary_deformations(cryst)
    print('Structures: ')
    print(' Vol A B C alph bet gam')
    for n, c in enumerate(sl):
        print('%.4f (%5.1f%%)' % (c.get_volume(),
                                  100*c.get_volume()/cryst.get_volume()),
              end='')
        print((3*' %7.4f' + ' ' + 3*' %7.2f') %
              tuple(c.get_cell_lengths_and_angles()))
|
jochym/Elastic | elastic/elastic.py | get_vecang_cell | python | def get_vecang_cell(cryst, uc=None):
'''
Compute A,B,C, alpha,beta,gamma cell params
from the unit cell matrix (uc) or cryst.
Angles in radians.
'''
if uc is None:
uc = cryst.get_cell()
ucv = [uc[i, :]/norm(uc[i, :]) for i in range(3)]
uca = [acos(dot(ucv[(i+1) % 3], ucv[(i+2) % 3])) for i in range(3)]
return [norm(uc[i, :]) for i in range(3)] + uca | Compute A,B,C, alpha,beta,gamma cell params
from the unit cell matrix (uc) or cryst.
Angles in radians. | train | https://github.com/jochym/Elastic/blob/8daae37d0c48aab8dfb1de2839dab02314817f95/elastic/elastic.py#L620-L630 | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 1998-2017 by Paweł T. Jochym <pawel.jochym@ifj.edu.pl>
#
# This file is part of Elastic.
# Elastic is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# Elastic is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with Elastic. If not, see <http://www.gnu.org/licenses/>.
'''
.. _elastic-mod:
Elastic Module
^^^^^^^^^^^^^^
Elastic is a module for calculation of :math:`C_{ij}` components of elastic
tensor from the strain-stress relation.
The strain components here are ordered in standard way which is different
to ordering in previous versions of the code (up to 4.0).
The ordering is: :math:`u_{xx}, u_{yy}, u_{zz}, u_{yz}, u_{xz}, u_{xy}`.
The general ordering of :math:`C_{ij}` components is (except for triclinic
symmetry and taking into account customary names of constants - e.g.
:math:`C_{16} \\rightarrow C_{14}`):
.. math::
C_{11}, C_{22}, C_{33}, C_{12}, C_{13}, C_{23},
C_{44}, C_{55}, C_{66}, C_{16}, C_{26}, C_{36}, C_{45}
The functions with the name of bravais lattices define the symmetry of the
:math:`C_{ij}` matrix. The matrix is N columns by 6 rows where the columns
corespond to independent elastic constants of the given crystal, while the rows
corespond to the canonical deformations of a crystal. The elements are the
second partial derivatives of the free energy formula for the crystal written
down as a quadratic form of the deformations with respect to elastic constant
and deformation.
*Note:*
The elements for deformations :math:`u_{xy}, u_{xz}, u_{yz}`
have to be divided by 2 to properly match the usual definition
of elastic constants.
See: [LL]_ L.D. Landau, E.M. Lifszyc, "Theory of elasticity"
There is a useful summary also at:
`ScienceWorld <http://scienceworld.wolfram.com/physics/Elasticity.html>`_
---------
'''
from __future__ import print_function, division, absolute_import
import re
from ase.atoms import Atoms
try:
# Try new release of spglib
import spglib as spg
except ImportError:
# Old naming scheme
from pyspglib import spglib as spg
from scipy.linalg import norm, lstsq
from scipy import optimize
from numpy.linalg import inv
from numpy import dot, diag, ones, reshape, linspace, array, mean
from math import acos, pi, cos, sin, sqrt
def BMEOS(v, v0, b0, b0p):
    """Birch-Murnaghan P(V) equation of state: (B0/B0')*((V0/V)**B0' - 1)."""
    compression = v0 / v
    return (b0 / b0p) * (compression ** b0p - 1)
def ctg(x):
    """Cotangent of x, computed as cos(x)/sin(x)."""
    return cos(x) / sin(x)
def csc(x):
    """Cosecant of x, computed as 1/sin(x)."""
    return 1.0 / sin(x)
def regular(u):
    '''
    Equation matrix for the regular (cubic) lattice.
    Constants order:
    .. math::
        C_{11}, C_{12}, C_{44}
    :param u: vector of deformations:
        [ :math:`u_{xx}, u_{yy}, u_{zz}, u_{yz}, u_{xz}, u_{xy}` ]
    :returns: Symmetry defined stress-strain equation matrix
    '''
    uxx, uyy, uzz, uyz, uxz, uxy = u[:6]
    # Diagonal stresses couple to C11/C12; sheer rows carry factor 2.
    diag_rows = [[uxx, uyy + uzz, 0],
                 [uyy, uxx + uzz, 0],
                 [uzz, uxx + uyy, 0]]
    shear_rows = [[0, 0, 2 * s] for s in (uyz, uxz, uxy)]
    return array(diag_rows + shear_rows)
def tetragonal(u):
    '''
    Equation matrix for the tetragonal lattice.
    Constants order:
    .. math::
        C_{11}, C_{33}, C_{12}, C_{13}, C_{44}, C_{14}
    :param u: vector of deformations:
        [ :math:`u_{xx}, u_{yy}, u_{zz}, u_{yz}, u_{xz}, u_{xy}` ]
    :returns: Symmetry defined stress-strain equation matrix
    '''
    uxx, uyy, uzz, uyz, uxz, uxy = u[:6]
    rows = [
        [uxx, 0, uyy, uzz, 0, 0],
        [uyy, 0, uxx, uzz, 0, 0],
        [0, uzz, 0, uxx + uyy, 0, 0],
        [0, 0, 0, 0, 0, 2 * uxy],
        [0, 0, 0, 0, 2 * uxz, 0],
        [0, 0, 0, 0, 2 * uyz, 0],
    ]
    return array(rows)
def orthorombic(u):
    '''
    Equation matrix for the orthorombic lattice.
    Constants order:
    .. math::
        C_{11}, C_{22}, C_{33}, C_{12}, C_{13}, C_{23},
        C_{44}, C_{55}, C_{66}
    :param u: vector of deformations:
        [ :math:`u_{xx}, u_{yy}, u_{zz}, u_{yz}, u_{xz}, u_{xy}` ]
    :returns: Symmetry defined stress-strain equation matrix
    '''
    uxx, uyy, uzz, uyz, uxz, uxy = u[:6]
    rows = [
        [uxx, 0, 0, uyy, uzz, 0, 0, 0, 0],
        [0, uyy, 0, uxx, 0, uzz, 0, 0, 0],
        [0, 0, uzz, 0, uxx, uyy, 0, 0, 0],
    ]
    # Sheer rows: factor-2 entries on C44, C55, C66 respectively.
    for k, shear in enumerate((uyz, uxz, uxy)):
        rows.append([0] * 6 + [0] * k + [2 * shear] + [0] * (2 - k))
    return array(rows)
def trigonal(u):
    '''
    The matrix is constructed based on the approach from L&L
    using auxiliary coordinates: :math:`\\xi=x+iy`, :math:`\\eta=x-iy`.
    The components are calculated from free energy using formula
    introduced in :ref:`symmetry` with appropriate coordinate changes.
    The order of constants is as follows:
    .. math::
        C_{11}, C_{33}, C_{12}, C_{13}, C_{44}, C_{14}
    :param u: vector of deformations:
        [ :math:`u_{xx}, u_{yy}, u_{zz}, u_{yz}, u_{xz}, u_{xy}` ]
    :returns: Symmetry defined stress-strain equation matrix
    '''
    # TODO: Not tested yet.
    # TODO: There is still some doubt about the :math:`C_{14}` constant.
    uxx, uyy, uzz, uyz, uxz, uxy = u[0], u[1], u[2], u[3], u[4], u[5]
    # Kept byte-identical: the +-2*uxz / -4*uxy sign pattern encodes the
    # C_14 coupling that is still under verification (see TODO above).
    return array(
        [[ uxx,     0,    uyy,       uzz, 0,       2*uxz ],
         [ uyy,     0,    uxx,       uzz, 0,      -2*uxz ],
         [ 0,     uzz,      0,   uxx+uyy, 0,           0 ],
         [ 0,       0,      0,         0, 2*uyz,  -4*uxy ],
         [ 0,       0,      0,         0, 2*uxz,  2*(uxx-uyy)],
         [ 2*uxy,   0, -2*uxy,         0, 0,      -4*uyz ]])
def hexagonal(u):
    '''
    Equation matrix for the hexagonal lattice, constructed per L&L
    in the auxiliary coordinates :math:`\\xi=x+iy`, :math:`\\eta=x-iy`.
    The order of constants is as follows:
    .. math::
        C_{11}, C_{33}, C_{12}, C_{13}, C_{44}
    :param u: vector of deformations:
        [ :math:`u_{xx}, u_{yy}, u_{zz}, u_{yz}, u_{xz}, u_{xy}` ]
    :returns: Symmetry defined stress-strain equation matrix
    '''
    # TODO: Still needs good verification
    uxx, uyy, uzz, uyz, uxz, uxy = u[:6]
    rows = [
        [uxx, 0, uyy, uzz, 0],
        [uyy, 0, uxx, uzz, 0],
        [0, uzz, 0, uxx + uyy, 0],
        [0, 0, 0, 0, 2 * uyz],
        [0, 0, 0, 0, 2 * uxz],
        [2 * uxy, 0, -2 * uxy, 0, 0],
    ]
    return array(rows)
def monoclinic(u):
    '''Monoclinic group.
    The ordering of constants is:
    .. math::
        C_{11}, C_{22}, C_{33}, C_{12}, C_{13}, C_{23},
        C_{44}, C_{55}, C_{66}, C_{16}, C_{26}, C_{36}, C_{45}
    :param u: vector of deformations:
        [ :math:`u_{xx}, u_{yy}, u_{zz}, u_{yz}, u_{xz}, u_{xy}` ]
    :returns: Symmetry defined stress-strain equation matrix
    '''
    uxx, uyy, uzz, uyz, uxz, uxy = u[:6]
    rows = [
        [uxx, 0, 0, uyy, uzz, 0, 0, 0, 0, uxy, 0, 0, 0],
        [0, uyy, 0, uxx, 0, uzz, 0, 0, 0, 0, uxy, 0, 0],
        [0, 0, uzz, 0, uxx, uyy, 0, 0, 0, 0, 0, uxy, 0],
        [0, 0, 0, 0, 0, 0, 2 * uyz, 0, 0, 0, 0, 0, uxz],
        [0, 0, 0, 0, 0, 0, 0, 2 * uxz, 0, 0, 0, 0, uyz],
        [0, 0, 0, 0, 0, 0, 0, 0, 2 * uxy, uxx, uyy, uzz, 0],
    ]
    return array(rows)
def triclinic(u):
    '''Triclinic crystals.
    *Note*: This was never tested on the real case. Beware!
    The ordering of constants is:
    .. math::
        C_{11}, C_{22}, C_{33},
        C_{12}, C_{13}, C_{23},
        C_{44}, C_{55}, C_{66},
        C_{16}, C_{26}, C_{36}, C_{46}, C_{56},
        C_{14}, C_{15}, C_{25}, C_{45}
    :param u: vector of deformations:
        [ :math:`u_{xx}, u_{yy}, u_{zz}, u_{yz}, u_{xz}, u_{xy}` ]
    :returns: Symmetry defined stress-strain equation matrix
    '''
    # Based on the monoclinic matrix and not tested on real case.
    # If you have test cases for this symmetry send them to the author.
    uxx, uyy, uzz, uyz, uxz, uxy = u[0], u[1], u[2], u[3], u[4], u[5]
    # Each row is the derivative of one stress component with respect to
    # the constants listed in the docstring; sheer rows carry the
    # customary factor 2. Kept byte-identical - the coupling pattern is
    # explicitly marked as unverified by the author (see note above).
    return array(
        [[uxx, 0, 0,uyy,uzz, 0, 0, 0, 0,uxy, 0, 0, 0, 0,uyz,uxz, 0, 0],
         [ 0,uyy, 0,uxx, 0,uzz, 0, 0, 0, 0,uxy, 0, 0, 0, 0, 0,uxz, 0],
         [ 0, 0,uzz, 0,uxx,uyy, 0, 0, 0, 0, 0,uxy, 0, 0, 0, 0, 0, 0],
         [ 0, 0, 0, 0, 0, 0,2*uyz, 0, 0, 0, 0, 0,uxy, 0,uxx, 0, 0,uxz],
         [ 0, 0, 0, 0, 0, 0, 0,2*uxz, 0, 0, 0, 0, 0,uxy, 0,uxx,uyy,uyz],
         [ 0, 0, 0, 0, 0, 0, 0, 0,2*uxy,uxx,uyy,uzz,uyz,uxz, 0, 0, 0, 0]])
def get_cij_order(cryst):
    '''Give order of elastic constants for the structure
    :param cryst: ASE Atoms object
    :returns: Order of elastic constants as a tuple of strings: C_ij
    '''
    # Keys are the lattice-type numbers produced by get_lattice_type
    # (1=Triclinic ... 7=Cubic). Each entry lists the independent C_ij
    # in the same order the matching symmetry matrix function emits them.
    orders = {
        1: ('C_11', 'C_22', 'C_33', 'C_12', 'C_13', 'C_23',
            'C_44', 'C_55', 'C_66', 'C_16', 'C_26', 'C_36',
            'C_46', 'C_56', 'C_14', 'C_15', 'C_25', 'C_45'),
        2: ('C_11', 'C_22', 'C_33', 'C_12', 'C_13', 'C_23',
            'C_44', 'C_55', 'C_66', 'C_16', 'C_26', 'C_36', 'C_45'),
        3: ('C_11', 'C_22', 'C_33', 'C_12', 'C_13', 'C_23', 'C_44',
            'C_55', 'C_66'),
        4: ('C_11', 'C_33', 'C_12', 'C_13', 'C_44', 'C_14'),
        5: ('C_11', 'C_33', 'C_12', 'C_13', 'C_44', 'C_14'),
        6: ('C_11', 'C_33', 'C_12', 'C_13', 'C_44'),
        7: ('C_11', 'C_12', 'C_44'),
    }
    return orders[get_lattice_type(cryst)[0]]
def get_lattice_type(cryst):
    '''Find the symmetry of the crystal using spglib symmetry finder.
    Derive name of the space group and its number extracted from the result.
    Based on the group number identify also the lattice type and the Bravais
    lattice of the crystal. The lattice type numbers are
    (the numbering starts from 1):
    Triclinic (1), Monoclinic (2), Orthorombic (3),
    Tetragonal (4), Trigonal (5), Hexagonal (6), Cubic (7)
    :param cryst: ASE Atoms object
    :returns: tuple (lattice type number (1-7), lattice name, space group
        name, space group number)
    '''
    # Table of lattice types and corresponding group numbers dividing
    # the ranges: a space-group number below an entry's first element
    # belongs to that lattice (1-2 Triclinic, 3-15 Monoclinic, ...,
    # 195-230 Cubic).
    lattice_types = [
        [3, "Triclinic"],
        [16, "Monoclinic"],
        [75, "Orthorombic"],
        [143, "Tetragonal"],
        [168, "Trigonal"],
        [195, "Hexagonal"],
        [231, "Cubic"]
    ]
    # Parse e.g. 'Fm-3m (225)' into name and number.
    # NOTE(review): assumes spglib always returns this pattern;
    # m would be None otherwise - confirm against spglib version used.
    sg = spg.get_spacegroup(cryst)
    m = re.match(r'([A-Z].*\b)\s*\(([0-9]*)\)', sg)
    sg_name = m.group(1)
    sg_nr = int(m.group(2))
    for n, l in enumerate(lattice_types):
        if sg_nr < l[0]:
            bravais = l[1]
            lattype = n+1
            break
    return lattype, bravais, sg_name, sg_nr
def get_bulk_modulus(cryst):
    '''Return the bulk modulus :math:`B_0` from a fitted B-M EOS.
    The EOS parameters must have been computed beforehand by the
    get_BM_EOS routine (stored as ``cryst.bm_eos``). The value is in
    ASE units; divide by e.g. ``ase.units.GPa`` to convert::
        get_bulk_modulus(cryst)/ase.units.GPa
    :param cryst: ASE Atoms object
    :returns: float, bulk modulus :math:`B_0` in ASE units.
    :raises RuntimeError: when no B-M EOS data is present on cryst.
    '''
    eos = getattr(cryst, 'bm_eos', None)
    if eos is None:
        raise RuntimeError('Missing B-M EOS data.')
    # Cache the result on the structure, matching the original API.
    cryst.bulk_modulus = eos[1]
    return cryst.bulk_modulus
def get_pressure(s):
    '''Return *external* isotropic (hydrostatic) pressure in ASE units.
    A positive value means the system is under external pressure. This
    is a convenience converter from the get_stress() Voight vector.
    :param s: stress tensor in Voight (vector) notation as returned by
        the get_stress() method.
    :returns: float, external hydrostatic pressure in ASE units.
    '''
    # Pressure is minus the average of the three diagonal stresses.
    sxx, syy, szz = s[0], s[1], s[2]
    return -(sxx + syy + szz) / 3
def get_BM_EOS(cryst, systems):
    """Calculate Birch-Murnaghan Equation of State for the crystal.
    The B-M equation of state is defined by:
    .. math::
        P(V)= \\frac{B_0}{B'_0}\\left[
        \\left({\\frac{V}{V_0}}\\right)^{-B'_0} - 1
        \\right]
    Its coefficients are estimated from single-point structures generated
    from the crystal (cryst) by the scan_volumes function between two
    relative volumes. The BM EOS is fitted to the computed points by the
    least squares method. The returned value is a list of fitted
    parameters: :math:`V_0, B_0, B_0'` if the fit succeeded.
    If the fitting fails ``RuntimeError('Calculation failed')`` is raised
    (chained to the underlying fitting exception).
    The data from the calculation and fit is stored in the bm_eos and pv
    members of cryst for future reference. You have to provide properly
    optimized structures in cryst and systems list.
    :param cryst: Atoms object, basic structure
    :param systems: A list of calculated structures
    :returns: tuple of EOS parameters :math:`V_0, B_0, B_0'`.
    """
    # Rows: volume, external pressure, and the three cell-vector lengths.
    pvdat = array([[r.get_volume(),
                    get_pressure(r.get_stress()),
                    norm(r.get_cell()[:, 0]),
                    norm(r.get_cell()[:, 1]),
                    norm(r.get_cell()[:, 2])] for r in systems]).T
    # Estimate the initial guess assuming b0p=1
    # Limiting volumes
    v1 = min(pvdat[0])
    v2 = max(pvdat[0])
    # The pressure is falling with the growing volume
    p2 = min(pvdat[1])
    p1 = max(pvdat[1])
    b0 = (p1*v1-p2*v2)/(v2-v1)
    v0 = v1*(p1+b0)/b0
    # Initial guess
    p0 = [v0, b0, 1]
    # Fitting
    try:
        # curve_fit returns (optimal params, covariance matrix)
        popt, pcov = optimize.curve_fit(BMEOS, pvdat[0], pvdat[1], p0)
    except (ValueError, RuntimeError, optimize.OptimizeWarning) as ex:
        # Chain the original exception so the failure cause is visible.
        raise RuntimeError('Calculation failed') from ex
    cryst.bm_eos = popt
    cryst.pv = pvdat
    return cryst.bm_eos
def get_elementary_deformations(cryst, n=5, d=2):
    '''Generate elementary deformations for elastic tensor calculation.
    The deformations are created based on the symmetry of the crystal and
    are limited to the non-equivalent axes of the crystal.
    :param cryst: Atoms object, basic structure
    :param n: integer, number of deformations per non-equivalent axis
    :param d: float, size of the maximum deformation in percent and degrees
    :returns: list of deformed structures
    '''
    # Deformation look-up table: Bravais lattice -> (axes to deform,
    # symmetry matrix function). Perhaps the number of deformations for
    # trigonal system could be reduced to [0,3] but better safe than sorry.
    deform = {
        "Cubic": [[0, 3], regular],
        "Hexagonal": [[0, 2, 3, 5], hexagonal],
        "Trigonal": [[0, 1, 2, 3, 4, 5], trigonal],
        "Tetragonal": [[0, 2, 3, 5], tetragonal],
        "Orthorombic": [[0, 1, 2, 3, 4, 5], orthorombic],
        "Monoclinic": [[0, 1, 2, 3, 4, 5], monoclinic],
        "Triclinic": [[0, 1, 2, 3, 4, 5], triclinic]
    }
    lattyp, brav, sg_name, sg_nr = get_lattice_type(cryst)
    # Decide which deformations should be used
    axis, symm = deform[brav]
    systems = []
    for a in axis:
        if a < 3:  # tetragonal deformation: stretch, symmetric around 0
            for dx in linspace(-d, d, n):
                systems.append(
                    get_cart_deformed_cell(cryst, axis=a, size=dx))
        elif a < 6:  # sheer deformation (skip the zero angle)
            for dx in linspace(d/10.0, d, n):
                systems.append(
                    get_cart_deformed_cell(cryst, axis=a, size=dx))
    return systems
def get_elastic_tensor(cryst, systems):
    '''Calculate elastic tensor of the crystal.
    The elastic tensor is calculated from the stress-strain relation
    and derived by fitting this relation to the set of linear equations
    build from the symmetry of the crystal and strains and stresses
    of the set of elementary deformations of the unit cell.
    It is assumed that the crystal is converged and optimized
    under intended pressure/stress. The geometry and stress on the
    cryst is taken as the reference point. No additional optimization
    will be run. Structures in cryst and systems list must have calculated
    stresses. The function returns tuple of :math:`C_{ij}` elastic tensor,
    raw Birch coefficients :math:`B_{ij}` and fitting results: residuals,
    solution rank, singular values returned by the lstsq solver.
    :param cryst: Atoms object, basic structure
    :param systems: list of Atoms object with calculated deformed structures
    :returns: tuple(:math:`C_{ij}` float vector,
        tuple(:math:`B_{ij}` float vector, residuals, solution rank,
        singular values))
    '''
    # Deformation look-up table (same as in get_elementary_deformations):
    # Bravais lattice -> (axes to deform, symmetry matrix function).
    deform = {
        "Cubic": [[0, 3], regular],
        "Hexagonal": [[0, 2, 3, 5], hexagonal],
        "Trigonal": [[0, 1, 2, 3, 4, 5], trigonal],
        "Tetragonal": [[0, 2, 3, 5], tetragonal],
        "Orthorombic": [[0, 1, 2, 3, 4, 5], orthorombic],
        "Monoclinic": [[0, 1, 2, 3, 4, 5], monoclinic],
        "Triclinic": [[0, 1, 2, 3, 4, 5], triclinic]
    }
    lattyp, brav, sg_name, sg_nr = get_lattice_type(cryst)
    # Decide which deformations should be used
    axis, symm = deform[brav]
    ul = []
    sl = []
    p = get_pressure(cryst.get_stress())
    for g in systems:
        ul.append(get_strain(g, refcell=cryst))
        # Remove the ambient pressure from the stress tensor
        sl.append(g.get_stress()-array([p, p, p, 0, 0, 0]))
    # Stack the per-system symmetry matrices into a single overdetermined
    # linear system: eqm @ Bij = slm.
    eqm = array([symm(u) for u in ul])
    eqm = reshape(eqm, (eqm.shape[0]*eqm.shape[1], eqm.shape[2]))
    slm = reshape(array(sl), (-1,))
    # NOTE(review): lstsq here is scipy.linalg.lstsq (module import),
    # not numpy.linalg.lstsq as the docstring originally suggested.
    Bij = lstsq(eqm, slm)
    # Calculate elastic constants from Birch coeff.
    # TODO: Check the sign of the pressure array in the B <=> C relation
    if (symm == orthorombic):
        Cij = Bij[0] - array([-p, -p, -p, p, p, p, -p, -p, -p])
    elif (symm == tetragonal):
        Cij = Bij[0] - array([-p, -p, p, p, -p, -p])
    elif (symm == regular):
        Cij = Bij[0] - array([-p, p, -p])
    elif (symm == trigonal):
        Cij = Bij[0] - array([-p, -p, p, p, -p, p])
    elif (symm == hexagonal):
        Cij = Bij[0] - array([-p, -p, p, p, -p])
    elif (symm == monoclinic):
        # TODO: verify this pressure array
        Cij = Bij[0] - array([-p, -p, -p, p, p, p, -p, -p, -p, p, p, p, p])
    elif (symm == triclinic):
        # TODO: verify this pressure array
        Cij = Bij[0] - array([-p, -p, -p, p, p, p, -p, -p, -p,
                              p, p, p, p, p, p, p, p, p])
    return Cij, Bij
def scan_pressures(cryst, lo, hi, n=5, eos=None):
    '''
    Scan the pressure axis from lo to hi (inclusive)
    using B-M EOS as the volume predictor.
    Pressure (lo, hi) in GPa
    :param cryst: Atoms object, basic structure
    :param lo: lower bound of the pressure scan (GPa)
    :param hi: upper bound of the pressure scan (GPa)
    :param n: number of sample points
    :param eos: B-M EOS parameters [V_0, B_0, B_0'] (required)
    :returns: list of scaled structures
    :raises RuntimeError: when eos is not provided
    '''
    def invbmeos(b, bp, x):
        # Inverse B-M EOS to get relative volumes from pressures.
        # This will work only in limited pressure range p > -B/B'.
        # Warning! Relative - the V0 prefactor is removed.
        return array([pow(b/(bp*xv+b), 1/(3*bp)) for xv in x])
    if eos is None:
        raise RuntimeError('Required EOS data missing')
    # Limit negative pressures to 90% of the singularity value.
    # Beyond this B-M EOS is bound to be wrong anyway.
    lo = max(lo, -0.9*eos[1]/eos[2])
    scale = (eos[0]/cryst.get_volume())*invbmeos(eos[1], eos[2],
                                                 linspace(lo, hi, num=n))
    uc = cryst.get_cell()
    systems = [Atoms(cryst) for _ in scale]
    # BUGFIX: use a distinct loop variable - the original shadowed the
    # parameter `n` here.
    for i, s in enumerate(scale):
        systems[i].set_cell(s*uc, scale_atoms=True)
    return systems
def scan_volumes(cryst, lo=0.98, hi=1.02, n=5, scale_volumes=True):
    '''
    Provide set of crystals along volume axis from lo to hi (inclusive).
    No volume cell optimization is performed. Bounds are specified as
    fractions (1.10 = 10% increase). If scale_volumes==False the scaling
    is applied to lattice vectors instead of volumes.
    :param cryst: Atoms object, basic structure
    :param lo: lower bound of the V/V_0 in the scan
    :param hi: upper bound of the V/V_0 in the scan
    :param n: number of volume sample points
    :param scale_volumes: If True scale the unit cell volume or,
        if False, scale the length of lattice axes.
    :returns: a list of deformed systems
    '''
    scale = linspace(lo, hi, num=n)
    if scale_volumes:
        # Cube root converts a volume ratio into a lattice-vector ratio.
        scale **= (1.0/3.0)
    uc = cryst.get_cell()
    systems = [Atoms(cryst) for _ in scale]
    # BUGFIX: use a distinct loop variable - the original shadowed the
    # parameter `n` here.
    for i, s in enumerate(scale):
        systems[i].set_cell(s*uc, scale_atoms=True)
    return systems
def get_deformed_cell(base_cryst, axis=0, size=1):
    '''
    Return the cell (with atoms) deformed along one
    cell parameter (0,1,2 = a,b,c ; 3,4,5 = alpha,beta,gamma) by
    size percent or size degrees (axis/angles).

    :param base_cryst: structure to be deformed (ASE Atoms)
    :param axis: 0-2 scale lattice vector a/b/c; 3-5 change angle
        alpha/beta/gamma
    :param size: deformation size in percent (axes) or degrees (angles)
    :returns: new, deformed structure
    :raises ValueError: when the modified angles lead to an impossible
        (imaginary-coordinate) cell geometry
    '''
    cryst = Atoms(base_cryst)
    uc = base_cryst.get_cell()
    if axis < 3:
        # Simple scaling of a single lattice vector.
        uc[axis, :] = (1+size/100.0)*uc[axis, :]
    else:
        (a, b, c, alp, bet, gam) = get_vecang_cell(cryst)
        d = array([0.0, 0.0, 0.0])
        d[axis-3] = pi*size/180
        (alp, bet, gam) = array((alp, bet, gam))+d
        # t < 0 means the three angles are mutually inconsistent and the
        # c-vector z-component would become imaginary.
        t = 1 - (ctg(bet)*ctg(gam)-cos(alp)*csc(bet)*csc(gam))**2
        if t < 0.0:
            # BUGFIX: corrected "Cennot" -> "Cannot" in the user message.
            print('''
            The parameters (alpha,beta,gamma)=(%f,%f,%f) are probably
            incorrect and lead to imaginary coordinates.
            This range of parameters is unsupported by this program
            (and is, let me say, very strange for a crystal).
            Cannot continue, bye.''' % (alp, bet, gam))
            raise ValueError
        else:
            # Rebuild the cell matrix from lengths and the new angles,
            # with a along x and b in the x-y plane.
            uc = [[a, 0.0, 0.0],
                  [b*cos(gam), b*sin(gam), 0],
                  [c*cos(bet),
                   c*(cos(alp)/sin(gam) - cos(bet)*ctg(gam)),
                   c*sin(bet)*sqrt(t)]]
    cryst.set_cell(uc, scale_atoms=True)
    return cryst
def get_cart_deformed_cell(base_cryst, axis=0, size=1):
    '''Return the cell deformed along one of the cartesian directions.

    Creates a new deformed structure. The deformation is based on the
    base structure and is performed along a single axis. The axis is
    specified as follows: 0,1,2 = x,y,z ; sheers: 3,4,5 = yz, xz, xy.
    The size of the deformation is in percent and degrees, respectively.

    :param base_cryst: structure to be deformed
    :param axis: direction of deformation
    :param size: size of the deformation
    :returns: new, deformed structure
    '''
    deformed = Atoms(base_cryst)
    strain = size/100.0
    # Start from the identity and add the strain to a single component:
    # diagonal entries for axial strains, off-diagonal for shears.
    defm = diag(ones(3))
    if axis < 3:
        defm[axis, axis] += strain
    elif axis == 3:
        defm[1, 2] += strain
    elif axis == 4:
        defm[0, 2] += strain
    else:
        defm[0, 1] += strain
    deformed.set_cell(dot(base_cryst.get_cell(), defm), scale_atoms=True)
    return deformed
def get_strain(cryst, refcell=None):
    '''Calculate strain tensor in the Voight notation.

    Computes the strain tensor in the Voight notation as a conventional
    6-vector. The calculation is done with respect to the crystal
    geometry passed in the refcell parameter.

    :param cryst: deformed structure
    :param refcell: reference, undeformed structure
    :returns: 6-vector of strain tensor in the Voight notation
    '''
    if refcell is None:
        refcell = cryst
    ref = refcell.get_cell()
    delta = cryst.get_cell() - ref
    # Displacement gradient in the reference frame, symmetrized to strain.
    grad = dot(inv(ref), delta)
    strain = (grad + grad.T)/2
    return array([strain[0, 0], strain[1, 1], strain[2, 2],
                  strain[2, 1], strain[2, 0], strain[1, 0]])
if __name__ == '__main__':
    # Demo/self-test: build an MgO rock-salt cell and exercise the
    # volume scan and elementary-deformation generators.
    from ase.spacegroup import crystal
    a = 4.194
    # MgO in space group Fm-3m (225), cubic cell a = 4.194.
    cryst = crystal(['Mg', 'O'],
                    [(0, 0, 0), (0.5, 0.5, 0.5)],
                    spacegroup=225,
                    cellpar=[a, a, a, 90, 90, 90])
    # Print the scanned volumes and their ratio to the reference volume.
    sl = scan_volumes(cryst)
    print('Volumes: ', end='')
    for c in sl:
        print('%.2f (%.1f%%)' % (c.get_volume(),
                                 100*c.get_volume()/cryst.get_volume()),
              end=' ')
    print()
    # List the deformed structures used for elastic-tensor fitting:
    # volume, cell lengths and angles for each generated deformation.
    sl = get_elementary_deformations(cryst)
    print('Structures: ')
    print(' Vol A B C alph bet gam')
    for n, c in enumerate(sl):
        print('%.4f (%5.1f%%)' % (c.get_volume(),
                                  100*c.get_volume()/cryst.get_volume()),
              end='')
        print((3*' %7.4f' + ' ' + 3*' %7.2f') %
              tuple(c.get_cell_lengths_and_angles()))
|
jochym/Elastic | elastic/elastic.py | get_deformed_cell | python | def get_deformed_cell(base_cryst, axis=0, size=1):
'''
Return the cell (with atoms) deformed along one
cell parameter (0,1,2 = a,b,c ; 3,4,5 = alpha,beta,gamma) by
size percent or size degrees (axis/angles).
'''
cryst = Atoms(base_cryst)
uc = base_cryst.get_cell()
if axis < 3:
uc[axis, :] = (1+size/100.0)*uc[axis, :]
else:
(a, b, c, alp, bet, gam) = get_vecang_cell(cryst)
d = array([0.0, 0.0, 0.0])
d[axis-3] = pi*size/180
(alp, bet, gam) = array((alp, bet, gam))+d
t = 1 - (ctg(bet)*ctg(gam)-cos(alp)*csc(bet)*csc(gam))**2
if t < 0.0:
print('''
The parameters (alpha,beta,gamma)=(%f,%f,%f) are probably
incorrect and lead to imaginary coordinates.
This range of parameters is unsupported by this program
(and is, let me say, very strange for a crystal).
Cennot continue, bye.''' % (alp, bet, gam))
raise ValueError
else:
uc = [[a, 0.0, 0.0],
[b*cos(gam), b*sin(gam), 0],
[c*cos(bet),
c*(cos(alp)/sin(gam) - cos(bet)*ctg(gam)),
c*sin(bet)*sqrt(t)]]
cryst.set_cell(uc, scale_atoms=True)
# print(cryst.get_cell())
# print(uc)
return cryst | Return the cell (with atoms) deformed along one
cell parameter (0,1,2 = a,b,c ; 3,4,5 = alpha,beta,gamma) by
size percent or size degrees (axis/angles). | train | https://github.com/jochym/Elastic/blob/8daae37d0c48aab8dfb1de2839dab02314817f95/elastic/elastic.py#L633-L666 | [
"def ctg(x):\n return cos(x)/sin(x)\n",
"def csc(x):\n return 1/sin(x)\n",
"def get_vecang_cell(cryst, uc=None):\n '''\n Compute A,B,C, alpha,beta,gamma cell params\n from the unit cell matrix (uc) or cryst.\n Angles in radians.\n '''\n if uc is None:\n uc = cryst.get_cell()\n ... | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 1998-2017 by Paweł T. Jochym <pawel.jochym@ifj.edu.pl>
#
# This file is part of Elastic.
# Elastic is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# Elastic is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with Elastic. If not, see <http://www.gnu.org/licenses/>.
'''
.. _elastic-mod:
Elastic Module
^^^^^^^^^^^^^^
Elastic is a module for calculation of :math:`C_{ij}` components of elastic
tensor from the strain-stress relation.
The strain components here are ordered in standard way which is different
to ordering in previous versions of the code (up to 4.0).
The ordering is: :math:`u_{xx}, u_{yy}, u_{zz}, u_{yz}, u_{xz}, u_{xy}`.
The general ordering of :math:`C_{ij}` components is (except for triclinic
symmetry and taking into account customary names of constants - e.g.
:math:`C_{16} \\rightarrow C_{14}`):
.. math::
C_{11}, C_{22}, C_{33}, C_{12}, C_{13}, C_{23},
C_{44}, C_{55}, C_{66}, C_{16}, C_{26}, C_{36}, C_{45}
The functions with the name of bravais lattices define the symmetry of the
:math:`C_{ij}` matrix. The matrix is N columns by 6 rows where the columns
corespond to independent elastic constants of the given crystal, while the rows
corespond to the canonical deformations of a crystal. The elements are the
second partial derivatives of the free energy formula for the crystal written
down as a quadratic form of the deformations with respect to elastic constant
and deformation.
*Note:*
The elements for deformations :math:`u_{xy}, u_{xz}, u_{yz}`
have to be divided by 2 to properly match the usual definition
of elastic constants.
See: [LL]_ L.D. Landau, E.M. Lifszyc, "Theory of elasticity"
There is some usefull summary also at:
`ScienceWorld <http://scienceworld.wolfram.com/physics/Elasticity.html>`_
---------
'''
from __future__ import print_function, division, absolute_import
import re
from ase.atoms import Atoms
try:
# Try new release of spglib
import spglib as spg
except ImportError:
# Old naming scheme
from pyspglib import spglib as spg
from scipy.linalg import norm, lstsq
from scipy import optimize
from numpy.linalg import inv
from numpy import dot, diag, ones, reshape, linspace, array, mean
from math import acos, pi, cos, sin, sqrt
def BMEOS(v, v0, b0, b0p):
    '''Birch-Murnaghan P(V) relation: (B0/B0')*((V0/V)**B0' - 1).'''
    compression = pow(v0/v, b0p)
    return (b0/b0p)*(compression - 1)
def ctg(x):
    '''Cotangent of x (x in radians).'''
    sine = sin(x)
    return cos(x)/sine
def csc(x):
    '''Cosecant of x (x in radians): 1/sin(x).'''
    sine = sin(x)
    return 1/sine
def regular(u):
    '''
    Equation matrix generation for the regular (cubic) lattice.
    The order of constants is as follows:

    .. math::
       C_{11}, C_{12}, C_{44}

    :param u: vector of deformations:
        [ :math:`u_{xx}, u_{yy}, u_{zz}, u_{yz}, u_{xz}, u_{xy}` ]
    :returns: Symmetry defined stress-strain equation matrix
    '''
    uxx, uyy, uzz, uyz, uxz, uxy = u[:6]
    rows = [[uxx, uyy + uzz, 0],
            [uyy, uzz + uxx, 0],
            [uzz, uxx + uyy, 0],
            [0, 0, 2 * uyz],
            [0, 0, 2 * uxz],
            [0, 0, 2 * uxy]]
    return array(rows)
def tetragonal(u):
    '''
    Equation matrix generation for the tetragonal lattice.
    The order of constants is as follows:

    .. math::
       C_{11}, C_{33}, C_{12}, C_{13}, C_{44}, C_{14}

    :param u: vector of deformations:
        [ :math:`u_{xx}, u_{yy}, u_{zz}, u_{yz}, u_{xz}, u_{xy}` ]
    :returns: Symmetry defined stress-strain equation matrix
    '''
    uxx, uyy, uzz, uyz, uxz, uxy = u[:6]
    rows = [[uxx, 0, uyy, uzz, 0, 0],
            [uyy, 0, uxx, uzz, 0, 0],
            [0, uzz, 0, uxx + uyy, 0, 0],
            [0, 0, 0, 0, 0, 2 * uxy],
            [0, 0, 0, 0, 2 * uxz, 0],
            [0, 0, 0, 0, 2 * uyz, 0]]
    return array(rows)
def orthorombic(u):
    '''
    Equation matrix generation for the orthorombic lattice.
    The order of constants is as follows:

    .. math::
       C_{11}, C_{22}, C_{33}, C_{12}, C_{13}, C_{23},
       C_{44}, C_{55}, C_{66}

    :param u: vector of deformations:
        [ :math:`u_{xx}, u_{yy}, u_{zz}, u_{yz}, u_{xz}, u_{xy}` ]
    :returns: Symmetry defined stress-strain equation matrix
    '''
    uxx, uyy, uzz, uyz, uxz, uxy = u[:6]
    rows = [[uxx, 0, 0, uyy, uzz, 0, 0, 0, 0],
            [0, uyy, 0, uxx, 0, uzz, 0, 0, 0],
            [0, 0, uzz, 0, uxx, uyy, 0, 0, 0],
            [0, 0, 0, 0, 0, 0, 2 * uyz, 0, 0],
            [0, 0, 0, 0, 0, 0, 0, 2 * uxz, 0],
            [0, 0, 0, 0, 0, 0, 0, 0, 2 * uxy]]
    return array(rows)
def trigonal(u):
    '''
    Equation matrix for the trigonal lattice, built following L&L with
    the auxiliary coordinates :math:`\\xi=x+iy`, :math:`\\eta=x-iy`.
    The order of constants is as follows:

    .. math::
       C_{11}, C_{33}, C_{12}, C_{13}, C_{44}, C_{14}

    :param u: vector of deformations:
        [ :math:`u_{xx}, u_{yy}, u_{zz}, u_{yz}, u_{xz}, u_{xy}` ]
    :returns: Symmetry defined stress-strain equation matrix
    '''
    # TODO: Not tested yet on a real trigonal crystal.
    # TODO: There is still some doubt about the C_14 constant.
    uxx, uyy, uzz, uyz, uxz, uxy = u[:6]
    rows = [[uxx, 0, uyy, uzz, 0, 2 * uxz],
            [uyy, 0, uxx, uzz, 0, -2 * uxz],
            [0, uzz, 0, uxx + uyy, 0, 0],
            [0, 0, 0, 0, 2 * uyz, -4 * uxy],
            [0, 0, 0, 0, 2 * uxz, 2 * (uxx - uyy)],
            [2 * uxy, 0, -2 * uxy, 0, 0, -4 * uyz]]
    return array(rows)
def hexagonal(u):
    '''
    Equation matrix for the hexagonal lattice, built following L&L with
    the auxiliary coordinates :math:`\\xi=x+iy`, :math:`\\eta=x-iy`.
    The order of constants is as follows:

    .. math::
       C_{11}, C_{33}, C_{12}, C_{13}, C_{44}

    :param u: vector of deformations:
        [ :math:`u_{xx}, u_{yy}, u_{zz}, u_{yz}, u_{xz}, u_{xy}` ]
    :returns: Symmetry defined stress-strain equation matrix
    '''
    # TODO: Still needs good verification.
    uxx, uyy, uzz, uyz, uxz, uxy = u[:6]
    rows = [[uxx, 0, uyy, uzz, 0],
            [uyy, 0, uxx, uzz, 0],
            [0, uzz, 0, uxx + uyy, 0],
            [0, 0, 0, 0, 2 * uyz],
            [0, 0, 0, 0, 2 * uxz],
            [2 * uxy, 0, -2 * uxy, 0, 0]]
    return array(rows)
def monoclinic(u):
    '''Equation matrix for the monoclinic group.
    The ordering of constants is:

    .. math::
       C_{11}, C_{22}, C_{33}, C_{12}, C_{13}, C_{23},
       C_{44}, C_{55}, C_{66}, C_{16}, C_{26}, C_{36}, C_{45}

    :param u: vector of deformations:
        [ :math:`u_{xx}, u_{yy}, u_{zz}, u_{yz}, u_{xz}, u_{xy}` ]
    :returns: Symmetry defined stress-strain equation matrix
    '''
    uxx, uyy, uzz, uyz, uxz, uxy = u[:6]
    rows = [[uxx, 0, 0, uyy, uzz, 0, 0, 0, 0, uxy, 0, 0, 0],
            [0, uyy, 0, uxx, 0, uzz, 0, 0, 0, 0, uxy, 0, 0],
            [0, 0, uzz, 0, uxx, uyy, 0, 0, 0, 0, 0, uxy, 0],
            [0, 0, 0, 0, 0, 0, 2 * uyz, 0, 0, 0, 0, 0, uxz],
            [0, 0, 0, 0, 0, 0, 0, 2 * uxz, 0, 0, 0, 0, uyz],
            [0, 0, 0, 0, 0, 0, 0, 0, 2 * uxy, uxx, uyy, uzz, 0]]
    return array(rows)
def triclinic(u):
    '''Equation matrix for triclinic crystals.

    *Note*: This was never tested on a real case. Beware!

    The ordering of constants is:

    .. math::
       C_{11}, C_{22}, C_{33},
       C_{12}, C_{13}, C_{23},
       C_{44}, C_{55}, C_{66},
       C_{16}, C_{26}, C_{36}, C_{46}, C_{56},
       C_{14}, C_{15}, C_{25}, C_{45}

    :param u: vector of deformations:
        [ :math:`u_{xx}, u_{yy}, u_{zz}, u_{yz}, u_{xz}, u_{xy}` ]
    :returns: Symmetry defined stress-strain equation matrix
    '''
    # Based on the monoclinic matrix and not tested on a real case.
    # If you have test cases for this symmetry send them to the author.
    uxx, uyy, uzz, uyz, uxz, uxy = u[:6]
    rows = [
        [uxx, 0, 0, uyy, uzz, 0, 0, 0, 0, uxy, 0, 0, 0, 0, uyz, uxz, 0, 0],
        [0, uyy, 0, uxx, 0, uzz, 0, 0, 0, 0, uxy, 0, 0, 0, 0, 0, uxz, 0],
        [0, 0, uzz, 0, uxx, uyy, 0, 0, 0, 0, 0, uxy, 0, 0, 0, 0, 0, 0],
        [0, 0, 0, 0, 0, 0, 2 * uyz, 0, 0, 0, 0, 0, uxy, 0, uxx, 0, 0, uxz],
        [0, 0, 0, 0, 0, 0, 0, 2 * uxz, 0, 0, 0, 0, 0, uxy, 0, uxx, uyy, uyz],
        [0, 0, 0, 0, 0, 0, 0, 0, 2 * uxy, uxx, uyy, uzz, uyz, uxz, 0, 0, 0, 0]]
    return array(rows)
def get_cij_order(cryst):
    '''Give the order of elastic constants for the structure.

    :param cryst: ASE Atoms object
    :returns: Order of elastic constants as a tuple of strings: C_ij
    '''
    # Keyed by the lattice type number from get_lattice_type():
    # 1 Triclinic, 2 Monoclinic, 3 Orthorombic, 4 Tetragonal,
    # 5 Trigonal, 6 Hexagonal, 7 Cubic.  Tetragonal and trigonal
    # (4 and 5) share the same six-constant ordering.
    orders = {
        1: ('C_11', 'C_22', 'C_33', 'C_12', 'C_13', 'C_23',
            'C_44', 'C_55', 'C_66', 'C_16', 'C_26', 'C_36',
            'C_46', 'C_56', 'C_14', 'C_15', 'C_25', 'C_45'),
        2: ('C_11', 'C_22', 'C_33', 'C_12', 'C_13', 'C_23',
            'C_44', 'C_55', 'C_66', 'C_16', 'C_26', 'C_36', 'C_45'),
        3: ('C_11', 'C_22', 'C_33', 'C_12', 'C_13', 'C_23', 'C_44',
            'C_55', 'C_66'),
        4: ('C_11', 'C_33', 'C_12', 'C_13', 'C_44', 'C_14'),
        5: ('C_11', 'C_33', 'C_12', 'C_13', 'C_44', 'C_14'),
        6: ('C_11', 'C_33', 'C_12', 'C_13', 'C_44'),
        7: ('C_11', 'C_12', 'C_44'),
    }
    return orders[get_lattice_type(cryst)[0]]
def get_lattice_type(cryst):
    '''Find the symmetry of the crystal using spglib symmetry finder.

    Derive name of the space group and its number extracted from the result.
    Based on the group number identify also the lattice type and the Bravais
    lattice of the crystal. The lattice type numbers are
    (the numbering starts from 1):

    Triclinic (1), Monoclinic (2), Orthorombic (3),
    Tetragonal (4), Trigonal (5), Hexagonal (6), Cubic (7)

    :param cryst: ASE Atoms object
    :returns: tuple (lattice type number (1-7), lattice name, space group
              name, space group number)
    '''
    # Table of lattice types and correcponding group numbers dividing
    # the ranges: a crystal belongs to the first row whose upper bound
    # is strictly greater than its space-group number.
    lattice_types = [
        [3, "Triclinic"],
        [16, "Monoclinic"],
        [75, "Orthorombic"],
        [143, "Tetragonal"],
        [168, "Trigonal"],
        [195, "Hexagonal"],
        [231, "Cubic"]
    ]
    # spglib returns a string like 'Fm-3m (225)'; split into name + number.
    sg = spg.get_spacegroup(cryst)
    m = re.match(r'([A-Z].*\b)\s*\(([0-9]*)\)', sg)
    # NOTE(review): if spglib output ever deviates from the pattern,
    # m is None and the next line raises AttributeError - confirm format.
    sg_name = m.group(1)
    sg_nr = int(m.group(2))
    for n, l in enumerate(lattice_types):
        if sg_nr < l[0]:
            bravais = l[1]
            lattype = n+1
            break
    return lattype, bravais, sg_name, sg_nr
def get_bulk_modulus(cryst):
    '''Calculate bulk modulus using the Birch-Murnaghan equation of state.

    The EOS must be previously calculated by the get_BM_EOS routine; the
    returned bulk modulus is the :math:`B_0` coefficient of the B-M EOS.
    The units of the result are defined by ASE. To get the result in
    any particular units (e.g. GPa) divide it by ase.units.<unit name>::

        get_bulk_modulus(cryst)/ase.units.GPa

    :param cryst: ASE Atoms object
    :returns: float, bulk modulus :math:`B_0` in ASE units.
    :raises RuntimeError: when no B-M EOS data is attached to ``cryst``
    '''
    eos = getattr(cryst, 'bm_eos', None)
    if eos is None:
        raise RuntimeError('Missing B-M EOS data.')
    # Cache the result on the structure for later reference.
    cryst.bulk_modulus = eos[1]
    return cryst.bulk_modulus
def get_pressure(s):
    '''Return *external* isotropic (hydrostatic) pressure in ASE units.

    If the pressure is positive the system is under external pressure.
    This is a convenience function converting the output of get_stress
    into external pressure.

    :param s: stress tensor in Voight (vector) notation as returned by
        the get_stress() method.
    :returns: float, external hydrostatic pressure in ASE units.
    '''
    # Pressure is minus the mean of the three diagonal stress components.
    hydrostatic = mean(s[:3])
    return -hydrostatic
def get_BM_EOS(cryst, systems):
    """Calculate Birch-Murnaghan Equation of State for the crystal.

    The B-M equation of state is defined by:

    .. math::
       P(V)= \\frac{B_0}{B'_0}\\left[
       \\left({\\frac{V}{V_0}}\\right)^{-B'_0} - 1
       \\right]

    Its coefficients are estimated from single-point structures in the
    ``systems`` list (e.g. generated by scan_volumes) by least-squares
    fitting. The returned value is a list of fitted parameters:
    :math:`V_0, B_0, B_0'` if the fit succeeded; otherwise
    ``RuntimeError('Calculation failed')`` is raised. The data from the
    calculation and fit is stored in the ``bm_eos`` and ``pv`` members of
    ``cryst`` for future reference. You must provide properly optimized
    structures in ``cryst`` and the ``systems`` list, each with computed
    stresses.

    :param cryst: Atoms object, basic structure
    :param systems: A list of calculated structures
    :returns: tuple of EOS parameters :math:`V_0, B_0, B_0'`.
    """
    # Rows (after transpose): volume, pressure, |a|, |b|, |c| per structure.
    pvdat = array([[r.get_volume(),
                    get_pressure(r.get_stress()),
                    norm(r.get_cell()[:, 0]),
                    norm(r.get_cell()[:, 1]),
                    norm(r.get_cell()[:, 2])] for r in systems]).T
    # Estimate the initial guess assuming b0p=1
    # Limiting volumes
    v1 = min(pvdat[0])
    v2 = max(pvdat[0])
    # The pressure is falling with the growing volume
    p2 = min(pvdat[1])
    p1 = max(pvdat[1])
    # Linear two-point estimate of B0 and V0 used to seed the fit.
    b0 = (p1*v1-p2*v2)/(v2-v1)
    v0 = v1*(p1+b0)/b0
    # Initial guess
    p0 = [v0, b0, 1]
    # Fitting
    try :
        # succ (covariance) is intentionally unused; only the parameters
        # p1 = [V0, B0, B0'] are kept.
        p1, succ = optimize.curve_fit(BMEOS, pvdat[0], pvdat[1], p0)
    except (ValueError, RuntimeError, optimize.OptimizeWarning) as ex:
        raise RuntimeError('Calculation failed')
    cryst.bm_eos = p1
    cryst.pv = pvdat
    return cryst.bm_eos
def get_elementary_deformations(cryst, n=5, d=2):
    '''Generate elementary deformations for elastic tensor calculation.

    The deformations are created based on the symmetry of the crystal and
    are limited to the non-equivalet axes of the crystal.

    :param cryst: Atoms object, basic structure
    :param n: integer, number of deformations per non-equivalent axis
    :param d: float, size of the maximum deformation in percent and degrees
    :returns: list of deformed structures
    '''
    # Deformation look-up table: Bravais lattice name ->
    # [list of non-equivalent deformation axes, symmetry matrix function].
    # NOTE(review): this table is duplicated in get_elastic_tensor;
    # the two copies must stay in sync.
    # Perhaps the number of deformations for trigonal
    # system could be reduced to [0,3] but better safe then sorry
    deform = {
        "Cubic": [[0, 3], regular],
        "Hexagonal": [[0, 2, 3, 5], hexagonal],
        "Trigonal": [[0, 1, 2, 3, 4, 5], trigonal],
        "Tetragonal": [[0, 2, 3, 5], tetragonal],
        "Orthorombic": [[0, 1, 2, 3, 4, 5], orthorombic],
        "Monoclinic": [[0, 1, 2, 3, 4, 5], monoclinic],
        "Triclinic": [[0, 1, 2, 3, 4, 5], triclinic]
    }
    lattyp, brav, sg_name, sg_nr = get_lattice_type(cryst)
    # Decide which deformations should be used
    axis, symm = deform[brav]
    systems = []
    for a in axis:
        if a < 3:  # tetragonal deformation
            # Axial strains scan symmetrically through zero.
            for dx in linspace(-d, d, n):
                systems.append(
                    get_cart_deformed_cell(cryst, axis=a, size=dx))
        elif a < 6:  # sheer deformation (skip the zero angle)
            for dx in linspace(d/10.0, d, n):
                systems.append(
                    get_cart_deformed_cell(cryst, axis=a, size=dx))
    return systems
def get_elastic_tensor(cryst, systems):
    '''Calculate elastic tensor of the crystal.

    The elastic tensor is calculated from the stress-strain relation
    and derived by fitting this relation to the set of linear equations
    build from the symmetry of the crystal and strains and stresses
    of the set of elementary deformations of the unit cell.

    It is assumed that the crystal is converged and optimized
    under intended pressure/stress. The geometry and stress on the
    cryst is taken as the reference point. No additional optimization
    will be run. Structures in cryst and systems list must have calculated
    stresses. The function returns tuple of :math:`C_{ij}` elastic tensor,
    raw Birch coefficients :math:`B_{ij}` and fitting results: residuals,
    solution rank, singular values returned by numpy.linalg.lstsq.

    :param cryst: Atoms object, basic structure
    :param systems: list of Atoms object with calculated deformed structures
    :returns: tuple(:math:`C_{ij}` float vector,
        tuple(:math:`B_{ij}` float vector, residuals, solution rank,
        singular values))
    '''
    # Deformation look-up table: Bravais lattice name ->
    # [list of non-equivalent deformation axes, symmetry matrix function].
    # NOTE(review): duplicated from get_elementary_deformations;
    # the two copies must stay in sync.
    # Perhaps the number of deformations for trigonal
    # system could be reduced to [0,3] but better safe then sorry
    deform = {
        "Cubic": [[0, 3], regular],
        "Hexagonal": [[0, 2, 3, 5], hexagonal],
        "Trigonal": [[0, 1, 2, 3, 4, 5], trigonal],
        "Tetragonal": [[0, 2, 3, 5], tetragonal],
        "Orthorombic": [[0, 1, 2, 3, 4, 5], orthorombic],
        "Monoclinic": [[0, 1, 2, 3, 4, 5], monoclinic],
        "Triclinic": [[0, 1, 2, 3, 4, 5], triclinic]
    }
    lattyp, brav, sg_name, sg_nr = get_lattice_type(cryst)
    # Decide which deformations should be used
    axis, symm = deform[brav]
    # Build one strain vector (ul) and one pressure-corrected stress
    # vector (sl) per deformed structure.
    ul = []
    sl = []
    p = get_pressure(cryst.get_stress())
    for g in systems:
        ul.append(get_strain(g, refcell=cryst))
        # Remove the ambient pressure from the stress tensor
        sl.append(g.get_stress()-array([p, p, p, 0, 0, 0]))
    # Stack the per-structure symmetry equation matrices into one
    # overdetermined linear system  eqm . B = slm  and solve by
    # least squares for the Birch coefficients B_ij.
    eqm = array([symm(u) for u in ul])
    eqm = reshape(eqm, (eqm.shape[0]*eqm.shape[1], eqm.shape[2]))
    slm = reshape(array(sl), (-1,))
    Bij = lstsq(eqm, slm)
    # Calculate elastic constants from Birch coeff.
    # TODO: Check the sign of the pressure array in the B <=> C relation
    if (symm == orthorombic):
        Cij = Bij[0] - array([-p, -p, -p, p, p, p, -p, -p, -p])
    elif (symm == tetragonal):
        Cij = Bij[0] - array([-p, -p, p, p, -p, -p])
    elif (symm == regular):
        Cij = Bij[0] - array([-p, p, -p])
    elif (symm == trigonal):
        Cij = Bij[0] - array([-p, -p, p, p, -p, p])
    elif (symm == hexagonal):
        Cij = Bij[0] - array([-p, -p, p, p, -p])
    elif (symm == monoclinic):
        # TODO: verify this pressure array
        Cij = Bij[0] - array([-p, -p, -p, p, p, p, -p, -p, -p, p, p, p, p])
    elif (symm == triclinic):
        # TODO: verify this pressure array
        Cij = Bij[0] - array([-p, -p, -p, p, p, p, -p, -p, -p,
                              p, p, p, p, p, p, p, p, p])
    return Cij, Bij
def scan_pressures(cryst, lo, hi, n=5, eos=None):
    '''
    Scan the pressure axis from lo to hi (inclusive)
    using B-M EOS as the volume predictor.
    Pressure (lo, hi) in GPa.

    :param cryst: reference structure (ASE Atoms) providing cell and volume
    :param lo: lower pressure bound (GPa)
    :param hi: upper pressure bound (GPa)
    :param n: number of pressure sample points
    :param eos: Birch-Murnaghan parameters ``[V_0, B_0, B_0']`` (required)
    :returns: list of isotropically scaled copies of ``cryst``
    :raises RuntimeError: when ``eos`` is not provided
    '''
    # Inverse B-M EOS to get volumes from pressures.
    # This will work only in limited pressure range p>-B/B'.
    # Warning! Relative, the V0 prefactor is removed.
    def invbmeos(b, bp, x):
        return array([pow(b/(bp*xv+b), 1/(3*bp)) for xv in x])
    if eos is None:
        raise RuntimeError('Required EOS data missing')
    # Limit negative pressures to 90% of the singularity value.
    # Beyond this B-M EOS is bound to be wrong anyway.
    lo = max(lo, -0.9*eos[1]/eos[2])
    scale = (eos[0]/cryst.get_volume())*invbmeos(eos[1], eos[2],
                                                 linspace(lo, hi, num=n))
    uc = cryst.get_cell()
    # BUGFIX: the loop index previously reused the name of parameter ``n``,
    # shadowing it; renamed to ``i``.  The comprehension variable was also
    # unused, hence ``_``.
    systems = [Atoms(cryst) for _ in scale]
    for i, s in enumerate(scale):
        systems[i].set_cell(s*uc, scale_atoms=True)
    return systems
def scan_volumes(cryst, lo=0.98, hi=1.02, n=5, scale_volumes=True):
    '''
    Provide set of crystals along volume axis from lo to hi (inclusive).
    No volume cell optimization is performed. Bounds are specified as
    fractions (1.10 = 10% increase). If scale_volumes==False the scalling
    is applied to lattice vectors instead of volumes.

    :param cryst: reference structure (ASE Atoms)
    :param lo: lower bound of the V/V_0 in the scan
    :param hi: upper bound of the V/V_0 in the scan
    :param n: number of volume sample points
    :param scale_volumes: If True scale the unit cell volume or,
        if False, scale the length of lattice axes.
    :returns: a list of deformed systems
    '''
    scale = linspace(lo, hi, num=n)
    if scale_volumes:
        # Convert a volume ratio into a linear scaling of the lattice vectors.
        scale **= (1.0/3.0)
    uc = cryst.get_cell()
    # BUGFIX: the loop index previously reused the name of parameter ``n``,
    # shadowing it; renamed to ``i``.  The comprehension variable was also
    # unused, hence ``_``.
    systems = [Atoms(cryst) for _ in scale]
    for i, s in enumerate(scale):
        systems[i].set_cell(s*uc, scale_atoms=True)
    return systems
def get_vecang_cell(cryst, uc=None):
    '''
    Compute A, B, C, alpha, beta, gamma cell params
    from the unit cell matrix (uc) or cryst.
    Angles in radians.
    '''
    if uc is None:
        uc = cryst.get_cell()
    # Vector lengths, unit vectors, then inter-vector angles:
    # angle i is between vectors (i+1)%3 and (i+2)%3.
    lengths = [norm(uc[i, :]) for i in range(3)]
    unit = [uc[i, :]/lengths[i] for i in range(3)]
    angles = [acos(dot(unit[(i+1) % 3], unit[(i+2) % 3])) for i in range(3)]
    return lengths + angles
def get_cart_deformed_cell(base_cryst, axis=0, size=1):
    '''Return the cell deformed along one of the cartesian directions.

    Creates a new deformed structure. The deformation is based on the
    base structure and is performed along a single axis. The axis is
    specified as follows: 0,1,2 = x,y,z ; sheers: 3,4,5 = yz, xz, xy.
    The size of the deformation is in percent and degrees, respectively.

    :param base_cryst: structure to be deformed
    :param axis: direction of deformation
    :param size: size of the deformation
    :returns: new, deformed structure
    '''
    deformed = Atoms(base_cryst)
    strain = size/100.0
    # Start from the identity and add the strain to a single component:
    # diagonal entries for axial strains, off-diagonal for shears.
    defm = diag(ones(3))
    if axis < 3:
        defm[axis, axis] += strain
    elif axis == 3:
        defm[1, 2] += strain
    elif axis == 4:
        defm[0, 2] += strain
    else:
        defm[0, 1] += strain
    deformed.set_cell(dot(base_cryst.get_cell(), defm), scale_atoms=True)
    return deformed
def get_strain(cryst, refcell=None):
    '''Calculate strain tensor in the Voight notation.

    Computes the strain tensor in the Voight notation as a conventional
    6-vector. The calculation is done with respect to the crystal
    geometry passed in the refcell parameter.

    :param cryst: deformed structure
    :param refcell: reference, undeformed structure
    :returns: 6-vector of strain tensor in the Voight notation
    '''
    if refcell is None:
        refcell = cryst
    ref = refcell.get_cell()
    delta = cryst.get_cell() - ref
    # Displacement gradient in the reference frame, symmetrized to strain.
    grad = dot(inv(ref), delta)
    strain = (grad + grad.T)/2
    return array([strain[0, 0], strain[1, 1], strain[2, 2],
                  strain[2, 1], strain[2, 0], strain[1, 0]])
if __name__ == '__main__':
    # Demo/self-test: build an MgO rock-salt cell and exercise the
    # volume scan and elementary-deformation generators.
    from ase.spacegroup import crystal
    a = 4.194
    # MgO in space group Fm-3m (225), cubic cell a = 4.194.
    cryst = crystal(['Mg', 'O'],
                    [(0, 0, 0), (0.5, 0.5, 0.5)],
                    spacegroup=225,
                    cellpar=[a, a, a, 90, 90, 90])
    # Print the scanned volumes and their ratio to the reference volume.
    sl = scan_volumes(cryst)
    print('Volumes: ', end='')
    for c in sl:
        print('%.2f (%.1f%%)' % (c.get_volume(),
                                 100*c.get_volume()/cryst.get_volume()),
              end=' ')
    print()
    # List the deformed structures used for elastic-tensor fitting:
    # volume, cell lengths and angles for each generated deformation.
    sl = get_elementary_deformations(cryst)
    print('Structures: ')
    print(' Vol A B C alph bet gam')
    for n, c in enumerate(sl):
        print('%.4f (%5.1f%%)' % (c.get_volume(),
                                  100*c.get_volume()/cryst.get_volume()),
              end='')
        print((3*' %7.4f' + ' ' + 3*' %7.2f') %
              tuple(c.get_cell_lengths_and_angles()))
|
jochym/Elastic | elastic/elastic.py | get_cart_deformed_cell | python | def get_cart_deformed_cell(base_cryst, axis=0, size=1):
'''Return the cell deformed along one of the cartesian directions
Creates new deformed structure. The deformation is based on the
base structure and is performed along single axis. The axis is
specified as follows: 0,1,2 = x,y,z ; sheers: 3,4,5 = yz, xz, xy.
The size of the deformation is in percent and degrees, respectively.
:param base_cryst: structure to be deformed
:param axis: direction of deformation
:param size: size of the deformation
:returns: new, deformed structure
'''
cryst = Atoms(base_cryst)
uc = base_cryst.get_cell()
s = size/100.0
L = diag(ones(3))
if axis < 3:
L[axis, axis] += s
else:
if axis == 3:
L[1, 2] += s
elif axis == 4:
L[0, 2] += s
else:
L[0, 1] += s
uc = dot(uc, L)
cryst.set_cell(uc, scale_atoms=True)
# print(cryst.get_cell())
# print(uc)
return cryst | Return the cell deformed along one of the cartesian directions
Creates new deformed structure. The deformation is based on the
base structure and is performed along single axis. The axis is
specified as follows: 0,1,2 = x,y,z ; sheers: 3,4,5 = yz, xz, xy.
The size of the deformation is in percent and degrees, respectively.
:param base_cryst: structure to be deformed
:param axis: direction of deformation
:param size: size of the deformation
:returns: new, deformed structure | train | https://github.com/jochym/Elastic/blob/8daae37d0c48aab8dfb1de2839dab02314817f95/elastic/elastic.py#L669-L700 | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 1998-2017 by Paweł T. Jochym <pawel.jochym@ifj.edu.pl>
#
# This file is part of Elastic.
# Elastic is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# Elastic is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with Elastic. If not, see <http://www.gnu.org/licenses/>.
'''
.. _elastic-mod:
Elastic Module
^^^^^^^^^^^^^^
Elastic is a module for calculation of :math:`C_{ij}` components of elastic
tensor from the strain-stress relation.
The strain components here are ordered in standard way which is different
to ordering in previous versions of the code (up to 4.0).
The ordering is: :math:`u_{xx}, u_{yy}, u_{zz}, u_{yz}, u_{xz}, u_{xy}`.
The general ordering of :math:`C_{ij}` components is (except for triclinic
symmetry and taking into account customary names of constants - e.g.
:math:`C_{16} \\rightarrow C_{14}`):
.. math::
C_{11}, C_{22}, C_{33}, C_{12}, C_{13}, C_{23},
C_{44}, C_{55}, C_{66}, C_{16}, C_{26}, C_{36}, C_{45}
The functions with the name of bravais lattices define the symmetry of the
:math:`C_{ij}` matrix. The matrix is N columns by 6 rows where the columns
corespond to independent elastic constants of the given crystal, while the rows
corespond to the canonical deformations of a crystal. The elements are the
second partial derivatives of the free energy formula for the crystal written
down as a quadratic form of the deformations with respect to elastic constant
and deformation.
*Note:*
The elements for deformations :math:`u_{xy}, u_{xz}, u_{yz}`
have to be divided by 2 to properly match the usual definition
of elastic constants.
See: [LL]_ L.D. Landau, E.M. Lifszyc, "Theory of elasticity"
There is some usefull summary also at:
`ScienceWorld <http://scienceworld.wolfram.com/physics/Elasticity.html>`_
---------
'''
from __future__ import print_function, division, absolute_import
import re
from ase.atoms import Atoms
try:
# Try new release of spglib
import spglib as spg
except ImportError:
# Old naming scheme
from pyspglib import spglib as spg
from scipy.linalg import norm, lstsq
from scipy import optimize
from numpy.linalg import inv
from numpy import dot, diag, ones, reshape, linspace, array, mean
from math import acos, pi, cos, sin, sqrt
def BMEOS(v, v0, b0, b0p):
    '''Birch-Murnaghan P(V) relation: (B0/B0')*((V0/V)**B0' - 1).'''
    compression = pow(v0/v, b0p)
    return (b0/b0p)*(compression - 1)
def ctg(x):
    '''Cotangent of x (x in radians).'''
    sine = sin(x)
    return cos(x)/sine
def csc(x):
    '''Cosecant of x (x in radians): 1/sin(x).'''
    sine = sin(x)
    return 1/sine
def regular(u):
    '''
    Equation matrix generation for the regular (cubic) lattice.
    The order of constants is as follows:

    .. math::
       C_{11}, C_{12}, C_{44}

    :param u: vector of deformations:
        [ :math:`u_{xx}, u_{yy}, u_{zz}, u_{yz}, u_{xz}, u_{xy}` ]
    :returns: Symmetry defined stress-strain equation matrix
    '''
    uxx, uyy, uzz, uyz, uxz, uxy = u[:6]
    rows = [[uxx, uyy + uzz, 0],
            [uyy, uzz + uxx, 0],
            [uzz, uxx + uyy, 0],
            [0, 0, 2 * uyz],
            [0, 0, 2 * uxz],
            [0, 0, 2 * uxy]]
    return array(rows)
def tetragonal(u):
    '''
    Equation matrix generation for the tetragonal lattice.
    The order of constants is as follows:

    .. math::
       C_{11}, C_{33}, C_{12}, C_{13}, C_{44}, C_{14}

    :param u: vector of deformations:
        [ :math:`u_{xx}, u_{yy}, u_{zz}, u_{yz}, u_{xz}, u_{xy}` ]
    :returns: Symmetry defined stress-strain equation matrix
    '''
    uxx, uyy, uzz, uyz, uxz, uxy = u[:6]
    rows = [[uxx, 0, uyy, uzz, 0, 0],
            [uyy, 0, uxx, uzz, 0, 0],
            [0, uzz, 0, uxx + uyy, 0, 0],
            [0, 0, 0, 0, 0, 2 * uxy],
            [0, 0, 0, 0, 2 * uxz, 0],
            [0, 0, 0, 0, 2 * uyz, 0]]
    return array(rows)
def orthorombic(u):
    '''
    Equation matrix generation for the orthorombic lattice.
    The order of constants is as follows:

    .. math::
       C_{11}, C_{22}, C_{33}, C_{12}, C_{13}, C_{23},
       C_{44}, C_{55}, C_{66}

    :param u: vector of deformations:
        [ :math:`u_{xx}, u_{yy}, u_{zz}, u_{yz}, u_{xz}, u_{xy}` ]
    :returns: Symmetry defined stress-strain equation matrix
    '''
    uxx, uyy, uzz, uyz, uxz, uxy = u[:6]
    rows = [[uxx, 0, 0, uyy, uzz, 0, 0, 0, 0],
            [0, uyy, 0, uxx, 0, uzz, 0, 0, 0],
            [0, 0, uzz, 0, uxx, uyy, 0, 0, 0],
            [0, 0, 0, 0, 0, 0, 2 * uyz, 0, 0],
            [0, 0, 0, 0, 0, 0, 0, 2 * uxz, 0],
            [0, 0, 0, 0, 0, 0, 0, 0, 2 * uxy]]
    return array(rows)
def trigonal(u):
    '''
    Equation matrix for the trigonal lattice, built following L&L with
    the auxiliary coordinates :math:`\\xi=x+iy`, :math:`\\eta=x-iy`.
    The order of constants is as follows:

    .. math::
       C_{11}, C_{33}, C_{12}, C_{13}, C_{44}, C_{14}

    :param u: vector of deformations:
        [ :math:`u_{xx}, u_{yy}, u_{zz}, u_{yz}, u_{xz}, u_{xy}` ]
    :returns: Symmetry defined stress-strain equation matrix
    '''
    # TODO: Not tested yet on a real trigonal crystal.
    # TODO: There is still some doubt about the C_14 constant.
    uxx, uyy, uzz, uyz, uxz, uxy = u[:6]
    rows = [[uxx, 0, uyy, uzz, 0, 2 * uxz],
            [uyy, 0, uxx, uzz, 0, -2 * uxz],
            [0, uzz, 0, uxx + uyy, 0, 0],
            [0, 0, 0, 0, 2 * uyz, -4 * uxy],
            [0, 0, 0, 0, 2 * uxz, 2 * (uxx - uyy)],
            [2 * uxy, 0, -2 * uxy, 0, 0, -4 * uyz]]
    return array(rows)
def hexagonal(u):
    """Stress-strain equation matrix for the hexagonal lattice.

    The matrix is constructed based on the approach from L&L using
    auxiliary coordinates :math:`\\xi=x+iy`, :math:`\\eta=x-iy`.
    The column order of elastic constants is:

    .. math::
        C_{11}, C_{33}, C_{12}, C_{13}, C_{44}

    :param u: deformation vector
        [uxx, uyy, uzz, uyz, uxz, uxy]
    :returns: symmetry-defined stress-strain equation matrix (6x5)
    """
    # TODO: Still needs good verification
    uxx, uyy, uzz, uyz, uxz, uxy = u[0], u[1], u[2], u[3], u[4], u[5]
    # Doubled shear components (engineering shear strains).
    syz, sxz, sxy = 2*uyz, 2*uxz, 2*uxy
    return array(
        [[uxx, 0, uyy, uzz, 0],
         [uyy, 0, uxx, uzz, 0],
         [0, uzz, 0, uxx + uyy, 0],
         [0, 0, 0, 0, syz],
         [0, 0, 0, 0, sxz],
         [sxy, 0, -sxy, 0, 0]])
def monoclinic(u):
    """Stress-strain equation matrix for the monoclinic lattice.

    The column order of elastic constants is:

    .. math::
        C_{11}, C_{22}, C_{33}, C_{12}, C_{13}, C_{23},
        C_{44}, C_{55}, C_{66}, C_{16}, C_{26}, C_{36}, C_{45}

    :param u: deformation vector
        [uxx, uyy, uzz, uyz, uxz, uxy]
    :returns: symmetry-defined stress-strain equation matrix (6x13)
    """
    uxx, uyy, uzz, uyz, uxz, uxy = u[0], u[1], u[2], u[3], u[4], u[5]
    # Doubled shear components (engineering shear strains).
    syz, sxz, sxy = 2*uyz, 2*uxz, 2*uxy
    return array(
        [[uxx, 0, 0, uyy, uzz, 0, 0, 0, 0, uxy, 0, 0, 0],
         [0, uyy, 0, uxx, 0, uzz, 0, 0, 0, 0, uxy, 0, 0],
         [0, 0, uzz, 0, uxx, uyy, 0, 0, 0, 0, 0, uxy, 0],
         [0, 0, 0, 0, 0, 0, syz, 0, 0, 0, 0, 0, uxz],
         [0, 0, 0, 0, 0, 0, 0, sxz, 0, 0, 0, 0, uyz],
         [0, 0, 0, 0, 0, 0, 0, 0, sxy, uxx, uyy, uzz, 0]])
def triclinic(u):
    """Stress-strain equation matrix for triclinic crystals.

    *Note*: This was never tested on the real case. Beware!
    Based on the monoclinic matrix; if you have test cases for this
    symmetry send them to the author.
    The column order of elastic constants is:

    .. math::
        C_{11}, C_{22}, C_{33},
        C_{12}, C_{13}, C_{23},
        C_{44}, C_{55}, C_{66},
        C_{16}, C_{26}, C_{36}, C_{46}, C_{56},
        C_{14}, C_{15}, C_{25}, C_{45}

    :param u: deformation vector
        [uxx, uyy, uzz, uyz, uxz, uxy]
    :returns: symmetry-defined stress-strain equation matrix (6x18)
    """
    uxx, uyy, uzz, uyz, uxz, uxy = u[0], u[1], u[2], u[3], u[4], u[5]
    # Doubled shear components (engineering shear strains).
    syz, sxz, sxy = 2*uyz, 2*uxz, 2*uxy
    return array(
        [[uxx, 0, 0, uyy, uzz, 0, 0, 0, 0, uxy, 0, 0, 0, 0, uyz, uxz, 0, 0],
         [0, uyy, 0, uxx, 0, uzz, 0, 0, 0, 0, uxy, 0, 0, 0, 0, 0, uxz, 0],
         [0, 0, uzz, 0, uxx, uyy, 0, 0, 0, 0, 0, uxy, 0, 0, 0, 0, 0, 0],
         [0, 0, 0, 0, 0, 0, syz, 0, 0, 0, 0, 0, uxy, 0, uxx, 0, 0, uxz],
         [0, 0, 0, 0, 0, 0, 0, sxz, 0, 0, 0, 0, 0, uxy, 0, uxx, uyy, uyz],
         [0, 0, 0, 0, 0, 0, 0, 0, sxy, uxx, uyy, uzz, uyz, uxz, 0, 0, 0, 0]])
def get_cij_order(cryst):
    """Return the ordering of elastic constants for the structure.

    :param cryst: ASE Atoms object
    :returns: order of elastic constants as a tuple of strings: C_ij
    """
    # Trigonal order is shared by lattice types 4 and 5;
    # the hexagonal order is its first five entries.
    trig = ('C_11', 'C_33', 'C_12', 'C_13', 'C_44', 'C_14')
    orders = {
        1: ('C_11', 'C_22', 'C_33', 'C_12', 'C_13', 'C_23',
            'C_44', 'C_55', 'C_66', 'C_16', 'C_26', 'C_36',
            'C_46', 'C_56', 'C_14', 'C_15', 'C_25', 'C_45'),
        2: ('C_11', 'C_22', 'C_33', 'C_12', 'C_13', 'C_23',
            'C_44', 'C_55', 'C_66', 'C_16', 'C_26', 'C_36', 'C_45'),
        3: ('C_11', 'C_22', 'C_33', 'C_12', 'C_13', 'C_23',
            'C_44', 'C_55', 'C_66'),
        4: trig,
        5: trig,
        6: trig[:5],
        7: ('C_11', 'C_12', 'C_44'),
    }
    lattice_number = get_lattice_type(cryst)[0]
    return orders[lattice_number]
def get_lattice_type(cryst):
    """Find the symmetry of the crystal using the spglib symmetry finder.

    Derive the space group name and number from the spglib result.
    Based on the group number identify also the lattice type and the
    Bravais lattice of the crystal. The lattice type numbers are
    (the numbering starts from 1):
    Triclinic (1), Monoclinic (2), Orthorombic (3),
    Tetragonal (4), Trigonal (5), Hexagonal (6), Cubic (7)

    :param cryst: ASE Atoms object
    :returns: tuple (lattice type number (1-7), lattice name,
        space group name, space group number)
    """
    # Upper (exclusive) space-group-number bound for each lattice type.
    bounds = ((3, "Triclinic"),
              (16, "Monoclinic"),
              (75, "Orthorombic"),
              (143, "Tetragonal"),
              (168, "Trigonal"),
              (195, "Hexagonal"),
              (231, "Cubic"))
    sg = spg.get_spacegroup(cryst)
    # spglib returns e.g. 'Fm-3m (225)' - split name and number.
    m = re.match(r'([A-Z].*\b)\s*\(([0-9]*)\)', sg)
    sg_name = m.group(1)
    sg_nr = int(m.group(2))
    for lattype, (limit, bravais) in enumerate(bounds, start=1):
        if sg_nr < limit:
            break
    return lattype, bravais, sg_name, sg_nr
def get_bulk_modulus(cryst):
    """Return the bulk modulus from the Birch-Murnaghan EOS.

    The EOS must be previously calculated by the get_BM_EOS routine;
    the returned value is the :math:`B_0` coefficient of the B-M EOS.
    The units of the result are defined by ASE. To get the result in
    any particular units (e.g. GPa) divide it by ase.units.<unit>::

        get_bulk_modulus(cryst)/ase.units.GPa

    :param cryst: ASE Atoms object
    :returns: float, bulk modulus :math:`B_0` in ASE units
    :raises RuntimeError: if the B-M EOS data is missing on cryst
    """
    eos = getattr(cryst, 'bm_eos', None)
    if eos is None:
        raise RuntimeError('Missing B-M EOS data.')
    cryst.bulk_modulus = eos[1]
    return cryst.bulk_modulus
def get_pressure(s):
    """Return *external* isotropic (hydrostatic) pressure in ASE units.

    If the pressure is positive the system is under external pressure.
    This is a convenience function converting the output of the
    get_stress method into external pressure.

    :param s: stress tensor in Voigt (vector) notation as returned by
        the get_stress() method
    :returns: float, external hydrostatic pressure in ASE units
    """
    diagonal = s[:3]
    return -mean(diagonal)
def get_BM_EOS(cryst, systems):
    """Calculate Birch-Murnaghan Equation of State for the crystal.

    The B-M equation of state is defined by:

    .. math::
        P(V)= \\frac{B_0}{B'_0}\\left[
            \\left({\\frac{V}{V_0}}\\right)^{-B'_0} - 1
        \\right]

    Its coefficients are estimated by a least-squares fit of the B-M
    EOS to the pressures and volumes computed for the single-point
    structures in the systems list (generated e.g. by scan_volumes).
    The data from the calculation and fit is stored in the bm_eos and
    pv members of cryst for future reference. You have to provide
    properly optimized structures in cryst and systems list.

    :param cryst: Atoms object, basic structure
    :param systems: a list of calculated structures
    :returns: fitted EOS parameters :math:`V_0, B_0, B_0'`
    :raises RuntimeError: if the least-squares fit fails
    """
    pvdat = array([[r.get_volume(),
                    get_pressure(r.get_stress()),
                    norm(r.get_cell()[:, 0]),
                    norm(r.get_cell()[:, 1]),
                    norm(r.get_cell()[:, 2])] for r in systems]).T

    # Estimate the initial guess assuming b0p=1
    # Limiting volumes
    v1 = min(pvdat[0])
    v2 = max(pvdat[0])
    # The pressure is falling with the growing volume
    p2 = min(pvdat[1])
    p1 = max(pvdat[1])
    b0 = (p1*v1 - p2*v2)/(v2 - v1)
    v0 = v1*(p1 + b0)/b0

    # Initial guess
    p0 = [v0, b0, 1]

    # Fitting
    try:
        fit, _ = optimize.curve_fit(BMEOS, pvdat[0], pvdat[1], p0)
    except (ValueError, RuntimeError, optimize.OptimizeWarning) as ex:
        # Chain the original exception so the cause of the failed
        # fit is not lost (the original code silently discarded it).
        raise RuntimeError('Calculation failed') from ex
    cryst.bm_eos = fit
    cryst.pv = pvdat
    return cryst.bm_eos
def get_elementary_deformations(cryst, n=5, d=2):
    """Generate elementary deformations for elastic tensor calculation.

    The deformations are created based on the symmetry of the crystal
    and are limited to the non-equivalent axes of the crystal.

    :param cryst: Atoms object, basic structure
    :param n: integer, number of deformations per non-equivalent axis
    :param d: float, maximum deformation in percent (axes) and
        degrees (angles)
    :returns: list of deformed structures
    """
    # Deformation look-up table: deformed axes and symmetry matrix
    # generator per Bravais lattice. Perhaps the number of deformations
    # for the trigonal system could be reduced to [0, 3] but better
    # safe than sorry.
    deform = {
        "Cubic": [[0, 3], regular],
        "Hexagonal": [[0, 2, 3, 5], hexagonal],
        "Trigonal": [[0, 1, 2, 3, 4, 5], trigonal],
        "Tetragonal": [[0, 2, 3, 5], tetragonal],
        "Orthorombic": [[0, 1, 2, 3, 4, 5], orthorombic],
        "Monoclinic": [[0, 1, 2, 3, 4, 5], monoclinic],
        "Triclinic": [[0, 1, 2, 3, 4, 5], triclinic]
    }
    brav = get_lattice_type(cryst)[1]
    # Decide which deformations should be used
    axis, symm = deform[brav]
    systems = []
    for a in axis:
        if a < 3:
            # Axial deformation: scan symmetrically around zero.
            sizes = linspace(-d, d, n)
        else:
            # Shear deformation: skip the zero angle.
            sizes = linspace(d/10.0, d, n)
        for dx in sizes:
            systems.append(get_cart_deformed_cell(cryst, axis=a, size=dx))
    return systems
def get_elastic_tensor(cryst, systems):
    '''Calculate elastic tensor of the crystal.

    The elastic tensor is calculated from the stress-strain relation
    and derived by fitting this relation to the set of linear equations
    build from the symmetry of the crystal and strains and stresses
    of the set of elementary deformations of the unit cell.

    It is assumed that the crystal is converged and optimized
    under intended pressure/stress. The geometry and stress on the
    cryst is taken as the reference point. No additional optimization
    will be run. Structures in cryst and systems list must have calculated
    stresses. The function returns tuple of :math:`C_{ij}` elastic tensor,
    raw Birch coefficients :math:`B_{ij}` and fitting results: residuals,
    solution rank, singular values returned by numpy.linalg.lstsq.

    :param cryst: Atoms object, basic structure
    :param systems: list of Atoms object with calculated deformed structures
    :returns: tuple(:math:`C_{ij}` float vector,
        tuple(:math:`B_{ij}` float vector, residuals, solution rank, singular values))
    '''
    # Deformation look-up table
    # Perhaps the number of deformations for trigonal
    # system could be reduced to [0,3] but better safe then sorry
    deform = {
        "Cubic": [[0, 3], regular],
        "Hexagonal": [[0, 2, 3, 5], hexagonal],
        "Trigonal": [[0, 1, 2, 3, 4, 5], trigonal],
        "Tetragonal": [[0, 2, 3, 5], tetragonal],
        "Orthorombic": [[0, 1, 2, 3, 4, 5], orthorombic],
        "Monoclinic": [[0, 1, 2, 3, 4, 5], monoclinic],
        "Triclinic": [[0, 1, 2, 3, 4, 5], triclinic]
    }
    lattyp, brav, sg_name, sg_nr = get_lattice_type(cryst)
    # Decide which deformations should be used
    axis, symm = deform[brav]
    ul = []
    sl = []
    # Ambient (reference) hydrostatic pressure of the undeformed crystal.
    p = get_pressure(cryst.get_stress())
    for g in systems:
        # Strain of each deformed system relative to the reference cell.
        ul.append(get_strain(g, refcell=cryst))
        # Remove the ambient pressure from the stress tensor
        sl.append(g.get_stress()-array([p, p, p, 0, 0, 0]))
    # print(symm, ul)
    # Stack the per-system symmetry equation matrices into one
    # over-determined linear system: eqm @ Bij = slm.
    eqm = array([symm(u) for u in ul])
    # print(eqm)
    # print(eqm[0].shape, eqm.shape)
    eqm = reshape(eqm, (eqm.shape[0]*eqm.shape[1], eqm.shape[2]))
    # print(eqm)
    slm = reshape(array(sl), (-1,))
    # print(eqm.shape, slm.shape)
    # print(slm)
    # Least-squares solution yields the raw Birch coefficients B_ij.
    Bij = lstsq(eqm, slm)
    # print(Bij[0] / units.GPa)
    # Calculate elastic constants from Birch coeff.
    # The correction term depends on the symmetry-specific ordering
    # of the constants, hence the per-symmetry branches below.
    # TODO: Check the sign of the pressure array in the B <=> C relation
    if (symm == orthorombic):
        Cij = Bij[0] - array([-p, -p, -p, p, p, p, -p, -p, -p])
    elif (symm == tetragonal):
        Cij = Bij[0] - array([-p, -p, p, p, -p, -p])
    elif (symm == regular):
        Cij = Bij[0] - array([-p, p, -p])
    elif (symm == trigonal):
        Cij = Bij[0] - array([-p, -p, p, p, -p, p])
    elif (symm == hexagonal):
        Cij = Bij[0] - array([-p, -p, p, p, -p])
    elif (symm == monoclinic):
        # TODO: verify this pressure array
        Cij = Bij[0] - array([-p, -p, -p, p, p, p, -p, -p, -p, p, p, p, p])
    elif (symm == triclinic):
        # TODO: verify this pressure array
        Cij = Bij[0] - array([-p, -p, -p, p, p, p, -p, -p, -p,
                              p, p, p, p, p, p, p, p, p])
    return Cij, Bij
def scan_pressures(cryst, lo, hi, n=5, eos=None):
    """Scan the pressure axis from lo to hi (inclusive) using the
    B-M EOS as the volume predictor.

    :param cryst: Atoms object, basic structure
    :param lo: lower bound of the pressure scan (GPa)
    :param hi: upper bound of the pressure scan (GPa)
    :param n: number of sample points
    :param eos: B-M EOS parameters [V_0, B_0, B_0'] (required)
    :returns: list of scaled structures
    :raises RuntimeError: if eos is None
    """

    def invbmeos(b, bp, x):
        # Inverse B-M EOS to get relative volumes from pressures.
        # This works only in the limited pressure range p > -B/B'.
        # Warning! Relative - the V0 prefactor is removed.
        return array([pow(b/(bp*xv + b), 1/(3*bp)) for xv in x])

    if eos is None:
        raise RuntimeError('Required EOS data missing')

    # Limit negative pressures to 90% of the singularity value.
    # Beyond this B-M EOS is bound to be wrong anyway.
    lo = max(lo, -0.9*eos[1]/eos[2])

    scale = (eos[0]/cryst.get_volume())*invbmeos(eos[1], eos[2],
                                                 linspace(lo, hi, num=n))
    uc = cryst.get_cell()
    # Fix: the original reused the parameter name `n` as the loop
    # variable, shadowing it; use a fresh index instead.
    systems = [Atoms(cryst) for _ in scale]
    for i, s in enumerate(scale):
        systems[i].set_cell(s*uc, scale_atoms=True)
    return systems
def scan_volumes(cryst, lo=0.98, hi=1.02, n=5, scale_volumes=True):
    """Provide a set of crystals along the volume axis from lo to hi
    (inclusive).

    No volume cell optimization is performed. Bounds are specified as
    fractions (1.10 = 10% increase). If scale_volumes==False the
    scaling is applied to lattice vectors instead of volumes.

    :param cryst: Atoms object, basic structure
    :param lo: lower bound of the V/V_0 in the scan
    :param hi: upper bound of the V/V_0 in the scan
    :param n: number of volume sample points
    :param scale_volumes: If True scale the unit cell volume or,
        if False, scale the length of lattice axes.
    :returns: a list of deformed systems
    """
    scale = linspace(lo, hi, num=n)
    if scale_volumes:
        # Convert volume scaling factors to linear (axis) scaling.
        scale **= (1.0/3.0)
    uc = cryst.get_cell()
    # Fix: the original reused the parameter name `n` as the loop
    # variable, shadowing it; use a fresh index instead.
    systems = [Atoms(cryst) for _ in scale]
    for i, s in enumerate(scale):
        systems[i].set_cell(s*uc, scale_atoms=True)
    return systems
def get_vecang_cell(cryst, uc=None):
    """Compute the A, B, C, alpha, beta, gamma cell parameters from the
    unit cell matrix (uc) or cryst. Angles are in radians.

    :param cryst: structure providing get_cell() (used when uc is None)
    :param uc: optional 3x3 unit cell matrix
    :returns: list [A, B, C, alpha, beta, gamma]
    """
    if uc is None:
        uc = cryst.get_cell()
    lengths = [norm(uc[i, :]) for i in range(3)]
    versors = [uc[i, :]/norm(uc[i, :]) for i in range(3)]
    # Angle i is the angle between the two *other* cell vectors.
    angles = [acos(dot(versors[(i+1) % 3], versors[(i+2) % 3]))
              for i in range(3)]
    return lengths + angles
def get_deformed_cell(base_cryst, axis=0, size=1):
    """Return the cell (with atoms) deformed along one cell parameter.

    The axis number selects the deformed parameter:
    0, 1, 2 = a, b, c cell vectors; 3, 4, 5 = alpha, beta, gamma angles.

    :param base_cryst: Atoms object, structure to deform
    :param axis: deformation axis/angle selector (0-5)
    :param size: deformation in percent (axes) or degrees (angles)
    :returns: new Atoms object with the deformed cell
    :raises ValueError: if the deformed angles are geometrically
        impossible (would produce imaginary coordinates)
    """
    cryst = Atoms(base_cryst)
    uc = base_cryst.get_cell()
    if axis < 3:
        uc[axis, :] = (1 + size/100.0)*uc[axis, :]
    else:
        (a, b, c, alp, bet, gam) = get_vecang_cell(cryst)
        d = array([0.0, 0.0, 0.0])
        d[axis-3] = pi*size/180
        (alp, bet, gam) = array((alp, bet, gam))+d
        # t < 0 means the three angles cannot form a real lattice.
        t = 1 - (ctg(bet)*ctg(gam)-cos(alp)*csc(bet)*csc(gam))**2
        if t < 0.0:
            # Fix: raise with the diagnostic in the message instead of
            # printing it and raising a bare ValueError (same exception
            # type, so existing callers are unaffected).
            raise ValueError(
                'The parameters (alpha,beta,gamma)=(%f,%f,%f) are '
                'probably incorrect and lead to imaginary coordinates. '
                'This range of parameters is unsupported by this program '
                '(and is, let me say, very strange for a crystal).'
                % (alp, bet, gam))
        uc = [[a, 0.0, 0.0],
              [b*cos(gam), b*sin(gam), 0],
              [c*cos(bet),
               c*(cos(alp)/sin(gam) - cos(bet)*ctg(gam)),
               c*sin(bet)*sqrt(t)]]
    cryst.set_cell(uc, scale_atoms=True)
    return cryst
def get_strain(cryst, refcell=None):
    """Calculate the strain tensor in the Voigt notation.

    Computes the strain tensor as a conventional 6-vector with respect
    to the crystal geometry passed in the refcell parameter.

    :param cryst: deformed structure
    :param refcell: reference, undeformed structure (defaults to cryst)
    :returns: 6-vector of the strain tensor in the Voigt notation
    """
    if refcell is None:
        refcell = cryst
    ref = refcell.get_cell()
    # Symmetrized displacement gradient: (M^-1 du + (M^-1 du)^T)/2
    grad = dot(inv(ref), cryst.get_cell() - ref)
    sym = (grad + grad.T)/2
    return array([sym[0, 0], sym[1, 1], sym[2, 2],
                  sym[2, 1], sym[2, 0], sym[1, 0]])
if __name__ == '__main__':
    # Demonstration run: build an MgO (rock salt, Fm-3m) crystal and
    # print the volume scan and the symmetry-derived elementary
    # deformations generated for it.
    from ase.spacegroup import crystal
    # MgO lattice constant in Angstrom.
    a = 4.194
    cryst = crystal(['Mg', 'O'],
                    [(0, 0, 0), (0.5, 0.5, 0.5)],
                    spacegroup=225,
                    cellpar=[a, a, a, 90, 90, 90])
    # Volume scan around the equilibrium cell (default +/-2%).
    sl = scan_volumes(cryst)
    print('Volumes: ', end='')
    for c in sl:
        print('%.2f (%.1f%%)' % (c.get_volume(),
                                 100*c.get_volume()/cryst.get_volume()),
              end=' ')
    print()
    # Elementary deformations chosen from the crystal symmetry.
    sl = get_elementary_deformations(cryst)
    print('Structures: ')
    print(' Vol A B C alph bet gam')
    for n, c in enumerate(sl):
        # Volume followed by the six cell parameters of each structure.
        print('%.4f (%5.1f%%)' % (c.get_volume(),
                                  100*c.get_volume()/cryst.get_volume()),
              end='')
        print((3*' %7.4f' + ' ' + 3*' %7.2f') %
              tuple(c.get_cell_lengths_and_angles()))
|
jochym/Elastic | elastic/elastic.py | get_strain | python | def get_strain(cryst, refcell=None):
'''Calculate strain tensor in the Voight notation
Computes the strain tensor in the Voight notation as a conventional
6-vector. The calculation is done with respect to the crystal
geometry passed in refcell parameter.
:param cryst: deformed structure
:param refcell: reference, undeformed structure
:returns: 6-vector of strain tensor in the Voight notation
'''
if refcell is None:
refcell = cryst
du = cryst.get_cell()-refcell.get_cell()
m = refcell.get_cell()
m = inv(m)
u = dot(m, du)
u = (u+u.T)/2
return array([u[0, 0], u[1, 1], u[2, 2], u[2, 1], u[2, 0], u[1, 0]]) | Calculate strain tensor in the Voight notation
Computes the strain tensor in the Voight notation as a conventional
6-vector. The calculation is done with respect to the crystal
geometry passed in refcell parameter.
:param cryst: deformed structure
:param refcell: reference, undeformed structure
:returns: 6-vector of strain tensor in the Voight notation | train | https://github.com/jochym/Elastic/blob/8daae37d0c48aab8dfb1de2839dab02314817f95/elastic/elastic.py#L703-L722 | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 1998-2017 by Paweł T. Jochym <pawel.jochym@ifj.edu.pl>
#
# This file is part of Elastic.
# Elastic is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# Elastic is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with Elastic. If not, see <http://www.gnu.org/licenses/>.
'''
.. _elastic-mod:
Elastic Module
^^^^^^^^^^^^^^
Elastic is a module for calculation of :math:`C_{ij}` components of elastic
tensor from the strain-stress relation.
The strain components here are ordered in standard way which is different
to ordering in previous versions of the code (up to 4.0).
The ordering is: :math:`u_{xx}, u_{yy}, u_{zz}, u_{yz}, u_{xz}, u_{xy}`.
The general ordering of :math:`C_{ij}` components is (except for triclinic
symmetry and taking into account customary names of constants - e.g.
:math:`C_{16} \\rightarrow C_{14}`):
.. math::
C_{11}, C_{22}, C_{33}, C_{12}, C_{13}, C_{23},
C_{44}, C_{55}, C_{66}, C_{16}, C_{26}, C_{36}, C_{45}
The functions with the name of bravais lattices define the symmetry of the
:math:`C_{ij}` matrix. The matrix is N columns by 6 rows where the columns
corespond to independent elastic constants of the given crystal, while the rows
corespond to the canonical deformations of a crystal. The elements are the
second partial derivatives of the free energy formula for the crystal written
down as a quadratic form of the deformations with respect to elastic constant
and deformation.
*Note:*
The elements for deformations :math:`u_{xy}, u_{xz}, u_{yz}`
have to be divided by 2 to properly match the usual definition
of elastic constants.
See: [LL]_ L.D. Landau, E.M. Lifszyc, "Theory of elasticity"
There is some usefull summary also at:
`ScienceWorld <http://scienceworld.wolfram.com/physics/Elasticity.html>`_
---------
'''
from __future__ import print_function, division, absolute_import
import re
from ase.atoms import Atoms
try:
# Try new release of spglib
import spglib as spg
except ImportError:
# Old naming scheme
from pyspglib import spglib as spg
from scipy.linalg import norm, lstsq
from scipy import optimize
from numpy.linalg import inv
from numpy import dot, diag, ones, reshape, linspace, array, mean
from math import acos, pi, cos, sin, sqrt
def BMEOS(v, v0, b0, b0p):
    """Birch-Murnaghan pressure-volume equation of state P(V).

    :param v: volume
    :param v0: equilibrium volume
    :param b0: bulk modulus at v0
    :param b0p: pressure derivative of the bulk modulus
    :returns: pressure at volume v
    """
    ratio = v0 / v
    return b0 * (ratio ** b0p - 1) / b0p
def ctg(x):
    """Return the cotangent of x (x in radians)."""
    c, s = cos(x), sin(x)
    return c / s
def csc(x):
    """Return the cosecant of x (x in radians)."""
    return 1.0 / sin(x)
def regular(u):
    """Stress-strain equation matrix for the regular (cubic) lattice.

    The column order of elastic constants is:

    .. math::
        C_{11}, C_{12}, C_{44}

    :param u: deformation vector
        [uxx, uyy, uzz, uyz, uxz, uxy]
    :returns: symmetry-defined stress-strain equation matrix (6x3)
    """
    uxx, uyy, uzz, uyz, uxz, uxy = u[0], u[1], u[2], u[3], u[4], u[5]
    # Doubled shear components (engineering shear strains).
    syz, sxz, sxy = 2*uyz, 2*uxz, 2*uxy
    return array([[uxx, uyy + uzz, 0],
                  [uyy, uzz + uxx, 0],
                  [uzz, uxx + uyy, 0],
                  [0, 0, syz],
                  [0, 0, sxz],
                  [0, 0, sxy]])
def tetragonal(u):
    """Stress-strain equation matrix for the tetragonal lattice.

    The column order of elastic constants is:

    .. math::
        C_{11}, C_{33}, C_{12}, C_{13}, C_{44}, C_{14}

    :param u: deformation vector
        [uxx, uyy, uzz, uyz, uxz, uxy]
    :returns: symmetry-defined stress-strain equation matrix (6x6)
    """
    uxx, uyy, uzz, uyz, uxz, uxy = u[0], u[1], u[2], u[3], u[4], u[5]
    # Doubled shear components (engineering shear strains).
    syz, sxz, sxy = 2*uyz, 2*uxz, 2*uxy
    return array([[uxx, 0, uyy, uzz, 0, 0],
                  [uyy, 0, uxx, uzz, 0, 0],
                  [0, uzz, 0, uxx + uyy, 0, 0],
                  [0, 0, 0, 0, 0, sxy],
                  [0, 0, 0, 0, sxz, 0],
                  [0, 0, 0, 0, syz, 0]])
def orthorombic(u):
    """Stress-strain equation matrix for the orthorombic lattice.

    The column order of elastic constants is:

    .. math::
        C_{11}, C_{22}, C_{33}, C_{12}, C_{13}, C_{23},
        C_{44}, C_{55}, C_{66}

    :param u: deformation vector
        [uxx, uyy, uzz, uyz, uxz, uxy]
    :returns: symmetry-defined stress-strain equation matrix (6x9)
    """
    uxx, uyy, uzz, uyz, uxz, uxy = u[0], u[1], u[2], u[3], u[4], u[5]
    # Doubled shear components (engineering shear strains).
    syz, sxz, sxy = 2*uyz, 2*uxz, 2*uxy
    return array(
        [[uxx, 0, 0, uyy, uzz, 0, 0, 0, 0],
         [0, uyy, 0, uxx, 0, uzz, 0, 0, 0],
         [0, 0, uzz, 0, uxx, uyy, 0, 0, 0],
         [0, 0, 0, 0, 0, 0, syz, 0, 0],
         [0, 0, 0, 0, 0, 0, 0, sxz, 0],
         [0, 0, 0, 0, 0, 0, 0, 0, sxy]])
def trigonal(u):
    """Stress-strain equation matrix for the trigonal lattice.

    The matrix is constructed based on the approach from L&L using
    auxiliary coordinates :math:`\\xi=x+iy`, :math:`\\eta=x-iy`.
    The column order of elastic constants is:

    .. math::
        C_{11}, C_{33}, C_{12}, C_{13}, C_{44}, C_{14}

    :param u: deformation vector
        [uxx, uyy, uzz, uyz, uxz, uxy]
    :returns: symmetry-defined stress-strain equation matrix (6x6)
    """
    # TODO: Not tested yet.
    # TODO: There is still some doubt about the C_14 constant.
    uxx, uyy, uzz, uyz, uxz, uxy = u[0], u[1], u[2], u[3], u[4], u[5]
    # Doubled shear components (engineering shear strains).
    syz, sxz, sxy = 2*uyz, 2*uxz, 2*uxy
    return array(
        [[uxx, 0, uyy, uzz, 0, sxz],
         [uyy, 0, uxx, uzz, 0, -sxz],
         [0, uzz, 0, uxx + uyy, 0, 0],
         [0, 0, 0, 0, syz, -2*sxy],
         [0, 0, 0, 0, sxz, 2*(uxx - uyy)],
         [sxy, 0, -sxy, 0, 0, -2*syz]])
def hexagonal(u):
    """Stress-strain equation matrix for the hexagonal lattice.

    The matrix is constructed based on the approach from L&L using
    auxiliary coordinates :math:`\\xi=x+iy`, :math:`\\eta=x-iy`.
    The column order of elastic constants is:

    .. math::
        C_{11}, C_{33}, C_{12}, C_{13}, C_{44}

    :param u: deformation vector
        [uxx, uyy, uzz, uyz, uxz, uxy]
    :returns: symmetry-defined stress-strain equation matrix (6x5)
    """
    # TODO: Still needs good verification
    uxx, uyy, uzz, uyz, uxz, uxy = u[0], u[1], u[2], u[3], u[4], u[5]
    # Doubled shear components (engineering shear strains).
    syz, sxz, sxy = 2*uyz, 2*uxz, 2*uxy
    return array(
        [[uxx, 0, uyy, uzz, 0],
         [uyy, 0, uxx, uzz, 0],
         [0, uzz, 0, uxx + uyy, 0],
         [0, 0, 0, 0, syz],
         [0, 0, 0, 0, sxz],
         [sxy, 0, -sxy, 0, 0]])
def monoclinic(u):
    """Stress-strain equation matrix for the monoclinic lattice.

    The column order of elastic constants is:

    .. math::
        C_{11}, C_{22}, C_{33}, C_{12}, C_{13}, C_{23},
        C_{44}, C_{55}, C_{66}, C_{16}, C_{26}, C_{36}, C_{45}

    :param u: deformation vector
        [uxx, uyy, uzz, uyz, uxz, uxy]
    :returns: symmetry-defined stress-strain equation matrix (6x13)
    """
    uxx, uyy, uzz, uyz, uxz, uxy = u[0], u[1], u[2], u[3], u[4], u[5]
    # Doubled shear components (engineering shear strains).
    syz, sxz, sxy = 2*uyz, 2*uxz, 2*uxy
    return array(
        [[uxx, 0, 0, uyy, uzz, 0, 0, 0, 0, uxy, 0, 0, 0],
         [0, uyy, 0, uxx, 0, uzz, 0, 0, 0, 0, uxy, 0, 0],
         [0, 0, uzz, 0, uxx, uyy, 0, 0, 0, 0, 0, uxy, 0],
         [0, 0, 0, 0, 0, 0, syz, 0, 0, 0, 0, 0, uxz],
         [0, 0, 0, 0, 0, 0, 0, sxz, 0, 0, 0, 0, uyz],
         [0, 0, 0, 0, 0, 0, 0, 0, sxy, uxx, uyy, uzz, 0]])
def triclinic(u):
    """Stress-strain equation matrix for triclinic crystals.

    *Note*: This was never tested on the real case. Beware!
    Based on the monoclinic matrix; if you have test cases for this
    symmetry send them to the author.
    The column order of elastic constants is:

    .. math::
        C_{11}, C_{22}, C_{33},
        C_{12}, C_{13}, C_{23},
        C_{44}, C_{55}, C_{66},
        C_{16}, C_{26}, C_{36}, C_{46}, C_{56},
        C_{14}, C_{15}, C_{25}, C_{45}

    :param u: deformation vector
        [uxx, uyy, uzz, uyz, uxz, uxy]
    :returns: symmetry-defined stress-strain equation matrix (6x18)
    """
    uxx, uyy, uzz, uyz, uxz, uxy = u[0], u[1], u[2], u[3], u[4], u[5]
    # Doubled shear components (engineering shear strains).
    syz, sxz, sxy = 2*uyz, 2*uxz, 2*uxy
    return array(
        [[uxx, 0, 0, uyy, uzz, 0, 0, 0, 0, uxy, 0, 0, 0, 0, uyz, uxz, 0, 0],
         [0, uyy, 0, uxx, 0, uzz, 0, 0, 0, 0, uxy, 0, 0, 0, 0, 0, uxz, 0],
         [0, 0, uzz, 0, uxx, uyy, 0, 0, 0, 0, 0, uxy, 0, 0, 0, 0, 0, 0],
         [0, 0, 0, 0, 0, 0, syz, 0, 0, 0, 0, 0, uxy, 0, uxx, 0, 0, uxz],
         [0, 0, 0, 0, 0, 0, 0, sxz, 0, 0, 0, 0, 0, uxy, 0, uxx, uyy, uyz],
         [0, 0, 0, 0, 0, 0, 0, 0, sxy, uxx, uyy, uzz, uyz, uxz, 0, 0, 0, 0]])
def get_cij_order(cryst):
    """Return the ordering of elastic constants for the structure.

    :param cryst: ASE Atoms object
    :returns: order of elastic constants as a tuple of strings: C_ij
    """
    # Trigonal order is shared by lattice types 4 and 5;
    # the hexagonal order is its first five entries.
    trig = ('C_11', 'C_33', 'C_12', 'C_13', 'C_44', 'C_14')
    orders = {
        1: ('C_11', 'C_22', 'C_33', 'C_12', 'C_13', 'C_23',
            'C_44', 'C_55', 'C_66', 'C_16', 'C_26', 'C_36',
            'C_46', 'C_56', 'C_14', 'C_15', 'C_25', 'C_45'),
        2: ('C_11', 'C_22', 'C_33', 'C_12', 'C_13', 'C_23',
            'C_44', 'C_55', 'C_66', 'C_16', 'C_26', 'C_36', 'C_45'),
        3: ('C_11', 'C_22', 'C_33', 'C_12', 'C_13', 'C_23',
            'C_44', 'C_55', 'C_66'),
        4: trig,
        5: trig,
        6: trig[:5],
        7: ('C_11', 'C_12', 'C_44'),
    }
    lattice_number = get_lattice_type(cryst)[0]
    return orders[lattice_number]
def get_lattice_type(cryst):
    """Find the symmetry of the crystal using the spglib symmetry finder.

    Derive the space group name and number from the spglib result.
    Based on the group number identify also the lattice type and the
    Bravais lattice of the crystal. The lattice type numbers are
    (the numbering starts from 1):
    Triclinic (1), Monoclinic (2), Orthorombic (3),
    Tetragonal (4), Trigonal (5), Hexagonal (6), Cubic (7)

    :param cryst: ASE Atoms object
    :returns: tuple (lattice type number (1-7), lattice name,
        space group name, space group number)
    """
    # Upper (exclusive) space-group-number bound for each lattice type.
    bounds = ((3, "Triclinic"),
              (16, "Monoclinic"),
              (75, "Orthorombic"),
              (143, "Tetragonal"),
              (168, "Trigonal"),
              (195, "Hexagonal"),
              (231, "Cubic"))
    sg = spg.get_spacegroup(cryst)
    # spglib returns e.g. 'Fm-3m (225)' - split name and number.
    m = re.match(r'([A-Z].*\b)\s*\(([0-9]*)\)', sg)
    sg_name = m.group(1)
    sg_nr = int(m.group(2))
    for lattype, (limit, bravais) in enumerate(bounds, start=1):
        if sg_nr < limit:
            break
    return lattype, bravais, sg_name, sg_nr
def get_bulk_modulus(cryst):
    """Return the bulk modulus from the Birch-Murnaghan EOS.

    The EOS must be previously calculated by the get_BM_EOS routine;
    the returned value is the :math:`B_0` coefficient of the B-M EOS.
    The units of the result are defined by ASE. To get the result in
    any particular units (e.g. GPa) divide it by ase.units.<unit>::

        get_bulk_modulus(cryst)/ase.units.GPa

    :param cryst: ASE Atoms object
    :returns: float, bulk modulus :math:`B_0` in ASE units
    :raises RuntimeError: if the B-M EOS data is missing on cryst
    """
    eos = getattr(cryst, 'bm_eos', None)
    if eos is None:
        raise RuntimeError('Missing B-M EOS data.')
    cryst.bulk_modulus = eos[1]
    return cryst.bulk_modulus
def get_pressure(s):
    """Return *external* isotropic (hydrostatic) pressure in ASE units.

    If the pressure is positive the system is under external pressure.
    This is a convenience function converting the output of the
    get_stress method into external pressure.

    :param s: stress tensor in Voigt (vector) notation as returned by
        the get_stress() method
    :returns: float, external hydrostatic pressure in ASE units
    """
    diagonal = s[:3]
    return -mean(diagonal)
def get_BM_EOS(cryst, systems):
    """Calculate Birch-Murnaghan Equation of State for the crystal.

    The B-M equation of state is defined by:

    .. math::
        P(V)= \\frac{B_0}{B'_0}\\left[
            \\left({\\frac{V}{V_0}}\\right)^{-B'_0} - 1
        \\right]

    Its coefficients are estimated by a least-squares fit of the B-M
    EOS to the pressures and volumes computed for the single-point
    structures in the systems list (generated e.g. by scan_volumes).
    The data from the calculation and fit is stored in the bm_eos and
    pv members of cryst for future reference. You have to provide
    properly optimized structures in cryst and systems list.

    :param cryst: Atoms object, basic structure
    :param systems: a list of calculated structures
    :returns: fitted EOS parameters :math:`V_0, B_0, B_0'`
    :raises RuntimeError: if the least-squares fit fails
    """
    pvdat = array([[r.get_volume(),
                    get_pressure(r.get_stress()),
                    norm(r.get_cell()[:, 0]),
                    norm(r.get_cell()[:, 1]),
                    norm(r.get_cell()[:, 2])] for r in systems]).T

    # Estimate the initial guess assuming b0p=1
    # Limiting volumes
    v1 = min(pvdat[0])
    v2 = max(pvdat[0])
    # The pressure is falling with the growing volume
    p2 = min(pvdat[1])
    p1 = max(pvdat[1])
    b0 = (p1*v1 - p2*v2)/(v2 - v1)
    v0 = v1*(p1 + b0)/b0

    # Initial guess
    p0 = [v0, b0, 1]

    # Fitting
    try:
        fit, _ = optimize.curve_fit(BMEOS, pvdat[0], pvdat[1], p0)
    except (ValueError, RuntimeError, optimize.OptimizeWarning) as ex:
        # Chain the original exception so the cause of the failed
        # fit is not lost (the original code silently discarded it).
        raise RuntimeError('Calculation failed') from ex
    cryst.bm_eos = fit
    cryst.pv = pvdat
    return cryst.bm_eos
def get_elementary_deformations(cryst, n=5, d=2):
    """Generate elementary deformations for elastic tensor calculation.

    The deformations are created based on the symmetry of the crystal
    and are limited to the non-equivalent axes of the crystal.

    :param cryst: Atoms object, basic structure
    :param n: integer, number of deformations per non-equivalent axis
    :param d: float, maximum deformation in percent (axes) and
        degrees (angles)
    :returns: list of deformed structures
    """
    # Deformation look-up table: deformed axes and symmetry matrix
    # generator per Bravais lattice. Perhaps the number of deformations
    # for the trigonal system could be reduced to [0, 3] but better
    # safe than sorry.
    deform = {
        "Cubic": [[0, 3], regular],
        "Hexagonal": [[0, 2, 3, 5], hexagonal],
        "Trigonal": [[0, 1, 2, 3, 4, 5], trigonal],
        "Tetragonal": [[0, 2, 3, 5], tetragonal],
        "Orthorombic": [[0, 1, 2, 3, 4, 5], orthorombic],
        "Monoclinic": [[0, 1, 2, 3, 4, 5], monoclinic],
        "Triclinic": [[0, 1, 2, 3, 4, 5], triclinic]
    }
    brav = get_lattice_type(cryst)[1]
    # Decide which deformations should be used
    axis, symm = deform[brav]
    systems = []
    for a in axis:
        if a < 3:
            # Axial deformation: scan symmetrically around zero.
            sizes = linspace(-d, d, n)
        else:
            # Shear deformation: skip the zero angle.
            sizes = linspace(d/10.0, d, n)
        for dx in sizes:
            systems.append(get_cart_deformed_cell(cryst, axis=a, size=dx))
    return systems
def get_elastic_tensor(cryst, systems):
    '''Calculate elastic tensor of the crystal.
    The elastic tensor is calculated from the stress-strain relation
    and derived by fitting this relation to the set of linear equations
    build from the symmetry of the crystal and strains and stresses
    of the set of elementary deformations of the unit cell.
    It is assumed that the crystal is converged and optimized
    under intended pressure/stress. The geometry and stress on the
    cryst is taken as the reference point. No additional optimization
    will be run. Structures in cryst and systems list must have calculated
    stresses. The function returns tuple of :math:`C_{ij}` elastic tensor,
    raw Birch coefficients :math:`B_{ij}` and fitting results: residuals,
    solution rank, singular values returned by numpy.linalg.lstsq.
    :param cryst: Atoms object, basic structure
    :param systems: list of Atoms object with calculated deformed structures
    :returns: tuple(:math:`C_{ij}` float vector,
                tuple(:math:`B_{ij}` float vector, residuals, solution rank, singular values))
    '''
    # Deformation look-up table
    # Perhaps the number of deformations for trigonal
    # system could be reduced to [0,3] but better safe then sorry
    deform = {
        "Cubic": [[0, 3], regular],
        "Hexagonal": [[0, 2, 3, 5], hexagonal],
        "Trigonal": [[0, 1, 2, 3, 4, 5], trigonal],
        "Tetragonal": [[0, 2, 3, 5], tetragonal],
        "Orthorombic": [[0, 1, 2, 3, 4, 5], orthorombic],
        "Monoclinic": [[0, 1, 2, 3, 4, 5], monoclinic],
        "Triclinic": [[0, 1, 2, 3, 4, 5], triclinic]
    }
    lattyp, brav, sg_name, sg_nr = get_lattice_type(cryst)
    # Decide which deformations should be used
    # (only the symmetry handler `symm` is used below; `axis` is unused here)
    axis, symm = deform[brav]
    ul = []  # strains of the deformed systems
    sl = []  # corresponding stresses, relative to the reference pressure
    p = get_pressure(cryst.get_stress())
    for g in systems:
        ul.append(get_strain(g, refcell=cryst))
        # Remove the ambient pressure from the stress tensor
        sl.append(g.get_stress()-array([p, p, p, 0, 0, 0]))
    # print(symm, ul)
    # Build the symmetry-reduced equation matrix, one row block per system.
    eqm = array([symm(u) for u in ul])
    # print(eqm)
    # print(eqm[0].shape, eqm.shape)
    eqm = reshape(eqm, (eqm.shape[0]*eqm.shape[1], eqm.shape[2]))
    # print(eqm)
    slm = reshape(array(sl), (-1,))
    # print(eqm.shape, slm.shape)
    # print(slm)
    # Least-squares fit of the Birch coefficients to the stress data.
    Bij = lstsq(eqm, slm)
    # print(Bij[0] / units.GPa)
    # Calculate elastic constants from Birch coeff.
    # TODO: Check the sign of the pressure array in the B <=> C relation
    if (symm == orthorombic):
        Cij = Bij[0] - array([-p, -p, -p, p, p, p, -p, -p, -p])
    elif (symm == tetragonal):
        Cij = Bij[0] - array([-p, -p, p, p, -p, -p])
    elif (symm == regular):
        Cij = Bij[0] - array([-p, p, -p])
    elif (symm == trigonal):
        Cij = Bij[0] - array([-p, -p, p, p, -p, p])
    elif (symm == hexagonal):
        Cij = Bij[0] - array([-p, -p, p, p, -p])
    elif (symm == monoclinic):
        # TODO: verify this pressure array
        Cij = Bij[0] - array([-p, -p, -p, p, p, p, -p, -p, -p, p, p, p, p])
    elif (symm == triclinic):
        # TODO: verify this pressure array
        Cij = Bij[0] - array([-p, -p, -p, p, p, p, -p, -p, -p,
                              p, p, p, p, p, p, p, p, p])
    return Cij, Bij
def scan_pressures(cryst, lo, hi, n=5, eos=None):
    '''
    Scan the pressure axis from lo to hi (inclusive)
    using B-M EOS as the volume predictor.
    Pressure (lo, hi) in GPa

    :param cryst: Atoms object, reference structure
    :param lo: lower pressure bound (GPa)
    :param hi: upper pressure bound (GPa)
    :param n: number of sample points along the pressure axis
    :param eos: Birch-Murnaghan parameters ``[V0, B0, B0']`` (required)
    :returns: list of systems scaled to the predicted volumes
    :raises RuntimeError: when eos is not provided
    '''
    # Inverse B-M EOS to get volumes from pressures
    # This will work only in limited pressure range p>-B/B'.
    # Warning! Relative, the V0 prefactor is removed.
    def invbmeos(b, bp, x):
        return array([pow(b/(bp*xv+b), 1/(3*bp)) for xv in x])
    if eos is None:
        raise RuntimeError('Required EOS data missing')
    # Limit negative pressures to 90% of the singularity value.
    # Beyond this B-M EOS is bound to be wrong anyway.
    lo = max(lo, -0.9*eos[1]/eos[2])
    scale = (eos[0]/cryst.get_volume())*invbmeos(eos[1], eos[2],
                                                 linspace(lo, hi, num=n))
    # print(scale)
    uc = cryst.get_cell()
    systems = [Atoms(cryst) for _ in scale]
    # Use a dedicated loop index: the original code reused the parameter
    # name `n` as the loop variable, silently shadowing it.
    for i, s in enumerate(scale):
        systems[i].set_cell(s*uc, scale_atoms=True)
    return systems
def scan_volumes(cryst, lo=0.98, hi=1.02, n=5, scale_volumes=True):
    '''
    Provide set of crystals along volume axis from lo to hi (inclusive).
    No volume cell optimization is performed. Bounds are specified as
    fractions (1.10 = 10% increase). If scale_volumes==False the scaling
    is applied to lattice vectors instead of volumes.
    :param lo: lower bound of the V/V_0 in the scan
    :param hi: upper bound of the V/V_0 in the scan
    :param n: number of volume sample points
    :param scale_volumes: If True scale the unit cell volume or,
                          if False, scale the length of lattice axes.
    :returns: a list of deformed systems
    '''
    scale = linspace(lo, hi, num=n)
    if scale_volumes:
        # Volume scales with the cube of the linear dimension, so take
        # the cube root to get the lattice-vector scaling factors.
        scale **= (1.0/3.0)
    uc = cryst.get_cell()
    systems = [Atoms(cryst) for _ in scale]
    # Use a dedicated loop index: the original code reused the parameter
    # name `n` as the loop variable, silently shadowing it.
    for i, s in enumerate(scale):
        systems[i].set_cell(s*uc, scale_atoms=True)
    return systems
def get_vecang_cell(cryst, uc=None):
    '''
    Compute A,B,C, alpha,beta,gamma cell params
    from the unit cell matrix (uc) or cryst.
    Angles in radians.
    '''
    if uc is None:
        uc = cryst.get_cell()
    # Row lengths and the corresponding unit vectors of the cell matrix.
    lengths = []
    unit_vecs = []
    for k in range(3):
        row_len = norm(uc[k, :])
        lengths.append(row_len)
        unit_vecs.append(uc[k, :]/row_len)
    # alpha is the angle between b and c, beta between a and c, etc.
    angles = [acos(dot(unit_vecs[(k + 1) % 3], unit_vecs[(k + 2) % 3]))
              for k in range(3)]
    return lengths + angles
def get_deformed_cell(base_cryst, axis=0, size=1):
    '''
    Return the cell (with atoms) deformed along one
    cell parameter (0,1,2 = a,b,c ; 3,4,5 = alpha,beta,gamma) by
    size percent or size degrees (axis/angles).

    :param base_cryst: Atoms object, reference structure (not modified)
    :param axis: 0-2 axial deformation, 3-5 angular deformation
    :param size: deformation magnitude (percent for axes, degrees for angles)
    :returns: new, deformed Atoms object
    :raises ValueError: when the requested angles do not describe
        a valid lattice (imaginary coordinates would result)
    '''
    cryst = Atoms(base_cryst)
    uc = base_cryst.get_cell()
    if axis < 3:
        # Axial deformation: stretch a single lattice vector by size %.
        uc[axis, :] = (1+size/100.0)*uc[axis, :]
    else:
        # Angular deformation: change one of the cell angles by size deg.
        (a, b, c, alp, bet, gam) = get_vecang_cell(cryst)
        d = array([0.0, 0.0, 0.0])
        d[axis-3] = pi*size/180
        (alp, bet, gam) = array((alp, bet, gam))+d
        # t < 0 would make the c_z component imaginary - the angles do
        # not form a valid lattice.
        t = 1 - (ctg(bet)*ctg(gam)-cos(alp)*csc(bet)*csc(gam))**2
        if t < 0.0:
            # Report the problem through the exception instead of printing
            # the diagnostics and raising a bare ValueError (the original
            # behaviour), so callers and logs see the reason.
            raise ValueError('''
            The parameters (alpha,beta,gamma)=(%f,%f,%f) are probably
            incorrect and lead to imaginary coordinates.
            This range of parameters is unsupported by this program
            (and is, let me say, very strange for a crystal).
            Cannot continue, bye.''' % (alp, bet, gam))
        else:
            # Standard reconstruction of the cell matrix from the
            # lengths and (updated) angles.
            uc = [[a, 0.0, 0.0],
                  [b*cos(gam), b*sin(gam), 0],
                  [c*cos(bet),
                   c*(cos(alp)/sin(gam) - cos(bet)*ctg(gam)),
                   c*sin(bet)*sqrt(t)]]
    cryst.set_cell(uc, scale_atoms=True)
    # print(cryst.get_cell())
    # print(uc)
    return cryst
def get_cart_deformed_cell(base_cryst, axis=0, size=1):
    '''Return the cell deformed along one of the cartesian directions
    Creates new deformed structure. The deformation is based on the
    base structure and is performed along single axis. The axis is
    specified as follows: 0,1,2 = x,y,z ; sheers: 3,4,5 = yz, xz, xy.
    The size of the deformation is in percent and degrees, respectively.
    :param base_cryst: structure to be deformed
    :param axis: direction of deformation
    :param size: size of the deformation
    :returns: new, deformed structure
    '''
    deformed = Atoms(base_cryst)
    cell = base_cryst.get_cell()
    eps = size/100.0
    # Start from the identity and perturb a single matrix element:
    # diagonal for axial strains, off-diagonal for shears.
    defmat = diag(ones(3))
    if axis < 3:
        defmat[axis, axis] += eps
    else:
        # Shear index -> perturbed (row, col) of the deformation matrix.
        row, col = {3: (1, 2), 4: (0, 2), 5: (0, 1)}[axis]
        defmat[row, col] += eps
    deformed.set_cell(dot(cell, defmat), scale_atoms=True)
    return deformed
if __name__ == '__main__':
    # Smoke test: build rock-salt MgO and print the generated volume
    # scan and the elementary deformations, without running any calculator.
    from ase.spacegroup import crystal
    a = 4.194
    cryst = crystal(['Mg', 'O'],
                    [(0, 0, 0), (0.5, 0.5, 0.5)],
                    spacegroup=225,
                    cellpar=[a, a, a, 90, 90, 90])
    sl = scan_volumes(cryst)
    print('Volumes: ', end='')
    for c in sl:
        print('%.2f (%.1f%%)' % (c.get_volume(),
                                 100*c.get_volume()/cryst.get_volume()),
              end=' ')
    print()
    sl = get_elementary_deformations(cryst)
    print('Structures: ')
    print(' Vol A B C alph bet gam')
    for n, c in enumerate(sl):
        print('%.4f (%5.1f%%)' % (c.get_volume(),
                                  100*c.get_volume()/cryst.get_volume()),
              end='')
        print((3*' %7.4f' + ' ' + 3*' %7.2f') %
              tuple(c.get_cell_lengths_and_angles()))
|
jochym/Elastic | parcalc/parcalc.py | work_dir | python | def work_dir(path):
'''
Context menager for executing commands in some working directory.
Returns to the previous wd when finished.
Usage:
>>> with work_dir(path):
... subprocess.call('git status')
'''
starting_directory = os.getcwd()
try:
os.chdir(path)
yield
finally:
os.chdir(starting_directory) | Context menager for executing commands in some working directory.
Returns to the previous wd when finished.
Usage:
>>> with work_dir(path):
... subprocess.call('git status') | train | https://github.com/jochym/Elastic/blob/8daae37d0c48aab8dfb1de2839dab02314817f95/parcalc/parcalc.py#L79-L94 | null | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2011 by Pawel T. Jochym <pawel.jochym@ifj.edu.pl>
#
# This file is part of Elastic.
# Elastic is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# Elastic is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with Elastic. If not, see <http://www.gnu.org/licenses/>.
'''
.. _par-calc-mod:
Parallel Calculator Module
^^^^^^^^^^^^^^^^^^^^^^^^^^
Parallel calculator module is an extension of the standard
`ASE <https://wiki.fysik.dtu.dk/ase/>`_ calculator working in the
parallel cluster environment. It is very useful in all situations where
you need to run several, independent calculations and you have a large
cluster of machines at your disposal (probably with some queuing system).
This implementation uses VASP but the code can be easily adapted for use
with other ASE calculators with minor changes.
The final goal is to provide a universal module for parallel
calculator execution in the cluster environment.
The SIESTA code by Georgios Tritsaris <gtritsaris@seas.harvard.edu>
Not fully tested after merge.
'''
from __future__ import print_function, division
# Standard library imports
import logging
import os
import shutil
import subprocess
import sys
import tempfile
import time
from contextlib import contextmanager
from copy import deepcopy
from multiprocessing import Process, Queue
from subprocess import check_output
try : # Python3
    from queue import Empty
except ImportError : # Python2
    from Queue import Empty
# Third-party (ASE) imports
import ase
from ase.calculators.vasp import Vasp
from ase.calculators.siesta import Siesta
from ase.calculators.aims import Aims
from ase.calculators.calculator import Calculator, FileIOCalculator, all_changes
class _NonBlockingRunException(Exception):
    '''
    Internal exception. Should never be propagated outside.
    '''
    # Raised by the non-blocking run() methods to abort the standard
    # calculate() processing; caught inside the calculator classes.
    def __str__(self):
        return '''The __NonBlockingRunException should be caught inside
        the calculator class. If you got it outside it is a bug.
        Contact the author and/or submit a bug ticket at github.'''
from traceback import print_stack
# Restore the work_dir() context manager.  In this copy of the file only
# its @contextmanager decorator survived, directly above the ClusterVasp
# class: that would wrap the CLASS in a context-manager factory and break
# every instantiation, while all the `with work_dir(...)` uses below would
# raise NameError.  Re-attaching the decorator to the function fixes both.
@contextmanager
def work_dir(path):
    '''
    Context manager for executing commands in some working directory.
    Returns to the previous wd when finished.
    Usage:
    >>> with work_dir(path):
    ...   subprocess.call('git status')
    '''
    starting_directory = os.getcwd()
    try:
        os.chdir(path)
        yield
    finally:
        os.chdir(starting_directory)
class ClusterVasp(Vasp):
    '''
    Adaptation of VASP calculator to the cluster environment where you often
    have to make some preparations before job submission. You can easily
    adapt this class to your particular environment. It is also easy to
    use this as a template for other type of calculator.
    '''
    def __init__(self, nodes=1, ppn=8, block=True, ncl=False, **kwargs):
        # nodes/ppn describe the requested cluster geometry; block selects
        # blocking vs. non-blocking execution; ncl enables the non-collinear
        # VASP binary (see prepare_calc_dir).
        Vasp.__init__(self, **kwargs)
        self.nodes=nodes
        self.ppn=ppn
        self.block=block
        self.ncl=ncl
        self.calc_running=False
        self.working_dir=os.getcwd()
    def prepare_calc_dir(self):
        '''
        Prepare the calculation directory for VASP execution.
        This needs to be re-implemented for each local setup.
        The following code reflects just my particular setup.
        '''
        # The vasprun.conf file is consumed by the site-specific submit
        # scripts (see examples/scripts in the repository).
        with open("vasprun.conf","w") as f:
            f.write('NODES="nodes=%s:ppn=%d"\n' % (self.nodes, self.ppn))
            f.write('BLOCK=%d\n' % (self.block,))
            if self.ncl :
                f.write('NCL=%d\n' % (1,))
        #print(self.nodes, self.ppn)
    def calc_finished(self):
        '''
        Check if the lockfile is in the calculation directory.
        It is removed by the script at the end regardless of the
        success of the calculation. This is totally tied to
        implementation and you need to implement your own scheme!

        :returns: True when no calculation is running (or the queued job
                  left the running state), False when the job still runs.
        '''
        #print_stack(limit=5)
        if not self.calc_running :
            #print('Calc running:',self.calc_running)
            return True
        else:
            # The calc is marked as running check if this is still true
            # We do it by external scripts. You need to write these
            # scripts for your own system.
            # See examples/scripts directory for examples.
            with work_dir(self.working_dir) :
                # 'check-job' is a site-specific helper; it must print the
                # queue state of the job as the first output byte.
                o=check_output(['check-job'])
                #print('Status',o)
                if o[0] in b'R' :
                    # Still running - we do nothing to preserve the state
                    return False
                else :
                    # The job is not running maybe it finished maybe crashed
                    # We hope for the best at this point ad pass to the
                    # Standard update function
                    return True
    def set(self,**kwargs):
        # Strip the cluster-specific keywords before delegating the rest
        # to the standard Vasp.set(); absent keys reset to defaults.
        if 'block' in kwargs :
            self.block=kwargs['block']
            del kwargs['block']
        else :
            self.block=True
        if 'ncl' in kwargs :
            self.ncl=kwargs['ncl']
            del kwargs['ncl']
        else :
            self.ncl=False
        Vasp.set(self, **kwargs)
    def clean(self):
        # Clean in the directory the job actually ran in, not the cwd.
        with work_dir(self.working_dir) :
            Vasp.clean(self)
    def update(self, atoms):
        if self.calc_running :
            # we have started the calculation and have
            # nothing to read really. But we need to check
            # first if this is still true.
            if self.calc_finished():
                # We were running but recently finished => read the results
                # This is a piece of copy-and-paste programming
                # This is a copy of code from Vasp.calculate
                self.calc_running=False
                with work_dir(self.working_dir) :
                    atoms_sorted = ase.io.read('CONTCAR', format='vasp')
                    if self.int_params['ibrion'] > -1 and self.int_params['nsw'] > 0:
                        # Update atomic positions and unit cell with the ones read
                        # from CONTCAR.
                        atoms.positions = atoms_sorted[self.resort].positions
                        atoms.cell = atoms_sorted.cell
                    self.converged = self.read_convergence()
                    Vasp.set_results(self,atoms)
                return
            else :
                return
        # We are not in the middle of calculation.
        # Update as normal
        Vasp.update(self, atoms)
    def set_results(self, atoms):
        with work_dir(self.working_dir) :
            #print('set_results')
            Vasp.set_results(self, atoms)
    def run(self):
        '''
        Blocking/Non-blocking run method.
        In blocking mode it just runs parent run method.
        In non-blocking mode it raises the _NonBlockingRunException
        to bail out of the processing of standard calculate method
        (or any other method in fact) and signal that the data is not
        ready to be collected.
        '''
        # This is only called from self.calculate - thus
        # we do not need to change to working_dir
        # since calculate already did
        Vasp.run(self)
        if not self.block :
            #print('Interrupt processing of calculate', os.getcwd())
            raise _NonBlockingRunException
    def calculate(self, atoms):
        '''
        Blocking/Non-blocking calculate method
        If we are in blocking mode we just run, wait for
        the job to end and read in the results. Easy ...
        The non-blocking mode is a little tricky.
        We need to start the job and guard against it reading
        back possible old data from the directory - the queuing
        system may not even started the job when we get control
        back from the starting script. Thus anything we read
        after invocation is potentially garbage - even if it
        is a converged calculation data.
        We handle it by custom run function above which
        raises an exception after submitting the job.
        This skips post-run processing in the calculator, preserves
        the state of the data and signals here that we need to wait
        for results.
        '''
        with work_dir(self.working_dir) :
            self.prepare_calc_dir()
            self.calc_running=True
            #print('Run VASP.calculate')
            try :
                Vasp.calculate(self, atoms)
                self.calc_running=False
                #print('VASP.calculate returned')
            except _NonBlockingRunException as e:
                # We have nothing else to docs
                # until the job finishes
                #print('Interrupted ', self.working_dir, os.getcwd())
                pass
class ClusterSiesta(Siesta):
    '''
    Siesta calculator adapted to the cluster environment.
    Not fully tested - so this should be considered
    beta quality. Nevertheless it is based on working implementation
    '''
    def __init__(self, nodes=1, ppn=8, **kwargs):
        # nodes/ppn describe the requested cluster geometry.
        Siesta.__init__(self, **kwargs)
        self.nodes=nodes
        self.ppn=ppn
    def prepare_calc_dir(self):
        # Write the job-geometry config consumed by the local run scripts.
        with open("siestarun.conf","w") as f:
            f.write('NODES="nodes=%d:ppn=%d"' % (self.nodes, self.ppn))
        #print(self.nodes, self.ppn)
    def get_potential_energy(self, atoms):
        '''Prepare the calculation directory and delegate to Siesta.

        Fix: the original override called the parent method but dropped
        its return value, so callers always received None.
        '''
        self.prepare_calc_dir()
        return Siesta.get_potential_energy(self, atoms)
    def clean(self):
        # Only mark the result as stale; no files are removed here.
        self.converged = False
        return
class ClusterAims(Aims):
    '''
    Encapsulating Aims calculator for the cluster environment.
    '''
    def __init__(self, nodes=1, ppn=8, **kwargs):
        # nodes/ppn describe the requested cluster geometry.
        Aims.__init__(self, **kwargs)
        self.nodes=nodes
        self.ppn=ppn
    def prepare_calc_dir(self):
        # NOTE(review): the file name "siestarun.conf" looks copy-pasted
        # from ClusterSiesta; verify whether the Aims run scripts really
        # read this name or expect an aims-specific config file.
        with open("siestarun.conf","w") as f:
            f.write('NODES="nodes=%d:ppn=%d"' % (self.nodes, self.ppn))
        #print(self.nodes, self.ppn)
    def run(self):
        # Write the job config, then delegate to the standard Aims run.
        self.prepare_calc_dir()
        Aims.run(self)
class RemoteCalculator(Calculator):
    '''
    Remote calculator based on ASE calculator class.
    This class is only involved with the mechanics of remotely executing
    the software and transporting the data. The calculation is
    delegated to the actual calculator class.
    '''
    # NOTE(review): this class references several names that are not
    # defined in this module: CalcNotReadyError,
    # read_quantumespresso_textoutput, self.ext and self.command -
    # presumably provided by subclasses or other project modules; verify
    # before using this class stand-alone.
    # Queue system submit command
    qsub_tool='qsub'
    qstat_tool='qstat'
    qsub_cmd='cd %(rdir)s ; %(qsub_tool)s -N %(title)s -l procs=%(procs)d ./run-pw.pbs'
    # Remote execution command
    remote_exec_cmd='ssh %(user)s@%(host)s "%(command)s"'
    # If you cannot mount the data directory into your system it is best
    # to use the rsync command to transfer the results back into the system.
    # Command for copying the data out to the computing system
    copy_out_cmd='rsync -a "%(ldir)s" "%(user)s@%(host)s:%(rdir)s"'
    # Command for copying the data in after the calculation
    copy_in_cmd='rsync -a "%(user)s@%(host)s:%(rdir)s" "%(ldir)s"'
    # Template for the PBS batch job
    pbs_template=''
    # Command to check the state of the job
    pbs_check_cmd='''%(qstat_tool)s -f %(jobid)s |grep job_state |awk '{print $3}' '''
    # Access data
    host=''
    user=''
    # Location:
    # local working directory
    wdir='.'
    # Remote working directory relative to the home directory or absolute
    rdir='.'
    # Repetition timer (seconds) for checking the state of the job.
    job_check_time=15
    def __init__(self, restart=None, ignore_bad_restart_file=False, label=None,
                 atoms=None, calc=None, block=False, **kwargs):
        '''Basic calculator implementation.
        restart: str
            Prefix for restart file. May contain a directory. Default
            is None: don't restart.
        ignore_bad_restart_file: bool
            Ignore broken or missing restart file. By default, it is an
            error if the restart file is missing or broken.
        label: str
            Name used for all files. May contain a directory.
        atoms: Atoms object
            Optional Atoms object to which the calculator will be
            attached. When restarting, atoms will get its positions and
            unit-cell updated from file.
        calc: Calculator
            The actual calculator the work is delegated to.
        block: bool
            Wait for the remote job (True) or submit and return (False).
        Create a remote execution calculator based on actual ASE calculator
        calc.
        '''
        logging.debug("Calc: %s Label: %s" % (calc, label))
        Calculator.__init__(self, restart, ignore_bad_restart_file, label, atoms, **kwargs)
        logging.debug("Dir: %s Ext: %s" % (self.directory, self.ext))
        self.calc=calc
        self.jobid=None
        self.block=block
    def write_pbs_in(self,properties):
        # Render the PBS batch script from pbs_template into the local
        # calculation directory.
        with work_dir(self.directory):
            with open(os.path.join(self.directory,'run-ase-calc.pbs'),'w') as fh:
                fh.write(self.pbs_template % {
                    'command': self.build_command(self,prop=properties,
                                                  params=self.parameters)
                })
    def build_command(self,prop=['energy'],params={}):
        # Compose the qsub submit command and wrap it into the ssh
        # remote-execution command.
        cmd=self.qsub_cmd % {
            'qsub_tool': self.qsub_tool,
            'qstat_tool': self.qstat_tool,
            'title': self.label,
            'procs': self.parameters['procs'],
            'rdir': os.path.join(self.parameters['rdir'],os.path.split(self.directory)[-1])
        }
        cmd=self.remote_exec_cmd % {
            'command': cmd,
            'user': self.parameters['user'],
            'host': self.parameters['host']
        }
        return cmd
    def write_input(self, atoms=None, properties=['energy'], system_changes=all_changes):
        '''Write input file(s) and rsync them to the remote machine.'''
        with work_dir(self.directory):
            self.calc.write_input(self, atoms, properties, system_changes)
            self.write_pbs_in(properties)
            subprocess.call(self.copy_out_cmd % {
                'ldir': self.directory,
                'rdir': self.parameters['rdir'],
                'user': self.parameters['user'],
                'host': self.parameters['host']
            }, shell=True)
    def job_ready(self):
        # Query the remote queue for the job state; any failure to parse
        # the answer is treated as "job finished".
        try :
            cmd=self.remote_exec_cmd % {
                'command': self.pbs_check_cmd % {
                    'qsub_tool': self.qsub_tool,
                    'qstat_tool': self.qstat_tool,
                    'jobid':self.jobid
                },
                'user': self.parameters['user'],
                'host': self.parameters['host']
            }
            state=subprocess.check_output(cmd, shell=True).split()[-1]
        except (subprocess.CalledProcessError, IndexError) :
            # Unknown state. We assume it has finished and continue
            state='N'
        return not (state in ['Q','R'])
    def run_calculation(self, atoms=None, properties=['energy'],
                        system_changes=all_changes):
        '''
        Internal calculation executor. We cannot use FileIOCalculator
        directly since we need to support remote execution.
        This calculator is different from others.
        It prepares the directory, launches the remote process and
        raises the exception to signal that we need to come back for results
        when the job is finished.
        '''
        self.calc.calculate(self, atoms, properties, system_changes)
        self.write_input(self.atoms, properties, system_changes)
        if self.command is None:
            raise RuntimeError('Please configure Remote calculator!')
        olddir = os.getcwd()
        errorcode=0
        try:
            os.chdir(self.directory)
            output = subprocess.check_output(self.command, shell=True)
            # The first token of the submit output is the queue job id.
            self.jobid=output.split()[0]
            self.submited=True
            #print "Job %s submitted. Waiting for it." % (self.jobid)
            # Waiting loop. To be removed.
        except subprocess.CalledProcessError as e:
            errorcode=e.returncode
        finally:
            os.chdir(olddir)
        if errorcode:
            raise RuntimeError('%s returned an error: %d' %
                               (self.name, errorcode))
        self.read_results()
    def read_results(self):
        """Read energy, forces, ... from output file(s)."""
        if self.submited:
            # The job has been submitted. Check the state.
            if not self.job_ready() :
                if self.block :
                    while not self.job_ready() :
                        time.sleep(self.job_check_time)
                else :
                    raise CalcNotReadyError
            # Assume the calc finished. Copy the files back.
            subprocess.call(self.copy_in_cmd % {
                'ldir': self.wdir,
                'rdir': os.path.join(self.parameters['rdir'],os.path.split(self.directory)[-1]),
                'user': self.parameters['user'],
                'host': self.parameters['host']
            }, shell=True)
            fn=os.path.join(self.directory,'pw.out')
            # Read the penultimate line of the output file
            try:
                ln=open(fn).readlines()[-2]
                if ln.find('JOB DONE.')>-1 :
                    # Job is done we can read the output
                    r=read_quantumespresso_textoutput(fn)
                    self.submited=False
                    self.jobid=None
                else :
                    # Job not ready.
                    raise CalcNotReadyError
            except (IOError, IndexError) :
                # Job not ready.
                raise CalcNotReadyError
        # All is fine - really read the results
        self.calc.read_results(self)
    @classmethod
    def ParallelCalculate(cls,syslst,properties=['energy'],system_changes=all_changes):
        '''
        Run a series of calculations in parallel using (implicitly) some
        remote machine/cluster. The function returns the list of systems ready
        for the extraction of calculated properties.
        '''
        # Phase 1: submit all jobs in non-blocking mode.
        print('Launching:',end=' ')
        sys.stdout.flush()
        for n,s in enumerate(syslst):
            try :
                s.calc.block=False
                s.calc.calculate(atoms=s,properties=properties,system_changes=system_changes)
            except CalcNotReadyError:
                s.calc.block=True
            print(n+1, end=' ')
            sys.stdout.flush()
        print()
        # Phase 2: collect the results, blocking on each job in turn.
        print(' Done:', end=' ')
        sys.stdout.flush()
        for n,s in enumerate(syslst):
            s.calc.read_results()
            print( n+1, end=' ')
            sys.stdout.flush()
        print()
        return syslst
verbose=True
class __PCalcProc(Process):
    '''
    Internal helper class representing the calculation process isolated
    from the rest of the ASE script. The process (not thread) runs in
    the separate directory, created on-the-fly and removed at the end
    if the cleanup is true and we are in blocking (default) mode.
    In this mode it is vital for the calculator to read in all the
    results after the run since the files will be removed as soon as the
    "calculate" function terminates. You can pass False to the cleanup
    argument to prevent the clean-up. This is very useful for debugging.
    The process waits without any time-out for the calculation to finish.
    This is great for short and simple calculations and quick testing.
    You run the job and get back your results.
    '''
    def __init__(self, iq, oq, calc, prefix, cleanup=True):
        # iq/oq: input/output multiprocessing queues shared with
        # ParCalculate; calc: calculator template to deep-copy;
        # prefix: scratch-directory name prefix.
        Process.__init__(self)
        self.calc=calc
        self.basedir=os.getcwd()
        # Scratch directory is created eagerly, in the parent process.
        self.place=tempfile.mkdtemp(prefix=prefix, dir=self.basedir)
        self.iq=iq
        self.oq=oq
        self.CleanUp=cleanup
    def run(self):
        with work_dir(self.place) :
            # Blocks until ParCalculate puts a [index, system] pair.
            n,system=self.iq.get()
            system.set_calculator(deepcopy(self.calc))
            system.get_calculator().block=True
            system.get_calculator().working_dir=self.place
            #print("Start at :", self.place)
            # Siesta uses a different entry point than the other
            # calculators handled here.
            if hasattr(self.calc, 'name') and self.calc.name=='Siesta':
                system.get_potential_energy()
            else:
                system.get_calculator().calculate(system)
            #print("Finito: ", os.getcwd(), system.get_volume(), system.get_pressure())
            # Hand the finished system back, tagged with its input index.
            self.oq.put([n,system])
            if self.CleanUp :
                system.get_calculator().clean()
                # Leave the scratch dir before deleting it.
                os.chdir(self.basedir)
                shutil.rmtree(self.place, ignore_errors=True)
def ParCalculate(systems,calc,cleanup=True,block=True,prefix="Calc_"):
    '''
    Run calculators in parallel for all systems.
    Calculators are executed in isolated processes and directories.
    The resulting objects are returned in the list (one per input system).

    :param systems: a single Atoms object or a list of them
    :param calc: calculator; a deep copy is attached to each system
    :param cleanup: remove the scratch directories when done (blocking mode)
    :param block: wait for the results (True) or just submit the jobs (False)
    :param prefix: prefix of the per-job scratch directories
    :returns: list of calculated systems, in the input order
    '''
    # Accept a single system as well as a list (isinstance instead of the
    # original `type(systems) != type([])` comparison).
    if not isinstance(systems, list):
        sysl=[systems]
    else :
        sysl=systems
    if block :
        iq=Queue(len(sysl)+1)
        oq=Queue(len(sysl)+1)
        # Create workers
        for s in sysl:
            __PCalcProc(iq, oq, calc, prefix=prefix, cleanup=cleanup).start()
        # Put jobs into the queue
        for n,s in enumerate(sysl):
            iq.put([n,s])
            # Protection against too quick insertion
            time.sleep(0.2)
        if verbose :
            print("Workers started:", len(sysl))
        # Collect the results (they arrive in completion order, tagged
        # with their input index).
        res=[]
        while len(res)<len(sysl) :
            n,s=oq.get()
            res.append([n,s])
            #print("Got from oq:", n, s.get_volume(), s.get_pressure())
    else :
        # We do not need the multiprocessing complications for non-blocking
        # workers. We just run all in sequence.
        basedir=os.getcwd()
        res=[]
        for n,s in enumerate(sysl):
            s.set_calculator(deepcopy(calc))
            s.get_calculator().block=block
            place=tempfile.mkdtemp(prefix=prefix, dir=basedir)
            os.chdir(place)
            s.get_calculator().working_dir=place
            #print("Start at :", place)
            if hasattr(calc, 'name') and calc.name=='Siesta':
                s.get_potential_energy()
            else:
                s.get_calculator().calculate(s)
            os.chdir(basedir)
            #print("Submited", s.get_calculator().calc_finished(), os.getcwd())
            # Protection against too quick insertion
            time.sleep(0.2)
            res.append([n,s])
        if verbose :
            print("Workers started:", len(sysl))
    # Re-order the results back into the input order.
    return [r for ns,s in enumerate(sysl) for nr,r in res if nr==ns]
# Testing routines using VASP as a calculator in the cluster environment.
# TODO: Make it calculator/environment agnostic
if __name__ == '__main__':
    from ase.lattice.spacegroup import crystal
    from ase.units import GPa
    import elastic
    import numpy
    from pylab import *
    # Rock-salt MgO test structure.
    a = 4.291
    MgO = crystal(['Mg', 'O'], [(0, 0, 0), (0.5, 0.5, 0.5)], spacegroup=225,
                  cellpar=[a, a, a, 90, 90, 90])
    ##################################
    # Provide your own calculator here
    ##################################
    calc=ClusterVasp(nodes=1,ppn=8)
    # The calculator must be runnable in an isolated directory
    # Without disturbing other running instances of the same calculator
    # They are run in separate processes (not threads!)
    MgO.set_calculator(calc)
    calc.set(prec = 'Accurate', xc = 'PBE', lreal = False, isif=2, nsw=20, ibrion=2, kpts=[1,1,1])
    # NOTE(review): get_pressure() is presumably monkey-patched onto Atoms
    # by the `elastic` import above - confirm against the elastic module.
    print("Residual pressure: %.3f GPa" % (MgO.get_pressure()/GPa))
    calc.clean()
    # Volume scan: 5 cubic cells between 95% and 105% of the lattice constant.
    systems=[]
    for av in numpy.linspace(a*0.95,a*1.05,5):
        systems.append(crystal(['Mg', 'O'], [(0, 0, 0), (0.5, 0.5, 0.5)], spacegroup=225,
                               cellpar=[av, av, av, 90, 90, 90]))
    pcalc=ClusterVasp(nodes=1,ppn=8)
    pcalc.set(prec = 'Accurate', xc = 'PBE', lreal = False, isif=2, nsw=20, ibrion=2, kpts=[1,1,1])
    res=ParCalculate(systems,pcalc)
    # Plot pressure vs. volume for the scanned cells.
    v=[]
    p=[]
    for s in res :
        v.append(s.get_volume())
        p.append(s.get_pressure()/GPa)
    plot(v,p,'o')
    show()
|
jochym/Elastic | parcalc/parcalc.py | ParCalculate | python | def ParCalculate(systems,calc,cleanup=True,block=True,prefix="Calc_"):
'''
Run calculators in parallel for all systems.
Calculators are executed in isolated processes and directories.
The resulting objects are returned in the list (one per input system).
'''
if type(systems) != type([]) :
sysl=[systems]
else :
sysl=systems
if block :
iq=Queue(len(sysl)+1)
oq=Queue(len(sysl)+1)
# Create workers
for s in sysl:
__PCalcProc(iq, oq, calc, prefix=prefix, cleanup=cleanup).start()
# Put jobs into the queue
for n,s in enumerate(sysl):
iq.put([n,s])
# Protection against too quick insertion
time.sleep(0.2)
if verbose :
print("Workers started:", len(sysl))
# Collect the results
res=[]
while len(res)<len(sysl) :
n,s=oq.get()
res.append([n,s])
#print("Got from oq:", n, s.get_volume(), s.get_pressure())
else :
# We do not need the multiprocessing complications for non-blocking
# workers. We just run all in sequence.
basedir=os.getcwd()
res=[]
for n,s in enumerate(sysl):
s.set_calculator(deepcopy(calc))
s.get_calculator().block=block
place=tempfile.mkdtemp(prefix=prefix, dir=basedir)
os.chdir(place)
s.get_calculator().working_dir=place
#print("Start at :", place)
if hasattr(calc, 'name') and calc.name=='Siesta':
s.get_potential_energy()
else:
s.get_calculator().calculate(s)
os.chdir(basedir)
#print("Submited", s.get_calculator().calc_finished(), os.getcwd())
# Protection against too quick insertion
time.sleep(0.2)
res.append([n,s])
if verbose :
print("Workers started:", len(sysl))
return [r for ns,s in enumerate(sysl) for nr,r in res if nr==ns] | Run calculators in parallel for all systems.
Calculators are executed in isolated processes and directories.
The resulting objects are returned in the list (one per input system). | train | https://github.com/jochym/Elastic/blob/8daae37d0c48aab8dfb1de2839dab02314817f95/parcalc/parcalc.py#L578-L637 | null | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2011 by Pawel T. Jochym <pawel.jochym@ifj.edu.pl>
#
# This file is part of Elastic.
# Elastic is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# Elastic is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with Elastic. If not, see <http://www.gnu.org/licenses/>.
'''
.. _par-calc-mod:
Parallel Calculator Module
^^^^^^^^^^^^^^^^^^^^^^^^^^
Parallel calculator module is an extension of the standard
`ASE <https://wiki.fysik.dtu.dk/ase/>`_ calculator working in the
parallel cluster environment. It is very useful in all situations where
you need to run several, independent calculations and you have a large
cluster of machines at your disposal (probably with some queuing system).
This implementation uses VASP but the code can be easily adapted for use
with other ASE calculators with minor changes.
The final goal is to provide a universal module for parallel
calculator execution in the cluster environment.
The SIESTA code by Georgios Tritsaris <gtritsaris@seas.harvard.edu>
Not fully tested after merge.
'''
from __future__ import print_function, division
import logging
import ase
from ase.calculators.vasp import Vasp
from ase.calculators.siesta import Siesta
from ase.calculators.aims import Aims
from ase.calculators.calculator import Calculator, FileIOCalculator, all_changes
try : # Python3
from queue import Empty
except ImportError : # Python2
from Queue import Empty
from multiprocessing import Process, Queue
import time
import os
import tempfile
import shutil
from copy import deepcopy
from subprocess import check_output
from contextlib import contextmanager
class _NonBlockingRunException(Exception):
'''
Internal exception. Should never be propagated outside.
'''
def __str__(self):
return '''The __NonBlockingRunException should be caught inside
the calculator class. If you got it outside it is a bug.
Contact the author and/or submit a bug ticket at github.'''
from traceback import print_stack
@contextmanager
def work_dir(path):
    '''
    Context manager running the enclosed block inside ``path``.

    The previous working directory is restored on exit, whether the
    block finished normally or raised.

    Usage:
    >>> with work_dir(path):
    ...   subprocess.call('git status')
    '''
    previous_cwd = os.getcwd()
    try:
        os.chdir(path)
        yield
    finally:
        # Always return to where we started, even on errors.
        os.chdir(previous_cwd)
class ClusterVasp(Vasp):
    '''
    Adaptation of the VASP calculator to a cluster environment where you
    often have to make some preparations before job submission. You can
    easily adapt this class to your particular environment. It is also
    easy to use this as a template for other types of calculators.
    '''
    def __init__(self, nodes=1, ppn=8, block=True, ncl=False, **kwargs):
        '''
        :param nodes: number of cluster nodes to request for the job
        :param ppn: processors per node
        :param block: if True, ``calculate()`` waits for the job to end
        :param ncl: if True, request an NCL run (written to vasprun.conf)
        '''
        Vasp.__init__(self, **kwargs)
        self.nodes=nodes
        self.ppn=ppn
        self.block=block
        self.ncl=ncl
        # True while a submitted job is believed to be still running.
        self.calc_running=False
        # Directory where all job files live; every VASP I/O operation
        # below is wrapped in work_dir(self.working_dir).
        self.working_dir=os.getcwd()
    def prepare_calc_dir(self):
        '''
        Prepare the calculation directory for VASP execution.
        This needs to be re-implemented for each local setup.
        The following code reflects just my particular setup: it writes
        a ``vasprun.conf`` file that the local submission script reads.
        '''
        with open("vasprun.conf","w") as f:
            f.write('NODES="nodes=%s:ppn=%d"\n' % (self.nodes, self.ppn))
            f.write('BLOCK=%d\n' % (self.block,))
            if self.ncl :
                f.write('NCL=%d\n' % (1,))
        #print(self.nodes, self.ppn)
    def calc_finished(self):
        '''
        Return True if the job is no longer running.

        The check is delegated to an external ``check-job`` script
        (see examples/scripts) executed in the working directory. This
        is totally tied to the local implementation and you need to
        implement your own scheme!
        '''
        #print_stack(limit=5)
        if not self.calc_running :
            # Nothing was submitted (or it already finished earlier).
            #print('Calc running:',self.calc_running)
            return True
        else:
            # The calc is marked as running; check if this is still true.
            # We do it by external scripts. You need to write these
            # scripts for your own system.
            # See examples/scripts directory for examples.
            with work_dir(self.working_dir) :
                o=check_output(['check-job'])
                #print('Status',o)
                # First status byte 'R' means the queue still reports
                # the job as running.
                if o[0] in b'R' :
                    # Still running - we do nothing to preserve the state
                    return False
                else :
                    # The job is not running: maybe it finished, maybe it
                    # crashed. We hope for the best at this point and pass
                    # control to the standard update function.
                    return True
    def set(self,**kwargs):
        # Pop our own 'block'/'ncl' options before delegating the rest
        # to Vasp.set(), which would not recognize them.
        if 'block' in kwargs :
            self.block=kwargs['block']
            del kwargs['block']
        else :
            self.block=True
        if 'ncl' in kwargs :
            self.ncl=kwargs['ncl']
            del kwargs['ncl']
        else :
            self.ncl=False
        Vasp.set(self, **kwargs)
    def clean(self):
        # Clean up in the job directory, not wherever the caller happens
        # to be.
        with work_dir(self.working_dir) :
            Vasp.clean(self)
    def update(self, atoms):
        if self.calc_running :
            # We have started the calculation and have nothing to read
            # really. But we need to check first if this is still true.
            if self.calc_finished():
                # We were running but recently finished => read the results.
                # This is a piece of copy-and-paste programming:
                # a copy of code from Vasp.calculate.
                self.calc_running=False
                with work_dir(self.working_dir) :
                    atoms_sorted = ase.io.read('CONTCAR', format='vasp')
                    if self.int_params['ibrion'] > -1 and self.int_params['nsw'] > 0:
                        # Update atomic positions and unit cell with the ones read
                        # from CONTCAR.
                        atoms.positions = atoms_sorted[self.resort].positions
                        atoms.cell = atoms_sorted.cell
                    self.converged = self.read_convergence()
                    Vasp.set_results(self,atoms)
                    return
            else :
                # Still running - leave the state untouched.
                return
        # We are not in the middle of a calculation. Update as normal.
        Vasp.update(self, atoms)
    def set_results(self, atoms):
        with work_dir(self.working_dir) :
            #print('set_results')
            Vasp.set_results(self, atoms)
    def run(self):
        '''
        Blocking/non-blocking run method.
        In blocking mode it just runs the parent run method.
        In non-blocking mode it raises _NonBlockingRunException
        to bail out of the processing of the standard calculate method
        (or any other method in fact) and signal that the data is not
        ready to be collected.
        '''
        # This is only called from self.calculate - thus
        # we do not need to change to working_dir
        # since calculate already did.
        Vasp.run(self)
        if not self.block :
            #print('Interrupt processing of calculate', os.getcwd())
            raise _NonBlockingRunException
    def calculate(self, atoms):
        '''
        Blocking/non-blocking calculate method.

        If we are in blocking mode we just run, wait for
        the job to end and read in the results. Easy ...

        The non-blocking mode is a little tricky.
        We need to start the job and guard against it reading
        back possible old data from the directory - the queuing
        system may not even have started the job when we get control
        back from the starting script. Thus anything we read
        after invocation is potentially garbage - even if it
        is converged calculation data.

        We handle it by the custom run function above which
        raises an exception after submitting the job.
        This skips post-run processing in the calculator, preserves
        the state of the data and signals here that we need to wait
        for results.
        '''
        with work_dir(self.working_dir) :
            self.prepare_calc_dir()
            self.calc_running=True
            #print('Run VASP.calculate')
            try :
                Vasp.calculate(self, atoms)
                # Blocking mode: the run finished inside calculate.
                self.calc_running=False
                #print('VASP.calculate returned')
            except _NonBlockingRunException as e:
                # Non-blocking mode: job submitted, nothing else to do
                # until the job finishes (update() will poll for it).
                #print('Interrupted ', self.working_dir, os.getcwd())
                pass
class ClusterSiesta(Siesta):
    '''
    Siesta calculator adapted to the cluster environment. Not fully
    tested - this should be considered beta quality. Nevertheless it is
    based on a working implementation.
    '''
    def __init__(self, nodes=1, ppn=8, **kwargs):
        '''
        :param nodes: number of cluster nodes to request for the job
        :param ppn: processors per node
        '''
        Siesta.__init__(self, **kwargs)
        self.nodes=nodes
        self.ppn=ppn
    def prepare_calc_dir(self):
        # Write the requested job geometry for the submission script.
        with open("siestarun.conf","w") as f:
            f.write('NODES="nodes=%d:ppn=%d"' % (self.nodes, self.ppn))
        #print(self.nodes, self.ppn)
    def get_potential_energy(self, atoms):
        self.prepare_calc_dir()
        # Bug fix: return the energy to the caller. The original
        # discarded the return value of Siesta.get_potential_energy,
        # so callers always received None.
        return Siesta.get_potential_energy(self, atoms)
    def clean(self):
        # Only reset the convergence flag; files are left in place.
        self.converged = False
        return
class ClusterAims(Aims):
    '''
    Encapsulating Aims calculator for the cluster environment.
    '''
    def __init__(self, nodes=1, ppn=8, **kwargs):
        # nodes/ppn describe the requested job geometry on the cluster.
        Aims.__init__(self, **kwargs)
        self.nodes=nodes
        self.ppn=ppn
    def prepare_calc_dir(self):
        # NOTE(review): this writes 'siestarun.conf' - the name looks
        # copy-pasted from ClusterSiesta. Confirm that the Aims run
        # script really reads this filename before renaming it.
        with open("siestarun.conf","w") as f:
            f.write('NODES="nodes=%d:ppn=%d"' % (self.nodes, self.ppn))
        #print(self.nodes, self.ppn)
    def run(self):
        # Write the job config, then delegate to the standard Aims run.
        self.prepare_calc_dir()
        Aims.run(self)
class RemoteCalculator(Calculator):
    '''
    Remote calculator based on the ASE calculator class.
    This class is only involved with the mechanics of remotely executing
    the software and transporting the data. The calculation is
    delegated to the actual calculator class.

    NOTE(review): this class uses ``subprocess`` and ``sys`` (in
    ParallelCalculate) as well as ``CalcNotReadyError`` and
    ``read_quantumespresso_textoutput``, none of which are defined or
    imported in this file - confirm where they come from.
    '''
    # Queue system submit command
    qsub_tool='qsub'
    qstat_tool='qstat'
    qsub_cmd='cd %(rdir)s ; %(qsub_tool)s -N %(title)s -l procs=%(procs)d ./run-pw.pbs'
    # Remote execution command
    remote_exec_cmd='ssh %(user)s@%(host)s "%(command)s"'
    # If you cannot mount the data directory into your system it is best
    # to use the rsync command to transfer the results back into the system.
    # Command for copying the data out to the computing system
    copy_out_cmd='rsync -a "%(ldir)s" "%(user)s@%(host)s:%(rdir)s"'
    # Command for copying the data in after the calculation
    copy_in_cmd='rsync -a "%(user)s@%(host)s:%(rdir)s" "%(ldir)s"'
    # Template for the PBS batch job
    pbs_template=''
    # Command to check the state of the job
    pbs_check_cmd='''%(qstat_tool)s -f %(jobid)s |grep job_state |awk '{print $3}' '''
    # Access data
    host=''
    user=''
    # Location:
    # local working directory
    wdir='.'
    # Remote working directory relative to the home directory or absolute
    rdir='.'
    # Repetition timer (seconds) for checking the state of the job.
    job_check_time=15
    def __init__(self, restart=None, ignore_bad_restart_file=False, label=None,
                    atoms=None, calc=None, block=False, **kwargs):
        '''Basic calculator implementation.
        restart: str
            Prefix for restart file. May contain a directory. Default
            is None: don't restart.
        ignore_bad_restart_file: bool
            Ignore broken or missing restart file. By default, it is an
            error if the restart file is missing or broken.
        label: str
            Name used for all files. May contain a directory.
        atoms: Atoms object
            Optional Atoms object to which the calculator will be
            attached. When restarting, atoms will get its positions and
            unit-cell updated from file.
        calc: Calculator
            The actual ASE calculator that the remote execution wraps.
        block: bool
            If True, read_results() waits for the remote job to finish.
        '''
        logging.debug("Calc: %s Label: %s" % (calc, label))
        Calculator.__init__(self, restart, ignore_bad_restart_file, label, atoms, **kwargs)
        # NOTE(review): self.ext is logged here but is not set by this
        # class - presumably provided by the base Calculator; confirm.
        logging.debug("Dir: %s Ext: %s" % (self.directory, self.ext))
        self.calc=calc
        # PBS job id of the submitted job; None until submission.
        self.jobid=None
        self.block=block
    def write_pbs_in(self,properties):
        # Render the PBS batch script for this job into the job directory.
        # NOTE(review): build_command is invoked as
        # self.build_command(self, prop=..., params=...) - the extra
        # positional ``self`` lands in ``prop`` and then the keyword
        # ``prop`` collides with it (TypeError: multiple values for
        # argument 'prop'). Looks broken; confirm intended call.
        with work_dir(self.directory):
            with open(os.path.join(self.directory,'run-ase-calc.pbs'),'w') as fh:
                fh.write(self.pbs_template % {
                    'command': self.build_command(self,prop=properties,
                                params=self.parameters)
                    })
    def build_command(self,prop=['energy'],params={}):
        # Build the full "ssh ... qsub ..." command line from the class
        # templates and the calculator parameters.
        # NOTE(review): mutable default arguments (list/dict) are shared
        # across calls; also ``prop`` and ``params`` are never used in
        # the body - confirm they are intentional.
        cmd=self.qsub_cmd % {
            'qsub_tool': self.qsub_tool,
            'qstat_tool': self.qstat_tool,
            'title': self.label,
            'procs': self.parameters['procs'],
            'rdir': os.path.join(self.parameters['rdir'],os.path.split(self.directory)[-1])
        }
        # Wrap the qsub command into a remote ssh invocation.
        cmd=self.remote_exec_cmd % {
            'command': cmd,
            'user': self.parameters['user'],
            'host': self.parameters['host']
        }
        return cmd
    def write_input(self, atoms=None, properties=['energy'], system_changes=all_changes):
        '''Write input file(s) locally and rsync them to the remote host.'''
        # NOTE(review): self.calc.write_input is called with ``self`` as
        # its first argument - valid only if self.calc is a class (an
        # unbound method), not a calculator instance; confirm.
        with work_dir(self.directory):
            self.calc.write_input(self, atoms, properties, system_changes)
            self.write_pbs_in(properties)
            # Copy the prepared job directory out to the computing system.
            subprocess.call(self.copy_out_cmd % {
                'ldir': self.directory,
                'rdir': self.parameters['rdir'],
                'user': self.parameters['user'],
                'host': self.parameters['host']
            }, shell=True)
    def job_ready(self):
        # Query the remote queue for the job state; anything other than
        # Queued/Running counts as "ready".
        try :
            cmd=self.remote_exec_cmd % {
                'command': self.pbs_check_cmd % {
                    'qsub_tool': self.qsub_tool,
                    'qstat_tool': self.qstat_tool,
                    'jobid':self.jobid
                },
                'user': self.parameters['user'],
                'host': self.parameters['host']
            }
            state=subprocess.check_output(cmd, shell=True).split()[-1]
        except (subprocess.CalledProcessError, IndexError) :
            # Unknown state. We assume it has finished and continue.
            state='N'
        # NOTE(review): under Python 3 check_output returns bytes, so
        # ``state`` can never equal the str 'Q'/'R' and this always
        # returns True - confirm the intended Python version.
        return not (state in ['Q','R'])
    def run_calculation(self, atoms=None, properties=['energy'],
                            system_changes=all_changes):
        '''
        Internal calculation executor. We cannot use FileIOCalculator
        directly since we need to support remote execution.

        This calculator is different from others:
        it prepares the directory, launches the remote process and
        raises an exception to signal that we need to come back for
        results when the job is finished.
        '''
        self.calc.calculate(self, atoms, properties, system_changes)
        self.write_input(self.atoms, properties, system_changes)
        if self.command is None:
            raise RuntimeError('Please configure Remote calculator!')
        olddir = os.getcwd()
        errorcode=0
        try:
            os.chdir(self.directory)
            output = subprocess.check_output(self.command, shell=True)
            # First token of the submission output is the PBS job id.
            self.jobid=output.split()[0]
            self.submited=True
            #print "Job %s submitted. Waiting for it." % (self.jobid)
            # Waiting loop. To be removed.
        except subprocess.CalledProcessError as e:
            errorcode=e.returncode
        finally:
            os.chdir(olddir)
        if errorcode:
            raise RuntimeError('%s returned an error: %d' %
                               (self.name, errorcode))
        self.read_results()
    def read_results(self):
        """Read energy, forces, ... from output file(s)."""
        # NOTE(review): ``self.submited`` is only set in run_calculation;
        # calling read_results() before a submission raises
        # AttributeError - confirm whether a class default is intended.
        if self.submited:
            # The job has been submitted. Check the state.
            if not self.job_ready() :
                if self.block :
                    # Blocking mode: poll until the queue releases the job.
                    while not self.job_ready() :
                        time.sleep(self.job_check_time)
                else :
                    raise CalcNotReadyError
            # Assume the calc finished. Copy the files back.
            subprocess.call(self.copy_in_cmd % {
                'ldir': self.wdir,
                'rdir': os.path.join(self.parameters['rdir'],os.path.split(self.directory)[-1]),
                'user': self.parameters['user'],
                'host': self.parameters['host']
            }, shell=True)
            fn=os.path.join(self.directory,'pw.out')
            # Read the penultimate line of the output file.
            try:
                ln=open(fn).readlines()[-2]
                if ln.find('JOB DONE.')>-1 :
                    # Job is done; we can read the output.
                    r=read_quantumespresso_textoutput(fn)
                    self.submited=False
                    self.jobid=None
                else :
                    # Job not ready.
                    raise CalcNotReadyError
            except (IOError, IndexError) :
                # Job not ready (missing or too-short output file).
                raise CalcNotReadyError
        # All is fine - really read the results.
        self.calc.read_results(self)
    @classmethod
    def ParallelCalculate(cls,syslst,properties=['energy'],system_changes=all_changes):
        '''
        Run a series of calculations in parallel using (implicitly) some
        remote machine/cluster. The function returns the list of systems
        ready for the extraction of calculated properties.
        '''
        print('Launching:',end=' ')
        sys.stdout.flush()
        # Phase 1: fire off all jobs in non-blocking mode.
        for n,s in enumerate(syslst):
            try :
                s.calc.block=False
                s.calc.calculate(atoms=s,properties=properties,system_changes=system_changes)
            except CalcNotReadyError:
                # Job submitted but not finished; collect it in phase 2.
                s.calc.block=True
            print(n+1, end=' ')
            sys.stdout.flush()
        print()
        print(' Done:', end=' ')
        sys.stdout.flush()
        # Phase 2: block on each job in turn and read its results.
        for n,s in enumerate(syslst):
            s.calc.read_results()
            print( n+1, end=' ')
            sys.stdout.flush()
        print()
        return syslst
verbose=True
class __PCalcProc(Process):
    '''
    Internal helper class representing a calculation process isolated
    from the rest of the ASE script. The process (not thread) runs in
    a separate directory, created on-the-fly and removed at the end
    if ``cleanup`` is true and we are in blocking (default) mode.

    In this mode it is vital for the calculator to read in all the
    results after the run since the files will be removed as soon as the
    "calculate" function terminates. You can pass False to the cleanup
    argument to prevent the clean-up. This is very useful for debugging.

    The process waits without any time-out for the calculation to finish.
    This is great for short and simple calculations and quick testing.
    You run the job and get back your results.
    '''
    def __init__(self, iq, oq, calc, prefix, cleanup=True):
        # iq/oq: multiprocessing queues; iq delivers (index, system)
        # pairs, oq receives the finished [index, system] results.
        Process.__init__(self)
        self.calc=calc
        self.basedir=os.getcwd()
        # Private scratch directory for this worker process.
        self.place=tempfile.mkdtemp(prefix=prefix, dir=self.basedir)
        self.iq=iq
        self.oq=oq
        self.CleanUp=cleanup
    def run(self):
        with work_dir(self.place) :
            n,system=self.iq.get()
            # Each worker gets its own calculator copy so parallel runs
            # cannot share mutable state.
            system.set_calculator(deepcopy(self.calc))
            system.get_calculator().block=True
            system.get_calculator().working_dir=self.place
            #print("Start at :", self.place)
            # Siesta is driven through get_potential_energy; everything
            # else via the calculator's calculate() method.
            if hasattr(self.calc, 'name') and self.calc.name=='Siesta':
                system.get_potential_energy()
            else:
                system.get_calculator().calculate(system)
            #print("Finito: ", os.getcwd(), system.get_volume(), system.get_pressure())
            # Hand the finished system back, tagged with its index so the
            # caller can restore the original ordering.
            self.oq.put([n,system])
            if self.CleanUp :
                system.get_calculator().clean()
                # Leave the scratch dir before deleting it.
                os.chdir(self.basedir)
                shutil.rmtree(self.place, ignore_errors=True)
# Testing routines using VASP as a calculator in the cluster environment.
# TODO: Make it calculator/environment agnostic
if __name__ == '__main__':
    # Self-test / demo using VASP in the cluster environment. Requires a
    # working VASP setup plus the ``elastic`` and pylab packages.
    from ase.lattice.spacegroup import crystal
    from ase.units import GPa
    import elastic
    import numpy
    from pylab import *

    a = 4.291
    # Rock-salt MgO conventional cell (Fm-3m, spacegroup 225).
    MgO = crystal(['Mg', 'O'], [(0, 0, 0), (0.5, 0.5, 0.5)], spacegroup=225,
                  cellpar=[a, a, a, 90, 90, 90])

    ##################################
    # Provide your own calculator here
    ##################################
    calc=ClusterVasp(nodes=1,ppn=8)
    # The calculator must be runnable in an isolated directory
    # Without disturbing other running instances of the same calculator
    # They are run in separate processes (not threads!)
    MgO.set_calculator(calc)
    calc.set(prec = 'Accurate', xc = 'PBE', lreal = False, isif=2, nsw=20, ibrion=2, kpts=[1,1,1])
    # NOTE(review): Atoms.get_pressure is not a standard ASE method -
    # presumably added by the ``elastic`` package import above; confirm.
    print("Residual pressure: %.3f GPa" % (MgO.get_pressure()/GPa))
    calc.clean()

    # Scan the lattice constant from -5% to +5% and compute the
    # pressure of each scaled cell in parallel.
    systems=[]
    for av in numpy.linspace(a*0.95,a*1.05,5):
        systems.append(crystal(['Mg', 'O'], [(0, 0, 0), (0.5, 0.5, 0.5)], spacegroup=225,
                       cellpar=[av, av, av, 90, 90, 90]))
    pcalc=ClusterVasp(nodes=1,ppn=8)
    pcalc.set(prec = 'Accurate', xc = 'PBE', lreal = False, isif=2, nsw=20, ibrion=2, kpts=[1,1,1])
    # NOTE(review): ParCalculate is not defined in this file - probably
    # imported from elsewhere in the package; confirm before running.
    res=ParCalculate(systems,pcalc)

    v=[]
    p=[]
    for s in res :
        v.append(s.get_volume())
        p.append(s.get_pressure()/GPa)
    plot(v,p,'o')
    show()
|
jochym/Elastic | parcalc/parcalc.py | ClusterVasp.prepare_calc_dir | python | def prepare_calc_dir(self):
'''
Prepare the calculation directory for VASP execution.
This needs to be re-implemented for each local setup.
The following code reflects just my particular setup.
'''
with open("vasprun.conf","w") as f:
f.write('NODES="nodes=%s:ppn=%d"\n' % (self.nodes, self.ppn))
f.write('BLOCK=%d\n' % (self.block,))
if self.ncl :
f.write('NCL=%d\n' % (1,)) | Prepare the calculation directory for VASP execution.
This needs to be re-implemented for each local setup.
The following code reflects just my particular setup. | train | https://github.com/jochym/Elastic/blob/8daae37d0c48aab8dfb1de2839dab02314817f95/parcalc/parcalc.py#L114-L124 | null | class ClusterVasp(Vasp):
'''
Adaptation of VASP calculator to the cluster environment where you often
have to make some preparations before job submission. You can easily
adapt this class to your particular environment. It is also easy to
use this as a template for other type of calculator.
'''
def __init__(self, nodes=1, ppn=8, block=True, ncl=False, **kwargs):
Vasp.__init__(self, **kwargs)
self.nodes=nodes
self.ppn=ppn
self.block=block
self.ncl=ncl
self.calc_running=False
self.working_dir=os.getcwd()
#print(self.nodes, self.ppn)
def calc_finished(self):
'''
Check if the lockfile is in the calculation directory.
It is removed by the script at the end regardless of the
success of the calculation. This is totally tied to
implementation and you need to implement your own scheme!
'''
#print_stack(limit=5)
if not self.calc_running :
#print('Calc running:',self.calc_running)
return True
else:
# The calc is marked as running check if this is still true
# We do it by external scripts. You need to write these
# scripts for your own system.
# See examples/scripts directory for examples.
with work_dir(self.working_dir) :
o=check_output(['check-job'])
#print('Status',o)
if o[0] in b'R' :
# Still running - we do nothing to preserve the state
return False
else :
# The job is not running maybe it finished maybe crashed
# We hope for the best at this point ad pass to the
# Standard update function
return True
def set(self,**kwargs):
if 'block' in kwargs :
self.block=kwargs['block']
del kwargs['block']
else :
self.block=True
if 'ncl' in kwargs :
self.ncl=kwargs['ncl']
del kwargs['ncl']
else :
self.ncl=False
Vasp.set(self, **kwargs)
def clean(self):
with work_dir(self.working_dir) :
Vasp.clean(self)
def update(self, atoms):
if self.calc_running :
# we have started the calculation and have
# nothing to read really. But we need to check
# first if this is still true.
if self.calc_finished():
# We were running but recently finished => read the results
# This is a piece of copy-and-paste programming
# This is a copy of code from Vasp.calculate
self.calc_running=False
with work_dir(self.working_dir) :
atoms_sorted = ase.io.read('CONTCAR', format='vasp')
if self.int_params['ibrion'] > -1 and self.int_params['nsw'] > 0:
# Update atomic positions and unit cell with the ones read
# from CONTCAR.
atoms.positions = atoms_sorted[self.resort].positions
atoms.cell = atoms_sorted.cell
self.converged = self.read_convergence()
Vasp.set_results(self,atoms)
return
else :
return
# We are not in the middle of calculation.
# Update as normal
Vasp.update(self, atoms)
def set_results(self, atoms):
with work_dir(self.working_dir) :
#print('set_results')
Vasp.set_results(self, atoms)
def run(self):
'''
Blocking/Non-blocing run method.
In blocking mode it just runs parent run method.
In non-blocking mode it raises the __NonBlockingRunException
to bail out of the processing of standard calculate method
(or any other method in fact) and signal that the data is not
ready to be collected.
'''
# This is only called from self.calculate - thus
# we do not need to change to working_dir
# since calculate already did
Vasp.run(self)
if not self.block :
#print('Interrupt processing of calculate', os.getcwd())
raise _NonBlockingRunException
def calculate(self, atoms):
'''
Blocking/Non-blocking calculate method
If we are in blocking mode we just run, wait for
the job to end and read in the results. Easy ...
The non-blocking mode is a little tricky.
We need to start the job and guard against it reading
back possible old data from the directory - the queuing
system may not even started the job when we get control
back from the starting script. Thus anything we read
after invocation is potentially garbage - even if it
is a converged calculation data.
We handle it by custom run function above which
raises an exception after submitting the job.
This skips post-run processing in the calculator, preserves
the state of the data and signals here that we need to wait
for results.
'''
with work_dir(self.working_dir) :
self.prepare_calc_dir()
self.calc_running=True
#print('Run VASP.calculate')
try :
Vasp.calculate(self, atoms)
self.calc_running=False
#print('VASP.calculate returned')
except _NonBlockingRunException as e:
# We have nothing else to docs
# until the job finishes
#print('Interrupted ', self.working_dir, os.getcwd())
pass
|
jochym/Elastic | parcalc/parcalc.py | ClusterVasp.calc_finished | python | def calc_finished(self):
'''
Check if the lockfile is in the calculation directory.
It is removed by the script at the end regardless of the
success of the calculation. This is totally tied to
implementation and you need to implement your own scheme!
'''
#print_stack(limit=5)
if not self.calc_running :
#print('Calc running:',self.calc_running)
return True
else:
# The calc is marked as running check if this is still true
# We do it by external scripts. You need to write these
# scripts for your own system.
# See examples/scripts directory for examples.
with work_dir(self.working_dir) :
o=check_output(['check-job'])
#print('Status',o)
if o[0] in b'R' :
# Still running - we do nothing to preserve the state
return False
else :
# The job is not running maybe it finished maybe crashed
# We hope for the best at this point ad pass to the
# Standard update function
return True | Check if the lockfile is in the calculation directory.
It is removed by the script at the end regardless of the
success of the calculation. This is totally tied to
implementation and you need to implement your own scheme! | train | https://github.com/jochym/Elastic/blob/8daae37d0c48aab8dfb1de2839dab02314817f95/parcalc/parcalc.py#L127-L153 | null | class ClusterVasp(Vasp):
'''
Adaptation of VASP calculator to the cluster environment where you often
have to make some preparations before job submission. You can easily
adapt this class to your particular environment. It is also easy to
use this as a template for other type of calculator.
'''
def __init__(self, nodes=1, ppn=8, block=True, ncl=False, **kwargs):
Vasp.__init__(self, **kwargs)
self.nodes=nodes
self.ppn=ppn
self.block=block
self.ncl=ncl
self.calc_running=False
self.working_dir=os.getcwd()
def prepare_calc_dir(self):
'''
Prepare the calculation directory for VASP execution.
This needs to be re-implemented for each local setup.
The following code reflects just my particular setup.
'''
with open("vasprun.conf","w") as f:
f.write('NODES="nodes=%s:ppn=%d"\n' % (self.nodes, self.ppn))
f.write('BLOCK=%d\n' % (self.block,))
if self.ncl :
f.write('NCL=%d\n' % (1,))
#print(self.nodes, self.ppn)
def set(self,**kwargs):
if 'block' in kwargs :
self.block=kwargs['block']
del kwargs['block']
else :
self.block=True
if 'ncl' in kwargs :
self.ncl=kwargs['ncl']
del kwargs['ncl']
else :
self.ncl=False
Vasp.set(self, **kwargs)
def clean(self):
with work_dir(self.working_dir) :
Vasp.clean(self)
def update(self, atoms):
if self.calc_running :
# we have started the calculation and have
# nothing to read really. But we need to check
# first if this is still true.
if self.calc_finished():
# We were running but recently finished => read the results
# This is a piece of copy-and-paste programming
# This is a copy of code from Vasp.calculate
self.calc_running=False
with work_dir(self.working_dir) :
atoms_sorted = ase.io.read('CONTCAR', format='vasp')
if self.int_params['ibrion'] > -1 and self.int_params['nsw'] > 0:
# Update atomic positions and unit cell with the ones read
# from CONTCAR.
atoms.positions = atoms_sorted[self.resort].positions
atoms.cell = atoms_sorted.cell
self.converged = self.read_convergence()
Vasp.set_results(self,atoms)
return
else :
return
# We are not in the middle of calculation.
# Update as normal
Vasp.update(self, atoms)
def set_results(self, atoms):
with work_dir(self.working_dir) :
#print('set_results')
Vasp.set_results(self, atoms)
def run(self):
'''
Blocking/Non-blocing run method.
In blocking mode it just runs parent run method.
In non-blocking mode it raises the __NonBlockingRunException
to bail out of the processing of standard calculate method
(or any other method in fact) and signal that the data is not
ready to be collected.
'''
# This is only called from self.calculate - thus
# we do not need to change to working_dir
# since calculate already did
Vasp.run(self)
if not self.block :
#print('Interrupt processing of calculate', os.getcwd())
raise _NonBlockingRunException
def calculate(self, atoms):
'''
Blocking/Non-blocking calculate method
If we are in blocking mode we just run, wait for
the job to end and read in the results. Easy ...
The non-blocking mode is a little tricky.
We need to start the job and guard against it reading
back possible old data from the directory - the queuing
system may not even started the job when we get control
back from the starting script. Thus anything we read
after invocation is potentially garbage - even if it
is a converged calculation data.
We handle it by custom run function above which
raises an exception after submitting the job.
This skips post-run processing in the calculator, preserves
the state of the data and signals here that we need to wait
for results.
'''
with work_dir(self.working_dir) :
self.prepare_calc_dir()
self.calc_running=True
#print('Run VASP.calculate')
try :
Vasp.calculate(self, atoms)
self.calc_running=False
#print('VASP.calculate returned')
except _NonBlockingRunException as e:
# We have nothing else to docs
# until the job finishes
#print('Interrupted ', self.working_dir, os.getcwd())
pass
|
jochym/Elastic | parcalc/parcalc.py | ClusterVasp.calculate | python | def calculate(self, atoms):
'''
Blocking/Non-blocking calculate method
If we are in blocking mode we just run, wait for
the job to end and read in the results. Easy ...
The non-blocking mode is a little tricky.
We need to start the job and guard against it reading
back possible old data from the directory - the queuing
system may not even started the job when we get control
back from the starting script. Thus anything we read
after invocation is potentially garbage - even if it
is a converged calculation data.
We handle it by custom run function above which
raises an exception after submitting the job.
This skips post-run processing in the calculator, preserves
the state of the data and signals here that we need to wait
for results.
'''
with work_dir(self.working_dir) :
self.prepare_calc_dir()
self.calc_running=True
#print('Run VASP.calculate')
try :
Vasp.calculate(self, atoms)
self.calc_running=False
#print('VASP.calculate returned')
except _NonBlockingRunException as e:
# We have nothing else to docs
# until the job finishes
#print('Interrupted ', self.working_dir, os.getcwd())
pass | Blocking/Non-blocking calculate method
If we are in blocking mode we just run, wait for
the job to end and read in the results. Easy ...
The non-blocking mode is a little tricky.
We need to start the job and guard against it reading
back possible old data from the directory - the queuing
system may not even started the job when we get control
back from the starting script. Thus anything we read
after invocation is potentially garbage - even if it
is a converged calculation data.
We handle it by custom run function above which
raises an exception after submitting the job.
This skips post-run processing in the calculator, preserves
the state of the data and signals here that we need to wait
for results. | train | https://github.com/jochym/Elastic/blob/8daae37d0c48aab8dfb1de2839dab02314817f95/parcalc/parcalc.py#L221-L255 | null | class ClusterVasp(Vasp):
'''
Adaptation of VASP calculator to the cluster environment where you often
have to make some preparations before job submission. You can easily
adapt this class to your particular environment. It is also easy to
use this as a template for other type of calculator.
'''
def __init__(self, nodes=1, ppn=8, block=True, ncl=False, **kwargs):
Vasp.__init__(self, **kwargs)
self.nodes=nodes
self.ppn=ppn
self.block=block
self.ncl=ncl
self.calc_running=False
self.working_dir=os.getcwd()
def prepare_calc_dir(self):
'''
Prepare the calculation directory for VASP execution.
This needs to be re-implemented for each local setup.
The following code reflects just my particular setup.
'''
with open("vasprun.conf","w") as f:
f.write('NODES="nodes=%s:ppn=%d"\n' % (self.nodes, self.ppn))
f.write('BLOCK=%d\n' % (self.block,))
if self.ncl :
f.write('NCL=%d\n' % (1,))
#print(self.nodes, self.ppn)
def calc_finished(self):
'''
Check if the lockfile is in the calculation directory.
It is removed by the script at the end regardless of the
success of the calculation. This is totally tied to
implementation and you need to implement your own scheme!
'''
#print_stack(limit=5)
if not self.calc_running :
#print('Calc running:',self.calc_running)
return True
else:
# The calc is marked as running check if this is still true
# We do it by external scripts. You need to write these
# scripts for your own system.
# See examples/scripts directory for examples.
with work_dir(self.working_dir) :
o=check_output(['check-job'])
#print('Status',o)
if o[0] in b'R' :
# Still running - we do nothing to preserve the state
return False
else :
# The job is not running maybe it finished maybe crashed
# We hope for the best at this point ad pass to the
# Standard update function
return True
def set(self,**kwargs):
if 'block' in kwargs :
self.block=kwargs['block']
del kwargs['block']
else :
self.block=True
if 'ncl' in kwargs :
self.ncl=kwargs['ncl']
del kwargs['ncl']
else :
self.ncl=False
Vasp.set(self, **kwargs)
def clean(self):
with work_dir(self.working_dir) :
Vasp.clean(self)
def update(self, atoms):
if self.calc_running :
# we have started the calculation and have
# nothing to read really. But we need to check
# first if this is still true.
if self.calc_finished():
# We were running but recently finished => read the results
# This is a piece of copy-and-paste programming
# This is a copy of code from Vasp.calculate
self.calc_running=False
with work_dir(self.working_dir) :
atoms_sorted = ase.io.read('CONTCAR', format='vasp')
if self.int_params['ibrion'] > -1 and self.int_params['nsw'] > 0:
# Update atomic positions and unit cell with the ones read
# from CONTCAR.
atoms.positions = atoms_sorted[self.resort].positions
atoms.cell = atoms_sorted.cell
self.converged = self.read_convergence()
Vasp.set_results(self,atoms)
return
else :
return
# We are not in the middle of calculation.
# Update as normal
Vasp.update(self, atoms)
def set_results(self, atoms):
with work_dir(self.working_dir) :
#print('set_results')
Vasp.set_results(self, atoms)
def run(self):
'''
Blocking/Non-blocing run method.
In blocking mode it just runs parent run method.
In non-blocking mode it raises the __NonBlockingRunException
to bail out of the processing of standard calculate method
(or any other method in fact) and signal that the data is not
ready to be collected.
'''
# This is only called from self.calculate - thus
# we do not need to change to working_dir
# since calculate already did
Vasp.run(self)
if not self.block :
#print('Interrupt processing of calculate', os.getcwd())
raise _NonBlockingRunException
|
jochym/Elastic | parcalc/parcalc.py | RemoteCalculator.write_input | python | def write_input(self, atoms=None, properties=['energy'], system_changes=all_changes):
'''Write input file(s).'''
with work_dir(self.directory):
self.calc.write_input(self, atoms, properties, system_changes)
self.write_pbs_in(properties)
subprocess.call(self.copy_out_cmd % {
'ldir': self.directory,
'rdir': self.parameters['rdir'],
'user': self.parameters['user'],
'host': self.parameters['host']
}, shell=True) | Write input file(s). | train | https://github.com/jochym/Elastic/blob/8daae37d0c48aab8dfb1de2839dab02314817f95/parcalc/parcalc.py#L394-L404 | null | class RemoteCalculator(Calculator):
'''
Remote calculator based on ASE calculator class.
This class is only involved with the machanics of remotly executing
the software and transporting the data. The calculation is
delegated to the actual calculator class.
'''
# Queue system submit command
qsub_tool='qsub'
qstat_tool='qstat'
qsub_cmd='cd %(rdir)s ; %(qsub_tool)s -N %(title)s -l procs=%(procs)d ./run-pw.pbs'
# Remote execution command
remote_exec_cmd='ssh %(user)s@%(host)s "%(command)s"'
# If you cannot mount the data directory into your system it is best
# to use the rsync command to transfer the results back into the system.
# Command for copying the data out to the computing system
copy_out_cmd='rsync -a "%(ldir)s" "%(user)s@%(host)s:%(rdir)s"'
# Command for copying the data in after the calculation
copy_in_cmd='rsync -a "%(user)s@%(host)s:%(rdir)s" "%(ldir)s"'
# Template for the PBS batch job
pbs_template=''
# Command to check the state of the job
pbs_check_cmd='''%(qstat_tool)s -f %(jobid)s |grep job_state |awk '{print $3}' '''
# Access data
host=''
user=''
# Location:
# local working directory
wdir='.'
# Remote working directory relative to the home directory or absolute
rdir='.'
# Repetition timer (seconds) for checkin the state of the job.
job_check_time=15
def __init__(self, restart=None, ignore_bad_restart_file=False, label=None,
             atoms=None, calc=None, block=False, **kwargs):
    '''Create a remote-execution wrapper around an actual ASE calculator.

    restart: str
        Prefix for restart file. May contain a directory. Default
        is None: don't restart.
    ignore_bad_restart_file: bool
        Ignore broken or missing restart file. By default, it is an
        error if the restart file is missing or broken.
    label: str
        Name used for all files. May contain a directory.
    atoms: Atoms object
        Optional Atoms object to which the calculator will be
        attached. When restarting, atoms will get its positions and
        unit-cell updated from file.
    calc: Calculator
        The actual ASE calculator that performs the computation; this
        class only handles remote submission and data transfer.
    block: bool
        If True, read_results() polls until the remote job finishes;
        if False, it raises CalcNotReadyError while the job is queued
        or running.
    '''
    logging.debug("Calc: %s Label: %s" % (calc, label))
    Calculator.__init__(self, restart, ignore_bad_restart_file, label, atoms, **kwargs)
    logging.debug("Dir: %s Ext: %s" % (self.directory, self.ext))
    # Wrapped calculator and queue-tracking state.
    self.calc=calc
    self.jobid=None   # queue job id, set after submission
    self.block=block  # blocking vs. non-blocking result retrieval
def write_pbs_in(self, properties):
    '''Write the PBS batch script ``run-ase-calc.pbs`` into the calc directory.

    properties: list of str
        Names of the properties the remote job should compute; forwarded
        to build_command() when filling in the batch-script template.
    '''
    with work_dir(self.directory):
        with open(os.path.join(self.directory, 'run-ase-calc.pbs'), 'w') as fh:
            # BUGFIX: the original called self.build_command(self, prop=..., ...),
            # passing self twice on a bound method, which raises
            # TypeError: got multiple values for argument 'prop'.
            fh.write(self.pbs_template % {
                'command': self.build_command(prop=properties,
                                              params=self.parameters)
            })
def build_command(self, prop=None, params=None):
    '''Build the shell command that submits the batch job on the remote host.

    prop: list of str, optional
        Properties to compute. Currently not referenced by the command
        template; kept for interface compatibility. Defaults to ['energy'].
    params: dict, optional
        Extra parameters. Currently not referenced (self.parameters is
        used instead); kept for interface compatibility.

    Returns the full ssh command string that, when run locally, executes
    the qsub submission inside the remote per-calculation directory.
    '''
    # Mutable default arguments replaced with None sentinels (shared
    # list/dict defaults are a Python pitfall even when unused).
    if prop is None:
        prop = ['energy']
    if params is None:
        params = {}
    # Submission command executed in the remote per-calculation directory.
    cmd = self.qsub_cmd % {
        'qsub_tool': self.qsub_tool,
        'qstat_tool': self.qstat_tool,
        'title': self.label,
        'procs': self.parameters['procs'],
        'rdir': os.path.join(self.parameters['rdir'], os.path.split(self.directory)[-1])
    }
    # Wrap the submission in an ssh invocation on the target host.
    cmd = self.remote_exec_cmd % {
        'command': cmd,
        'user': self.parameters['user'],
        'host': self.parameters['host']
    }
    return cmd
def job_ready(self):
    '''Return True when the submitted job is no longer queued (Q) or running (R).

    Any failure to query the queue (ssh/qstat error, empty output) is
    treated as "job finished" so processing can continue.
    '''
    try:
        cmd = self.remote_exec_cmd % {
            'command': self.pbs_check_cmd % {
                'qsub_tool': self.qsub_tool,
                'qstat_tool': self.qstat_tool,
                'jobid': self.jobid
            },
            'user': self.parameters['user'],
            'host': self.parameters['host']
        }
        # BUGFIX: decode the bytes returned by check_output on Python 3;
        # otherwise the 'Q'/'R' membership test below can never match.
        state = subprocess.check_output(cmd, shell=True).split()[-1].decode()
    except (subprocess.CalledProcessError, IndexError):
        # Unknown state. We assume it has finished and continue.
        state = 'N'
    return not (state in ['Q', 'R'])
def run_calculation(self, atoms=None, properties=None, system_changes=all_changes):
    '''Prepare the directory and submit the remote job.

    Internal calculation executor. We cannot use FileIOCalculator
    directly since we need to support remote execution: this method
    prepares the directory, launches the remote submission, and then
    read_results() raises CalcNotReadyError to signal that we need to
    come back for results when the job is finished.

    atoms: Atoms object to calculate (forwarded to the wrapped calculator).
    properties: list of str, defaults to ['energy'] (None sentinel avoids
        a shared mutable default).
    system_changes: ASE change flags forwarded to the wrapped calculator.

    Raises RuntimeError if no submit command is configured or the
    submission command exits with a non-zero status.
    '''
    if properties is None:
        properties = ['energy']
    self.calc.calculate(self, atoms, properties, system_changes)
    self.write_input(self.atoms, properties, system_changes)
    if self.command is None:
        raise RuntimeError('Please configure Remote calculator!')
    olddir = os.getcwd()
    errorcode = 0
    try:
        os.chdir(self.directory)
        output = subprocess.check_output(self.command, shell=True)
        # BUGFIX: check_output returns bytes on Python 3; decode so the
        # job id interpolates cleanly into the later qstat command string
        # (a bytes jobid would render as "b'...'" inside the command).
        self.jobid = output.split()[0].decode()
        self.submited = True
    except subprocess.CalledProcessError as e:
        errorcode = e.returncode
    finally:
        # Always restore the working directory, even on failure.
        os.chdir(olddir)
    if errorcode:
        raise RuntimeError('%s returned an error: %d' %
                           (self.name, errorcode))
    self.read_results()
def read_results(self):
    """Read energy, forces, ... from output file(s).

    If the job is still queued/running: poll until done when self.block
    is True, otherwise raise CalcNotReadyError so the caller can come
    back later. Once the queue reports the job finished, rsync the
    remote directory back, verify the output file ends with
    'JOB DONE.', and delegate parsing to the wrapped calculator.
    """
    if self.submited:
        # The job has been submitted. Check the state.
        if not self.job_ready() :
            if self.block :
                # Blocking mode: poll the queue until the job leaves it.
                while not self.job_ready() :
                    time.sleep(self.job_check_time)
            else :
                raise CalcNotReadyError
        # Assume the calc finished. Copy the files back.
        subprocess.call(self.copy_in_cmd % {
            'ldir': self.wdir,
            'rdir': os.path.join(self.parameters['rdir'],os.path.split(self.directory)[-1]),
            'user': self.parameters['user'],
            'host': self.parameters['host']
        }, shell=True)
        fn=os.path.join(self.directory,'pw.out')
        # Read the penultimate line of the output file; Quantum ESPRESSO
        # prints 'JOB DONE.' there when the run completed normally.
        try:
            ln=open(fn).readlines()[-2]
            if ln.find('JOB DONE.')>-1 :
                # Job is done, we can read the output.
                r=read_quantumespresso_textoutput(fn)
                self.submited=False
                self.jobid=None
            else :
                # Output present but incomplete: job not ready.
                raise CalcNotReadyError
        except (IOError, IndexError) :
            # Missing or too-short output file: job not ready.
            raise CalcNotReadyError
    # All is fine - really read the results via the wrapped calculator.
    self.calc.read_results(self)
@classmethod
def ParallelCalculate(cls, syslst, properties=['energy'], system_changes=all_changes):
    '''Submit a batch of systems to the remote queue and collect the results.

    Every system in syslst is first launched in non-blocking mode (a
    CalcNotReadyError just flips that calculator into blocking mode);
    afterwards each calculator's results are read back in order.
    Returns the same list of systems, now ready for property extraction.
    '''
    print('Launching:', end=' ')
    sys.stdout.flush()
    for idx, system in enumerate(syslst, start=1):
        try:
            system.calc.block = False
            system.calc.calculate(atoms=system,
                                  properties=properties,
                                  system_changes=system_changes)
        except CalcNotReadyError:
            system.calc.block = True
        print(idx, end=' ')
        sys.stdout.flush()
    print()
    print(' Done:', end=' ')
    sys.stdout.flush()
    for idx, system in enumerate(syslst, start=1):
        system.calc.read_results()
        print(idx, end=' ')
        sys.stdout.flush()
    print()
    return syslst
|
jochym/Elastic | parcalc/parcalc.py | RemoteCalculator.run_calculation | python | def run_calculation(self, atoms=None, properties=['energy'],
system_changes=all_changes):
'''
Internal calculation executor. We cannot use FileIOCalculator
directly since we need to support remote execution.
This calculator is different from others.
It prepares the directory, launches the remote process and
raises the exception to signal that we need to come back for results
when the job is finished.
'''
self.calc.calculate(self, atoms, properties, system_changes)
self.write_input(self.atoms, properties, system_changes)
if self.command is None:
raise RuntimeError('Please configure Remote calculator!')
olddir = os.getcwd()
errorcode=0
try:
os.chdir(self.directory)
output = subprocess.check_output(self.command, shell=True)
self.jobid=output.split()[0]
self.submited=True
#print "Job %s submitted. Waiting for it." % (self.jobid)
# Waiting loop. To be removed.
except subprocess.CalledProcessError as e:
errorcode=e.returncode
finally:
os.chdir(olddir)
if errorcode:
raise RuntimeError('%s returned an error: %d' %
(self.name, errorcode))
self.read_results() | Internal calculation executor. We cannot use FileIOCalculator
directly since we need to support remote execution.
This calculator is different from others.
It prepares the directory, launches the remote process and
raises the exception to signal that we need to come back for results
when the job is finished. | train | https://github.com/jochym/Elastic/blob/8daae37d0c48aab8dfb1de2839dab02314817f95/parcalc/parcalc.py#L425-L457 | null | class RemoteCalculator(Calculator):
'''
Remote calculator based on ASE calculator class.
This class is only involved with the mechanics of remotely executing
the software and transporting the data. The calculation is
delegated to the actual calculator class.
'''
# Queue system submit command
qsub_tool='qsub'
qstat_tool='qstat'
qsub_cmd='cd %(rdir)s ; %(qsub_tool)s -N %(title)s -l procs=%(procs)d ./run-pw.pbs'
# Remote execution command
remote_exec_cmd='ssh %(user)s@%(host)s "%(command)s"'
# If you cannot mount the data directory into your system it is best
# to use the rsync command to transfer the results back into the system.
# Command for copying the data out to the computing system
copy_out_cmd='rsync -a "%(ldir)s" "%(user)s@%(host)s:%(rdir)s"'
# Command for copying the data in after the calculation
copy_in_cmd='rsync -a "%(user)s@%(host)s:%(rdir)s" "%(ldir)s"'
# Template for the PBS batch job
pbs_template=''
# Command to check the state of the job
pbs_check_cmd='''%(qstat_tool)s -f %(jobid)s |grep job_state |awk '{print $3}' '''
# Access data
host=''
user=''
# Location:
# local working directory
wdir='.'
# Remote working directory relative to the home directory or absolute
rdir='.'
# Repetition timer (seconds) for checking the state of the job.
job_check_time=15
def __init__(self, restart=None, ignore_bad_restart_file=False, label=None,
             atoms=None, calc=None, block=False, **kwargs):
    '''Create a remote-execution wrapper around an actual ASE calculator.

    restart: str
        Prefix for restart file. May contain a directory. Default
        is None: don't restart.
    ignore_bad_restart_file: bool
        Ignore broken or missing restart file. By default, it is an
        error if the restart file is missing or broken.
    label: str
        Name used for all files. May contain a directory.
    atoms: Atoms object
        Optional Atoms object to which the calculator will be
        attached. When restarting, atoms will get its positions and
        unit-cell updated from file.
    calc: Calculator
        The actual ASE calculator that performs the computation; this
        class only handles remote submission and data transfer.
    block: bool
        If True, read_results() polls until the remote job finishes;
        if False, it raises CalcNotReadyError while the job is queued
        or running.
    '''
    logging.debug("Calc: %s Label: %s" % (calc, label))
    Calculator.__init__(self, restart, ignore_bad_restart_file, label, atoms, **kwargs)
    logging.debug("Dir: %s Ext: %s" % (self.directory, self.ext))
    # Wrapped calculator and queue-tracking state.
    self.calc=calc
    self.jobid=None   # queue job id, set after submission
    self.block=block  # blocking vs. non-blocking result retrieval
def write_pbs_in(self, properties):
    '''Write the PBS batch script ``run-ase-calc.pbs`` into the calc directory.

    properties: list of str
        Names of the properties the remote job should compute; forwarded
        to build_command() when filling in the batch-script template.
    '''
    with work_dir(self.directory):
        with open(os.path.join(self.directory, 'run-ase-calc.pbs'), 'w') as fh:
            # BUGFIX: the original called self.build_command(self, prop=..., ...),
            # passing self twice on a bound method, which raises
            # TypeError: got multiple values for argument 'prop'.
            fh.write(self.pbs_template % {
                'command': self.build_command(prop=properties,
                                              params=self.parameters)
            })
def build_command(self, prop=None, params=None):
    '''Build the shell command that submits the batch job on the remote host.

    prop: list of str, optional
        Properties to compute. Currently not referenced by the command
        template; kept for interface compatibility. Defaults to ['energy'].
    params: dict, optional
        Extra parameters. Currently not referenced (self.parameters is
        used instead); kept for interface compatibility.

    Returns the full ssh command string that, when run locally, executes
    the qsub submission inside the remote per-calculation directory.
    '''
    # Mutable default arguments replaced with None sentinels (shared
    # list/dict defaults are a Python pitfall even when unused).
    if prop is None:
        prop = ['energy']
    if params is None:
        params = {}
    # Submission command executed in the remote per-calculation directory.
    cmd = self.qsub_cmd % {
        'qsub_tool': self.qsub_tool,
        'qstat_tool': self.qstat_tool,
        'title': self.label,
        'procs': self.parameters['procs'],
        'rdir': os.path.join(self.parameters['rdir'], os.path.split(self.directory)[-1])
    }
    # Wrap the submission in an ssh invocation on the target host.
    cmd = self.remote_exec_cmd % {
        'command': cmd,
        'user': self.parameters['user'],
        'host': self.parameters['host']
    }
    return cmd
def write_input(self, atoms=None, properties=None, system_changes=all_changes):
    '''Write input file(s) locally, add the PBS script, and rsync everything out.

    atoms: Atoms object
        System to write input for (forwarded to the wrapped calculator).
    properties: list of str, optional
        Properties the job should compute; defaults to ['energy'] (None
        sentinel avoids a shared mutable default list).
    system_changes: list
        ASE change flags forwarded to the wrapped calculator.
    '''
    if properties is None:
        properties = ['energy']
    with work_dir(self.directory):
        # Delegate input generation to the wrapped calculator; self is
        # passed explicitly so calc writes into this wrapper's directory.
        self.calc.write_input(self, atoms, properties, system_changes)
        self.write_pbs_in(properties)
        # Copy the prepared directory to the remote machine.
        subprocess.call(self.copy_out_cmd % {
            'ldir': self.directory,
            'rdir': self.parameters['rdir'],
            'user': self.parameters['user'],
            'host': self.parameters['host']
        }, shell=True)
def job_ready(self):
    '''Return True when the submitted job is no longer queued (Q) or running (R).

    Any failure to query the queue (ssh/qstat error, empty output) is
    treated as "job finished" so processing can continue.
    '''
    try:
        cmd = self.remote_exec_cmd % {
            'command': self.pbs_check_cmd % {
                'qsub_tool': self.qsub_tool,
                'qstat_tool': self.qstat_tool,
                'jobid': self.jobid
            },
            'user': self.parameters['user'],
            'host': self.parameters['host']
        }
        # BUGFIX: decode the bytes returned by check_output on Python 3;
        # otherwise the 'Q'/'R' membership test below can never match.
        state = subprocess.check_output(cmd, shell=True).split()[-1].decode()
    except (subprocess.CalledProcessError, IndexError):
        # Unknown state. We assume it has finished and continue.
        state = 'N'
    return not (state in ['Q', 'R'])
def run_calculation(self, atoms=None, properties=None, system_changes=all_changes):
    '''Prepare the directory and submit the remote job.

    Internal calculation executor. We cannot use FileIOCalculator
    directly since we need to support remote execution: this method
    prepares the directory, launches the remote submission, and then
    read_results() raises CalcNotReadyError to signal that we need to
    come back for results when the job is finished.

    atoms: Atoms object to calculate (forwarded to the wrapped calculator).
    properties: list of str, defaults to ['energy'] (None sentinel avoids
        a shared mutable default).
    system_changes: ASE change flags forwarded to the wrapped calculator.

    Raises RuntimeError if no submit command is configured or the
    submission command exits with a non-zero status.
    '''
    if properties is None:
        properties = ['energy']
    self.calc.calculate(self, atoms, properties, system_changes)
    self.write_input(self.atoms, properties, system_changes)
    if self.command is None:
        raise RuntimeError('Please configure Remote calculator!')
    olddir = os.getcwd()
    errorcode = 0
    try:
        os.chdir(self.directory)
        output = subprocess.check_output(self.command, shell=True)
        # BUGFIX: check_output returns bytes on Python 3; decode so the
        # job id interpolates cleanly into the later qstat command string
        # (a bytes jobid would render as "b'...'" inside the command).
        self.jobid = output.split()[0].decode()
        self.submited = True
    except subprocess.CalledProcessError as e:
        errorcode = e.returncode
    finally:
        # Always restore the working directory, even on failure.
        os.chdir(olddir)
    if errorcode:
        raise RuntimeError('%s returned an error: %d' %
                           (self.name, errorcode))
    self.read_results()
def read_results(self):
    """Read energy, forces, ... from output file(s).

    If the job is still queued/running: poll until done when self.block
    is True, otherwise raise CalcNotReadyError so the caller can come
    back later. Once the queue reports the job finished, rsync the
    remote directory back, verify the output file ends with
    'JOB DONE.', and delegate parsing to the wrapped calculator.
    """
    if self.submited:
        # The job has been submitted. Check the state.
        if not self.job_ready() :
            if self.block :
                # Blocking mode: poll the queue until the job leaves it.
                while not self.job_ready() :
                    time.sleep(self.job_check_time)
            else :
                raise CalcNotReadyError
        # Assume the calc finished. Copy the files back.
        subprocess.call(self.copy_in_cmd % {
            'ldir': self.wdir,
            'rdir': os.path.join(self.parameters['rdir'],os.path.split(self.directory)[-1]),
            'user': self.parameters['user'],
            'host': self.parameters['host']
        }, shell=True)
        fn=os.path.join(self.directory,'pw.out')
        # Read the penultimate line of the output file; Quantum ESPRESSO
        # prints 'JOB DONE.' there when the run completed normally.
        try:
            ln=open(fn).readlines()[-2]
            if ln.find('JOB DONE.')>-1 :
                # Job is done, we can read the output.
                r=read_quantumespresso_textoutput(fn)
                self.submited=False
                self.jobid=None
            else :
                # Output present but incomplete: job not ready.
                raise CalcNotReadyError
        except (IOError, IndexError) :
            # Missing or too-short output file: job not ready.
            raise CalcNotReadyError
    # All is fine - really read the results via the wrapped calculator.
    self.calc.read_results(self)
@classmethod
def ParallelCalculate(cls, syslst, properties=['energy'], system_changes=all_changes):
    '''Submit a batch of systems to the remote queue and collect the results.

    Every system in syslst is first launched in non-blocking mode (a
    CalcNotReadyError just flips that calculator into blocking mode);
    afterwards each calculator's results are read back in order.
    Returns the same list of systems, now ready for property extraction.
    '''
    print('Launching:', end=' ')
    sys.stdout.flush()
    for idx, system in enumerate(syslst, start=1):
        try:
            system.calc.block = False
            system.calc.calculate(atoms=system,
                                  properties=properties,
                                  system_changes=system_changes)
        except CalcNotReadyError:
            system.calc.block = True
        print(idx, end=' ')
        sys.stdout.flush()
    print()
    print(' Done:', end=' ')
    sys.stdout.flush()
    for idx, system in enumerate(syslst, start=1):
        system.calc.read_results()
        print(idx, end=' ')
        sys.stdout.flush()
    print()
    return syslst
|
jochym/Elastic | parcalc/parcalc.py | RemoteCalculator.read_results | python | def read_results(self):
if self.submited:
# The job has been submitted. Check the state.
if not self.job_ready() :
if self.block :
while not self.job_ready() :
time.sleep(self.job_check_time)
else :
raise CalcNotReadyError
# Assume the calc finished. Copy the files back.
subprocess.call(self.copy_in_cmd % {
'ldir': self.wdir,
'rdir': os.path.join(self.parameters['rdir'],os.path.split(self.directory)[-1]),
'user': self.parameters['user'],
'host': self.parameters['host']
}, shell=True)
fn=os.path.join(self.directory,'pw.out')
# Read the pan-ultimate line of the output file
try:
ln=open(fn).readlines()[-2]
if ln.find('JOB DONE.')>-1 :
# Job is done we can read the output
r=read_quantumespresso_textoutput(fn)
self.submited=False
self.jobid=None
else :
# Job not ready.
raise CalcNotReadyError
except (IOError, IndexError) :
# Job not ready.
raise CalcNotReadyError
# All is fine - really read the results
self.calc.read_results(self) | Read energy, forces, ... from output file(s). | train | https://github.com/jochym/Elastic/blob/8daae37d0c48aab8dfb1de2839dab02314817f95/parcalc/parcalc.py#L460-L499 | null | class RemoteCalculator(Calculator):
'''
Remote calculator based on ASE calculator class.
This class is only involved with the mechanics of remotely executing
the software and transporting the data. The calculation is
delegated to the actual calculator class.
'''
# Queue system submit command
qsub_tool='qsub'
qstat_tool='qstat'
qsub_cmd='cd %(rdir)s ; %(qsub_tool)s -N %(title)s -l procs=%(procs)d ./run-pw.pbs'
# Remote execution command
remote_exec_cmd='ssh %(user)s@%(host)s "%(command)s"'
# If you cannot mount the data directory into your system it is best
# to use the rsync command to transfer the results back into the system.
# Command for copying the data out to the computing system
copy_out_cmd='rsync -a "%(ldir)s" "%(user)s@%(host)s:%(rdir)s"'
# Command for copying the data in after the calculation
copy_in_cmd='rsync -a "%(user)s@%(host)s:%(rdir)s" "%(ldir)s"'
# Template for the PBS batch job
pbs_template=''
# Command to check the state of the job
pbs_check_cmd='''%(qstat_tool)s -f %(jobid)s |grep job_state |awk '{print $3}' '''
# Access data
host=''
user=''
# Location:
# local working directory
wdir='.'
# Remote working directory relative to the home directory or absolute
rdir='.'
# Repetition timer (seconds) for checking the state of the job.
job_check_time=15
def __init__(self, restart=None, ignore_bad_restart_file=False, label=None,
             atoms=None, calc=None, block=False, **kwargs):
    '''Create a remote-execution wrapper around an actual ASE calculator.

    restart: str
        Prefix for restart file. May contain a directory. Default
        is None: don't restart.
    ignore_bad_restart_file: bool
        Ignore broken or missing restart file. By default, it is an
        error if the restart file is missing or broken.
    label: str
        Name used for all files. May contain a directory.
    atoms: Atoms object
        Optional Atoms object to which the calculator will be
        attached. When restarting, atoms will get its positions and
        unit-cell updated from file.
    calc: Calculator
        The actual ASE calculator that performs the computation; this
        class only handles remote submission and data transfer.
    block: bool
        If True, read_results() polls until the remote job finishes;
        if False, it raises CalcNotReadyError while the job is queued
        or running.
    '''
    logging.debug("Calc: %s Label: %s" % (calc, label))
    Calculator.__init__(self, restart, ignore_bad_restart_file, label, atoms, **kwargs)
    logging.debug("Dir: %s Ext: %s" % (self.directory, self.ext))
    # Wrapped calculator and queue-tracking state.
    self.calc=calc
    self.jobid=None   # queue job id, set after submission
    self.block=block  # blocking vs. non-blocking result retrieval
def write_pbs_in(self, properties):
    '''Write the PBS batch script ``run-ase-calc.pbs`` into the calc directory.

    properties: list of str
        Names of the properties the remote job should compute; forwarded
        to build_command() when filling in the batch-script template.
    '''
    with work_dir(self.directory):
        with open(os.path.join(self.directory, 'run-ase-calc.pbs'), 'w') as fh:
            # BUGFIX: the original called self.build_command(self, prop=..., ...),
            # passing self twice on a bound method, which raises
            # TypeError: got multiple values for argument 'prop'.
            fh.write(self.pbs_template % {
                'command': self.build_command(prop=properties,
                                              params=self.parameters)
            })
def build_command(self, prop=None, params=None):
    '''Build the shell command that submits the batch job on the remote host.

    prop: list of str, optional
        Properties to compute. Currently not referenced by the command
        template; kept for interface compatibility. Defaults to ['energy'].
    params: dict, optional
        Extra parameters. Currently not referenced (self.parameters is
        used instead); kept for interface compatibility.

    Returns the full ssh command string that, when run locally, executes
    the qsub submission inside the remote per-calculation directory.
    '''
    # Mutable default arguments replaced with None sentinels (shared
    # list/dict defaults are a Python pitfall even when unused).
    if prop is None:
        prop = ['energy']
    if params is None:
        params = {}
    # Submission command executed in the remote per-calculation directory.
    cmd = self.qsub_cmd % {
        'qsub_tool': self.qsub_tool,
        'qstat_tool': self.qstat_tool,
        'title': self.label,
        'procs': self.parameters['procs'],
        'rdir': os.path.join(self.parameters['rdir'], os.path.split(self.directory)[-1])
    }
    # Wrap the submission in an ssh invocation on the target host.
    cmd = self.remote_exec_cmd % {
        'command': cmd,
        'user': self.parameters['user'],
        'host': self.parameters['host']
    }
    return cmd
def write_input(self, atoms=None, properties=None, system_changes=all_changes):
    '''Write input file(s) locally, add the PBS script, and rsync everything out.

    atoms: Atoms object
        System to write input for (forwarded to the wrapped calculator).
    properties: list of str, optional
        Properties the job should compute; defaults to ['energy'] (None
        sentinel avoids a shared mutable default list).
    system_changes: list
        ASE change flags forwarded to the wrapped calculator.
    '''
    if properties is None:
        properties = ['energy']
    with work_dir(self.directory):
        # Delegate input generation to the wrapped calculator; self is
        # passed explicitly so calc writes into this wrapper's directory.
        self.calc.write_input(self, atoms, properties, system_changes)
        self.write_pbs_in(properties)
        # Copy the prepared directory to the remote machine.
        subprocess.call(self.copy_out_cmd % {
            'ldir': self.directory,
            'rdir': self.parameters['rdir'],
            'user': self.parameters['user'],
            'host': self.parameters['host']
        }, shell=True)
def job_ready(self):
    '''Return True when the submitted job is no longer queued (Q) or running (R).

    Any failure to query the queue (ssh/qstat error, empty output) is
    treated as "job finished" so processing can continue.
    '''
    try:
        cmd = self.remote_exec_cmd % {
            'command': self.pbs_check_cmd % {
                'qsub_tool': self.qsub_tool,
                'qstat_tool': self.qstat_tool,
                'jobid': self.jobid
            },
            'user': self.parameters['user'],
            'host': self.parameters['host']
        }
        # BUGFIX: decode the bytes returned by check_output on Python 3;
        # otherwise the 'Q'/'R' membership test below can never match.
        state = subprocess.check_output(cmd, shell=True).split()[-1].decode()
    except (subprocess.CalledProcessError, IndexError):
        # Unknown state. We assume it has finished and continue.
        state = 'N'
    return not (state in ['Q', 'R'])
def run_calculation(self, atoms=None, properties=None, system_changes=all_changes):
    '''Prepare the directory and submit the remote job.

    Internal calculation executor. We cannot use FileIOCalculator
    directly since we need to support remote execution: this method
    prepares the directory, launches the remote submission, and then
    read_results() raises CalcNotReadyError to signal that we need to
    come back for results when the job is finished.

    atoms: Atoms object to calculate (forwarded to the wrapped calculator).
    properties: list of str, defaults to ['energy'] (None sentinel avoids
        a shared mutable default).
    system_changes: ASE change flags forwarded to the wrapped calculator.

    Raises RuntimeError if no submit command is configured or the
    submission command exits with a non-zero status.
    '''
    if properties is None:
        properties = ['energy']
    self.calc.calculate(self, atoms, properties, system_changes)
    self.write_input(self.atoms, properties, system_changes)
    if self.command is None:
        raise RuntimeError('Please configure Remote calculator!')
    olddir = os.getcwd()
    errorcode = 0
    try:
        os.chdir(self.directory)
        output = subprocess.check_output(self.command, shell=True)
        # BUGFIX: check_output returns bytes on Python 3; decode so the
        # job id interpolates cleanly into the later qstat command string
        # (a bytes jobid would render as "b'...'" inside the command).
        self.jobid = output.split()[0].decode()
        self.submited = True
    except subprocess.CalledProcessError as e:
        errorcode = e.returncode
    finally:
        # Always restore the working directory, even on failure.
        os.chdir(olddir)
    if errorcode:
        raise RuntimeError('%s returned an error: %d' %
                           (self.name, errorcode))
    self.read_results()
def read_results(self):
    """Read energy, forces, ... from output file(s).

    If the job is still queued/running: poll until done when self.block
    is True, otherwise raise CalcNotReadyError so the caller can come
    back later. Once the queue reports the job finished, rsync the
    remote directory back, verify the output file ends with
    'JOB DONE.', and delegate parsing to the wrapped calculator.
    """
    if self.submited:
        # The job has been submitted. Check the state.
        if not self.job_ready() :
            if self.block :
                # Blocking mode: poll the queue until the job leaves it.
                while not self.job_ready() :
                    time.sleep(self.job_check_time)
            else :
                raise CalcNotReadyError
        # Assume the calc finished. Copy the files back.
        subprocess.call(self.copy_in_cmd % {
            'ldir': self.wdir,
            'rdir': os.path.join(self.parameters['rdir'],os.path.split(self.directory)[-1]),
            'user': self.parameters['user'],
            'host': self.parameters['host']
        }, shell=True)
        fn=os.path.join(self.directory,'pw.out')
        # Read the penultimate line of the output file; Quantum ESPRESSO
        # prints 'JOB DONE.' there when the run completed normally.
        try:
            ln=open(fn).readlines()[-2]
            if ln.find('JOB DONE.')>-1 :
                # Job is done, we can read the output.
                r=read_quantumespresso_textoutput(fn)
                self.submited=False
                self.jobid=None
            else :
                # Output present but incomplete: job not ready.
                raise CalcNotReadyError
        except (IOError, IndexError) :
            # Missing or too-short output file: job not ready.
            raise CalcNotReadyError
    # All is fine - really read the results via the wrapped calculator.
    self.calc.read_results(self)
@classmethod
def ParallelCalculate(cls, syslst, properties=['energy'], system_changes=all_changes):
    '''Submit a batch of systems to the remote queue and collect the results.

    Every system in syslst is first launched in non-blocking mode (a
    CalcNotReadyError just flips that calculator into blocking mode);
    afterwards each calculator's results are read back in order.
    Returns the same list of systems, now ready for property extraction.
    '''
    print('Launching:', end=' ')
    sys.stdout.flush()
    for idx, system in enumerate(syslst, start=1):
        try:
            system.calc.block = False
            system.calc.calculate(atoms=system,
                                  properties=properties,
                                  system_changes=system_changes)
        except CalcNotReadyError:
            system.calc.block = True
        print(idx, end=' ')
        sys.stdout.flush()
    print()
    print(' Done:', end=' ')
    sys.stdout.flush()
    for idx, system in enumerate(syslst, start=1):
        system.calc.read_results()
        print(idx, end=' ')
        sys.stdout.flush()
    print()
    return syslst
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.