text stringlengths 81 112k |
|---|
Partial update of a document in named index.
Partial updates are invoked via a call to save the document
with 'update_fields'. These fields are passed to the
as_search_document method so that it can build a partial
document. NB we don't just call as_search_document and then
strip the fields _not_ in update_fields as we are trying
to avoid possibly expensive operations in building the
source document. The canonical example for this method
is updating a single timestamp on a model - we don't want
to have to walk the model relations and build a document
in this case - we just want to push the timestamp.
When POSTing a partial update the `as_search_document` doc
must be passed to the `client.update` wrapped in a "doc" node,
see: https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-update.html
def update_search_document(self, *, index, update_fields):
    """
    Push a partial document update to the named index.

    Partial updates are invoked via a call to save the document with
    'update_fields'. Those fields are handed to as_search_document_update
    so it can build a partial document - we deliberately do NOT build the
    full document and strip unwanted fields, as that could trigger
    expensive work. The canonical example is updating a single timestamp
    on a model: we just want to push the timestamp, not walk the model
    relations.

    When POSTing a partial update the doc must be wrapped in a "doc"
    node for `client.update`, see:
    https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-update.html
    """
    partial = self.as_search_document_update(index=index, update_fields=update_fields)
    if not partial:
        # Nothing to push - an empty partial doc is a no-op.
        logger.debug("Ignoring object update as document is empty.")
        return
    get_client().update(
        id=self.pk,
        index=index,
        doc_type=self.search_doc_type,
        body={"doc": partial},
    )
Delete document from named index.
def delete_search_document(self, *, index):
    """Remove this object's document from the named index."""
    # Drop the cached copy of the search document before deleting remotely.
    cache.delete(self.search_document_cache_key)
    client = get_client()
    client.delete(index=index, doc_type=self.search_doc_type, id=self.pk)
Create a new SearchQuery instance and execute a search against ES.
def execute(cls, search, search_terms="", user=None, reference=None, save=True):
    """Create a new SearchQuery instance and execute a search against ES."""
    # Deprecated entry point - delegates to the module-level function.
    warnings.warn(
        "Pending deprecation - please use `execute_search` function instead.",
        PendingDeprecationWarning,
    )
    kwargs = dict(search_terms=search_terms, user=user, reference=reference, save=save)
    return execute_search(search, **kwargs)
Save and return the object (for chaining).
def save(self, **kwargs):
    """Persist the object and return it (enables call chaining)."""
    # Normalise a null search_terms to the empty string before saving.
    self.search_terms = "" if self.search_terms is None else self.search_terms
    super().save(**kwargs)
    return self
Return the query from:size tuple (0-based).
def page_slice(self):
    """Return the query from:size tuple (0-based), or None if there is no query."""
    if self.query is None:
        return None
    # ES defaults: from=0, size=10.
    start = self.query.get("from", 0)
    size = self.query.get("size", 10)
    return (start, size)
Sets the value of a plugin setting.
:param name: the name of the setting. It is not the full path, but just the last name of it
:param value: the value to set for the plugin setting
:param namespace: The namespace. If not passed or None, the namespace will be inferred from
the caller method. Normally, this should not be passed, since it suffices to let this function
find out the plugin from where it is being called, and it will automatically use the
corresponding plugin namespace
def setPluginSetting(name, value, namespace=None):
    '''
    Sets the value of a plugin setting.

    :param name: the name of the setting. It is not the full path, but just the last name of it
    :param value: the value to set for the plugin setting
    :param namespace: The namespace. If not passed or None, the namespace will be inferred from
    the caller method. Normally, this should not be passed, since it suffices to let this function
    find out the plugin from where it is being called, and it will automatically use the
    corresponding plugin namespace
    '''
    if namespace is None:
        # Infer the plugin namespace from the calling module's dotted name.
        namespace = _callerName().split(".")[0]
    settings.setValue("/".join([namespace, name]), value)
Returns the value of a plugin setting.
:param name: the name of the setting. It is not the full path, but just the last name of it
:param namespace: The namespace. If not passed or None, the namespace will be inferred from
the caller method. Normally, this should not be passed, since it suffices to let this function
find out the plugin from where it is being called, and it will automatically use the
corresponding plugin namespace
def pluginSetting(name, namespace=None, typ=None):
    '''
    Returns the value of a plugin setting.

    :param name: the name of the setting. It is not the full path, but just the last name of it
    :param namespace: The namespace. If not passed or None, the namespace will be inferred from
    the caller method. Normally, this should not be passed, since it suffices to let this function
    find out the plugin from where it is being called, and it will automatically use the
    corresponding plugin namespace
    :param typ: optional python type used to coerce the stored value; if None, the type is
    looked up from the cached settings.json description for the plugin
    '''
    def _find_in_cache(name, key):
        # Look up `key` for the named setting in the cached settings.json data.
        for setting in _settings[namespace]:
            if setting["name"] == name:
                return setting[key]
        return None
    def _type_map(t):
        """Return setting python type for a settings-file type constant."""
        if t == BOOL:
            return bool
        elif t == NUMBER:
            return float
        else:
            return unicode
    namespace = namespace or _callerName().split(".")[0]
    full_name = namespace + "/" + name
    if settings.contains(full_name):
        if typ is None:
            typ = _type_map(_find_in_cache(name, 'type'))
        v = settings.value(full_name, None, type=typ)
        try:
            # QPyNullVariant only exists in older Qt bindings; treat it as None.
            if isinstance(v, QPyNullVariant):
                v = None
        except Exception:
            # Was a bare `except:`, which also swallowed SystemExit and
            # KeyboardInterrupt; Exception still covers the NameError raised
            # on bindings that lack QPyNullVariant.
            pass
        return v
    else:
        # Setting not stored yet - fall back to the declared default value.
        return _find_in_cache(name, 'default')
Reads the settings corresponding to the plugin from where the method is called.
This function has to be called in the __init__ method of the plugin class.
Settings are stored in a settings.json file in the plugin folder.
Here is an example of such a file:
[
{"name":"mysetting",
"label": "My setting",
"description": "A setting to customize my plugin",
"type": "string",
"default": "dummy string",
"group": "Group 1"
"onEdit": "def f():\\n\\tprint "Value edited in settings dialog"
"onChange": "def f():\\n\\tprint "New settings value has been saved"
},
{"name":"anothersetting",
"label": "Another setting",
"description": "Another setting to customize my plugin",
"type": "number",
"default": 0,
"group": "Group 2"
},
{"name":"achoicesetting",
"label": "A choice setting",
"description": "A setting to select from a set of possible options",
"type": "choice",
"default": "option 1",
"options":["option 1", "option 2", "option 3"],
"group": "Group 2"
}
]
Available types for settings are: string, bool, number, choice, crs and text (a multiline string)
The onEdit property contains a function that will be executed when the user edits the value
in the settings dialog. It should return false if, after it has been executed, the setting
should not be modified and should recover its original value.
The onChange property contains a function that will be executed when the setting is changed after
closing the settings dialog, or programmatically by calling the setPluginSetting method
Both onEdit and onChange are optional properties
def readSettings(settings_path=None):
    '''
    Reads the settings corresponding to the plugin from where the method is called.
    This function has to be called in the __init__ method of the plugin class.
    Settings are stored in a settings.json file in the plugin folder.
    Here is an example of such a file:
    [
    {"name":"mysetting",
    "label": "My setting",
    "description": "A setting to customize my plugin",
    "type": "string",
    "default": "dummy string",
    "group": "Group 1"
    "onEdit": "def f():\\n\\tprint "Value edited in settings dialog"
    "onChange": "def f():\\n\\tprint "New settings value has been saved"
    },
    {"name":"anothersetting",
    "label": "Another setting",
    "description": "Another setting to customize my plugin",
    "type": "number",
    "default": 0,
    "group": "Group 2"
    },
    {"name":"achoicesetting",
    "label": "A choice setting",
    "description": "A setting to select from a set of possible options",
    "type": "choice",
    "default": "option 1",
    "options":["option 1", "option 2", "option 3"],
    "group": "Group 2"
    }
    ]
    Available types for settings are: string, bool, number, choice, crs and text (a multiline string)
    The onEdit property contains a function that will be executed when the user edits the value
    in the settings dialog. It should return false if, after it has been executed, the setting
    should not be modified and should recover its original value.
    The onChange property contains a function that will be executed when the setting is changed after
    closing the settings dialog, or programmatically by calling the setPluginSetting method
    Both onEdit and onChange are optional properties
    '''
    # NB the docstring must be the first statement in the body; previously it
    # followed the `global` statement and was a discarded string expression.
    global _settings
    namespace = _callerName().split(".")[0]
    settings_path = settings_path or os.path.join(os.path.dirname(_callerPath()), "settings.json")
    with open(settings_path) as f:
        _settings[namespace] = json.load(f)
Delete search index.
def do_index_command(self, index, **options):
    """Delete search index, after interactive confirmation if requested."""
    if options["interactive"]:
        logger.warning("This will permanently delete the index '%s'.", index)
        confirmed = self._confirm_action()
        if not confirmed:
            logger.warning(
                "Aborting deletion of index '%s' at user's request.", index
            )
            return
    return delete_index(index)
Create an index and apply mapping if appropriate.
def create_index(index):
    """Create an index and apply mapping if appropriate."""
    logger.info("Creating search index: '%s'", index)
    mapping = get_index_mapping(index)
    return get_client().indices.create(index=index, body=mapping)
Re-index every document in a named index.
def update_index(index):
    """Re-index every document in a named index."""
    logger.info("Updating search index: '%s'", index)
    client = get_client()
    chunk_size = get_setting("chunk_size")
    responses = []
    for model in get_index_models(index):
        logger.info("Updating search index model: '%s'", model.search_doc_type)
        # iterator() streams the queryset rather than loading it all at once
        objects = model.objects.get_search_queryset(index).iterator()
        actions = bulk_actions(objects, index=index, action="index")
        responses.append(helpers.bulk(client, actions, chunk_size=chunk_size))
    return responses
Delete index entirely (removes all documents and mapping).
def delete_index(index):
    """Delete index entirely (removes all documents and mapping)."""
    logger.info("Deleting search index: '%s'", index)
    return get_client().indices.delete(index=index)
Remove all orphaned documents from an index.
This function works by scanning the remote index, and in each returned
batch of documents looking up whether they appear in the default index
queryset. If they don't (they've been deleted, or no longer fit the qs
filters) then they are deleted from the index. The deletion is done in
one hit after the entire remote index has been scanned.
The elasticsearch.helpers.scan function returns each document one at a
time, so this function can swamp the database with SELECT requests.
Please use sparingly.
Returns a list of ids of all the objects deleted.
def prune_index(index):
    """Remove all orphaned documents from an index.

    This function works by scanning the remote index, and in each returned
    batch of documents looking up whether they appear in the default index
    queryset. If they don't (they've been deleted, or no longer fit the qs
    filters) then they are deleted from the index. The deletion is done in
    one hit per model after that model's documents have been scanned.

    The elasticsearch.helpers.scan function returns each document one at a
    time, so this function can swamp the database with SELECT requests.
    Please use sparingly.

    Returns a list of bulk-api responses, one per model that had deletions.
    """
    logger.info("Pruning missing objects from index '%s'", index)
    responses = []
    client = get_client()
    for model in get_index_models(index):
        # Collect prunable objects afresh for each model. Previously the list
        # accumulated across models, so each later model re-submitted delete
        # actions for earlier models' prunes and the logged count was
        # cumulative rather than per-model.
        prunes = [
            obj
            for obj in (_prune_hit(hit, model) for hit in scan_index(index, model))
            if obj is not None
        ]
        logger.info(
            "Found %s objects of type '%s' for deletion from '%s'.",
            len(prunes),
            model,
            index,
        )
        if prunes:
            actions = bulk_actions(prunes, index, "delete")
            responses.append(
                helpers.bulk(client, actions, chunk_size=get_setting("chunk_size"))
            )
    return responses
Check whether a document should be pruned.
This method uses the SearchDocumentManagerMixin.in_search_queryset method
to determine whether a 'hit' (search document) should be pruned from an index,
and if so it returns the hit as a Django object(id=hit_id).
Args:
hit: dict object the represents a document as returned from the scan_index
function. (Contains object id and index.)
model: the Django model (not object) from which the document was derived.
Used to get the correct model manager and bulk action.
Returns:
an object of type model, with id=hit_id. NB this is not the object
itself, which by definition may not exist in the underlying database,
but a temporary object with the document id - which is enough to create
a 'delete' action.
def _prune_hit(hit, model):
    """
    Check whether a document should be pruned.

    This method uses the SearchDocumentManagerMixin.in_search_queryset method
    to determine whether a 'hit' (search document) should be pruned from an
    index, and if so it returns the hit as a Django object(id=hit_id).

    Args:
        hit: dict object that represents a document as returned from the
            scan_index function. (Contains object id and index.)
        model: the Django model (not object) from which the document was
            derived. Used to get the correct model manager and bulk action.

    Returns:
        an object of type model, with id=hit_id. NB this is not the object
        itself, which by definition may not exist in the underlying database,
        but a temporary object with the document id - which is enough to
        create a 'delete' action.
    """
    hit_id = hit["_id"]
    hit_index = hit["_index"]
    in_queryset = model.objects.in_search_queryset(hit_id, index=hit_index)
    if in_queryset:
        logger.debug(
            "%s with id=%s exists in the '%s' index queryset.", model, hit_id, hit_index
        )
        return None
    logger.debug(
        "%s with id=%s does not exist in the '%s' index queryset and will be pruned.",
        model,
        hit_id,
        hit_index,
    )
    # we don't need the full obj for a delete action, just the id.
    # (the object itself may not even exist.)
    return model(pk=hit_id)
Yield all documents of model type in an index.
This function calls the elasticsearch.helpers.scan function,
and yields all the documents in the index that match the doc_type
produced by a specific Django model.
Args:
index: string, the name of the index to scan, must be a configured
index as returned from settings.get_index_names.
model: a Django model type, used to filter the documents that
are scanned.
Yields each document of type model in index, one at a time.
def scan_index(index, model):
    """
    Yield all documents of model type in an index.

    This function calls the elasticsearch.helpers.scan function,
    and yields all the documents in the index that match the doc_type
    produced by a specific Django model.

    Args:
        index: string, the name of the index to scan, must be a configured
            index as returned from settings.get_index_names.
        model: a Django model type, used to filter the documents that
            are scanned.

    Yields each document of type model in index, one at a time.
    """
    # see https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-type-query.html
    query = {"query": {"type": {"value": model._meta.model_name}}}
    yield from helpers.scan(get_client(), index=index, query=query)
Yield bulk api 'actions' from a collection of objects.
The output from this method can be fed in to the bulk
api helpers - each document returned by get_documents
is decorated with the appropriate bulk api op_type.
Args:
objects: iterable (queryset, list, ...) of SearchDocumentMixin
objects. If the objects passed in is a generator, then this
function will yield the results rather than returning them.
index: string, the name of the index to target - the index name
is embedded into the return value and is used by the bulk api.
action: string ['index' | 'update' | 'delete'] - this decides
how the final document is formatted.
def bulk_actions(objects, index, action):
    """
    Yield bulk api 'actions' from a collection of objects.

    The output from this method can be fed in to the bulk
    api helpers - each document returned by get_documents
    is decorated with the appropriate bulk api op_type.

    Args:
        objects: iterable (queryset, list, ...) of SearchDocumentMixin
            objects. If the objects passed in is a generator, then this
            function will yield the results rather than returning them.
        index: string, the name of the index to target - the index name
            is embedded into the return value and is used by the bulk api.
        action: string ['index' | 'update' | 'delete'] - this decides
            how the final document is formatted.
    """
    # Validate eagerly: inside a generator body the assert would not run
    # until the first action is requested, silently deferring the failure.
    assert (
        index != "_all"
    ), "index arg must be a valid index name. '_all' is a reserved term."
    logger.info("Creating bulk '%s' actions for '%s'", action, index)
    return _bulk_actions_iter(objects, index, action)


def _bulk_actions_iter(objects, index, action):
    """Inner generator for bulk_actions - yields one search action per object."""
    for obj in objects:
        try:
            logger.debug("Appending '%s' action for '%r'", action, obj)
            yield obj.as_search_action(index=index, action=action)
        except Exception:
            # Best effort: skip objects that cannot produce an action.
            logger.exception("Unable to create search action for %s", obj)
Validate settings.SEARCH_SETTINGS.
def _validate_config(strict=False):
    """Validate settings.SEARCH_SETTINGS."""
    for index in settings.get_index_names():
        _validate_mapping(index, strict=strict)
        for model in settings.get_index_models(index):
            _validate_model(model)
    strategy = settings.get_setting("update_strategy", "full")
    if strategy not in ("full", "partial"):
        raise ImproperlyConfigured(
            "Invalid SEARCH_SETTINGS: 'update_strategy' value must be 'full' or 'partial'."
        )
Check that an index mapping JSON file exists.
def _validate_mapping(index, strict=False):
    """Check that an index mapping JSON file exists."""
    try:
        settings.get_index_mapping(index)
    except IOError:
        # In non-strict mode a missing mapping is tolerated - ES will
        # infer the mapping dynamically.
        if not strict:
            logger.warning("Index '%s' has no mapping, relying on ES instead.", index)
        else:
            raise ImproperlyConfigured("Index '%s' has no mapping file." % index)
Check that a model configured for an index subclasses the required classes.
def _validate_model(model):
    """Check that a model configured for an index subclasses the required classes."""
    if not hasattr(model, "as_search_document"):
        raise ImproperlyConfigured("'%s' must implement `as_search_document`." % model)
    if not hasattr(model.objects, "get_search_queryset"):
        # error message previously had an unbalanced quote ("'%s.objects must ...")
        raise ImproperlyConfigured(
            "'%s.objects' must implement `get_search_queryset`." % model
        )
Connect up post_save, post_delete signals for models.
def _connect_signals():
    """Connect up post_save, post_delete signals for models."""
    for index in settings.get_index_names():
        models = settings.get_index_models(index)
        for model in models:
            _connect_model_signals(model)
Connect signals for a single model.
def _connect_model_signals(model):
    """Connect signals for a single model."""
    model_name = model._meta.model_name
    # dispatch_uid keeps the connection idempotent per model / signal pair.
    uid_save = "%s.post_save" % model_name
    logger.debug("Connecting search index model post_save signal: %s", uid_save)
    signals.post_save.connect(_on_model_save, sender=model, dispatch_uid=uid_save)
    uid_delete = "%s.post_delete" % model_name
    logger.debug("Connecting search index model post_delete signal: %s", uid_delete)
    signals.post_delete.connect(_on_model_delete, sender=model, dispatch_uid=uid_delete)
Update document in search index post_save.
def _on_model_save(sender, **kwargs):
    """Update document in search index post_save."""
    obj = kwargs.pop("instance")
    fields = kwargs.pop("update_fields")
    for idx in obj.search_indexes:
        # One failing index must not prevent updates to the others.
        try:
            _update_search_index(instance=obj, index=idx, update_fields=fields)
        except Exception:
            logger.exception("Error handling 'on_save' signal for %s", obj)
Remove documents from search indexes post_delete.
def _on_model_delete(sender, **kwargs):
    """Remove documents from search indexes post_delete."""
    obj = kwargs.pop("instance")
    for idx in obj.search_indexes:
        # One failing index must not prevent deletions from the others.
        try:
            _delete_from_search_index(instance=obj, index=idx)
        except Exception:
            logger.exception("Error handling 'on_delete' signal for %s", obj)
Wrapper around the instance manager method.
def _in_search_queryset(*, instance, index) -> bool:
    """Wrapper around the instance manager method."""
    manager = instance.__class__.objects
    try:
        return manager.in_search_queryset(instance.id, index=index)
    except Exception:
        # Deliberately best-effort: a lookup failure means "not in queryset".
        logger.exception("Error checking object in_search_queryset.")
        return False
Process index / update search index update actions.
def _update_search_index(*, instance, index, update_fields):
    """Process index / update search index update actions."""
    # Skip objects that don't belong in this index's queryset
    # (deleted or filtered out by the configured queryset).
    if not _in_search_queryset(instance=instance, index=index):
        logger.debug(
            "Object (%r) is not in search queryset, ignoring update.", instance
        )
        return
    try:
        if update_fields:
            # Partial update path: fire pre_update first, then push only
            # the changed fields (when auto_sync is enabled for the model).
            pre_update.send(
                sender=instance.__class__,
                instance=instance,
                index=index,
                update_fields=update_fields,
            )
            if settings.auto_sync(instance):
                instance.update_search_document(
                    index=index, update_fields=update_fields
                )
        else:
            # Full re-index path. NB signals are sent even when auto_sync
            # is off - only the ES call itself is gated.
            pre_index.send(sender=instance.__class__, instance=instance, index=index)
            if settings.auto_sync(instance):
                instance.index_search_document(index=index)
    except Exception:
        # Never let indexing errors propagate into the save() call.
        logger.exception("Error handling 'post_save' signal for %s", instance)
Remove a document from a search index.
def _delete_from_search_index(*, instance, index):
    """Remove a document from a search index."""
    # The signal always fires; only the ES delete is gated on auto_sync.
    pre_delete.send(sender=instance.__class__, instance=instance, index=index)
    if settings.auto_sync(instance):
        instance.delete_search_document(index=index)
Validate config and connect signals.
def ready(self):
    """Validate config and connect signals."""
    # zero-arg super() - the file already targets Python 3 (see Model.save).
    super().ready()
    _validate_config(settings.get_setting("strict_validation"))
    _connect_signals()
Return specific search setting from Django conf.
def get_setting(key, *default):
    """Return specific search setting from Django conf."""
    conf = get_settings()
    # Passing a positional default makes the lookup non-raising (dict.get).
    return conf.get(key, default[0]) if default else conf[key]
Return the JSON mapping file for an index.
Mappings are stored as JSON files in the mappings subdirectory of this
app. They must be saved as {{index}}.json.
Args:
index: string, the name of the index to look for.
def get_index_mapping(index):
    """Return the JSON mapping file for an index.

    Mappings are stored as JSON files in the mappings subdirectory of this
    app. They must be saved as {{index}}.json.

    Args:
        index: string, the name of the index to look for.
    """
    # app_path = apps.get_app_config('elasticsearch_django').path
    path = os.path.join(get_setting("mappings_dir"), "%s.json" % index)
    with open(path, "r") as f:
        return json.load(f)
Return the list of properties specified for a model in an index.
def get_model_index_properties(instance, index):
    """Return the list of properties specified for a model in an index."""
    doc_type = instance._meta.model_name.lower()
    mapping = get_index_mapping(index)
    properties = mapping["mappings"][doc_type]["properties"]
    return list(properties)
Return list of models configured for a named index.
Args:
index: string, the name of the index to look up.
def get_index_models(index):
    """Return list of models configured for a named index.

    Args:
        index: string, the name of the index to look up.
    """
    # Config entries are "app_label.ModelName" strings.
    return [
        apps.get_model(*app_model.split("."))
        for app_model in get_index_config(index).get("models")
    ]
Return list of all indexes in which a model is configured.
A model may be configured to appear in multiple indexes. This function
will return the names of the indexes as a list of strings. This is
useful if you want to know which indexes need updating when a model
is saved.
Args:
model: a Django model class.
def get_model_indexes(model):
    """Return list of all indexes in which a model is configured.

    A model may be configured to appear in multiple indexes. This function
    will return the names of the indexes as a list of strings. This is
    useful if you want to know which indexes need updating when a model
    is saved.

    Args:
        model: a Django model class.
    """
    indexes = []
    for index in get_index_names():
        # NB preserves one entry per matching config row, as before.
        indexes.extend(index for m in get_index_models(index) if m == model)
    return indexes
Return dict of index.doc_type: model.
def get_document_models():
    """Return dict of index.doc_type: model."""
    return {
        "%s.%s" % (index, model._meta.model_name): model
        for index in get_index_names()
        for model in get_index_models(index)
    }
Returns bool if auto_sync is on for the model (instance)
def auto_sync(instance):
    """Return True if auto_sync is on for the model (instance)."""
    # this allows us to turn off sync temporarily - e.g. when doing bulk updates
    if not get_setting("auto_sync"):
        return False
    model_name = "{}.{}".format(instance._meta.app_label, instance._meta.model_name)
    return model_name not in get_setting("never_auto_sync", [])
Returns an indented HTML pretty-print version of JSON.
Take the event_payload JSON, indent it, order the keys and then
present it as a <code> block. That's about as good as we can get
until someone builds a custom syntax function.
def pprint(data):
    """
    Return an indented HTML pretty-print version of JSON.

    Take the event_payload JSON, indent it, order the keys and then
    present it as a <code> block. That's about as good as we can get
    until someone builds a custom syntax function.
    """
    pretty = json.dumps(data, sort_keys=True, indent=4, separators=(",", ": "))
    # Use the &nbsp; entity so indentation survives HTML rendering. The
    # previous code read `.replace(" ", " ")` - either a no-op or a
    # mis-decoded entity; presumably the intent was &nbsp; - verify output
    # in the admin rendering.
    html = pretty.replace(" ", "&nbsp;").replace("\n", "<br>")
    return mark_safe("<code>%s</code>" % html)
Adds a help menu to the plugin menu.
This method should be called from the initGui() method of the plugin
:param menuName: The name of the plugin menu in which the about menu is to be added.
def addHelpMenu(menuName, parentMenuFunction=None):
    '''
    Adds a help menu to the plugin menu.
    This method should be called from the initGui() method of the plugin

    :param menuName: The name of the plugin menu in which the about menu is to be added.
    '''
    addToMenu = parentMenuFunction or iface.addPluginToMenu
    namespace = _callerName().split(".")[0]
    # Help pages are expected under <plugin>/docs/html/index.html.
    helpUrl = "file://{}".format(os.path.join(os.path.dirname(_callerPath()), "docs", "html", "index.html"))
    icon = QgsApplication.getThemeIcon('/mActionHelpContents.svg')
    action = QtWidgets.QAction(icon, "Plugin help...", iface.mainWindow())
    action.setObjectName(namespace + "help")
    action.triggered.connect(lambda: openHelp(helpUrl))
    addToMenu(menuName, action)
    # Keep a module-level reference so the action can be removed later.
    global _helpActions
    _helpActions[menuName] = action
Adds an 'about...' menu to the plugin menu.
This method should be called from the initGui() method of the plugin
:param menuName: The name of the plugin menu in which the about menu is to be added
def addAboutMenu(menuName, parentMenuFunction=None):
    '''
    Adds an 'about...' menu to the plugin menu.
    This method should be called from the initGui() method of the plugin

    :param menuName: The name of the plugin menu in which the about menu is to be added
    '''
    addToMenu = parentMenuFunction or iface.addPluginToMenu
    namespace = _callerName().split(".")[0]
    iconPath = os.path.join(os.path.dirname(os.path.dirname(__file__)), "icons", "help.png")
    action = QtWidgets.QAction(QtGui.QIcon(iconPath), "About...", iface.mainWindow())
    action.setObjectName(namespace + "about")
    action.triggered.connect(lambda: openAboutDialog(namespace))
    addToMenu(menuName, action)
    # Keep a module-level reference so the action can be removed later.
    global _aboutActions
    _aboutActions[menuName] = action
Show a dialog containing a given text, with a given title.
The text accepts HTML syntax
def showMessageDialog(title, text):
    '''
    Show a dialog containing a given text, with a given title.
    The text accepts HTML syntax
    '''
    dialog = QgsMessageOutput.createMessageOutput()
    dialog.setTitle(title)
    dialog.setMessage(text, QgsMessageOutput.MessageHtml)
    dialog.showMessage()
Asks for a file or files, opening the corresponding dialog with the last path that was selected
when this same function was invoked from the calling method.
:param parent: The parent window
:param msg: The message to use for the dialog title
:param isSave: true if we are asking for file to save
:param allowMultiple: True if should allow multiple files to be selected. Ignored if isSave == True
:param exts: Extensions to allow in the file dialog. Can be a single string or a list of them.
Use "*" to add an option that allows all files to be selected
:returns: A string with the selected filepath or an array of them, depending on whether allowMultiple is True of False
def askForFiles(parent, msg = None, isSave = False, allowMultiple = False, exts = "*"):
    '''
    Asks for a file or files, opening the corresponding dialog with the last path that was selected
    when this same function was invoked from the calling method.

    :param parent: The parent window
    :param msg: The message to use for the dialog title
    :param isSave: true if we are asking for file to save
    :param allowMultiple: True if should allow multiple files to be selected. Ignored if isSave == True
    :param exts: Extensions to allow in the file dialog. Can be a single string or a list of them.
    Use "*" to add an option that allows all files to be selected

    :returns: A string with the selected filepath or an array of them, depending on whether allowMultiple is True of False
    '''
    msg = msg or 'Select file'
    # The setting name is keyed on the calling function so each call site
    # remembers its own last-used directory.
    caller = _callerName().split(".")
    name = "/".join([LAST_PATH, caller[-1]])
    namespace = caller[0]
    path = pluginSetting(name, namespace)
    f = None
    if not isinstance(exts, list):
        exts = [exts]
    # Build a Qt file-filter string, e.g. " TXT files (*.txt);; All files (*.*)".
    extString = ";; ".join([" %s files (*.%s)" % (e.upper(), e) if e != "*" else "All files (*.*)" for e in exts])
    if allowMultiple:
        # NOTE(review): in Qt5/PyQt5, getOpenFileNames returns a
        # (list, selectedFilter) tuple, so `ret[0]` below is the list of
        # paths and os.path.dirname(f) on a list would raise - this code
        # looks written for the Qt4-style API; confirm the binding in use.
        # NOTE(review): the "'*.' +" prefix on the filter string looks
        # wrong given extString is already a full filter - verify.
        ret = QtWidgets.QFileDialog.getOpenFileNames(parent, msg, path, '*.' + extString)
        if ret:
            f = ret[0]
        else:
            f = ret = None
    else:
        if isSave:
            # NOTE(review): Qt5 getSaveFileName also returns a tuple, so
            # ret.endswith would fail there - same Qt4-API assumption.
            ret = QtWidgets.QFileDialog.getSaveFileName(parent, msg, path, '*.' + extString) or None
            if ret is not None and not ret.endswith(exts[0]):
                ret += "." + exts[0]
        else:
            ret = QtWidgets.QFileDialog.getOpenFileName(parent, msg , path, '*.' + extString) or None
        f = ret
    if f is not None:
        # Remember the chosen directory for the next invocation from this caller.
        setPluginSetting(name, os.path.dirname(f), namespace)
    return ret
Asks for a folder, opening the corresponding dialog with the last path that was selected
when this same function was invoked from the calling method
:param parent: The parent window
:param msg: The message to use for the dialog title
def askForFolder(parent, msg=None):
    '''
    Asks for a folder, opening the corresponding dialog with the last path that was selected
    when this same function was invoked from the calling method

    :param parent: The parent window
    :param msg: The message to use for the dialog title
    '''
    msg = msg or 'Select folder'
    # The setting name is keyed on the calling function so each call site
    # remembers its own last-used folder.
    callerParts = _callerName().split(".")
    settingName = "/".join([LAST_PATH, callerParts[-1]])
    namespace = callerParts[0]
    lastPath = pluginSetting(settingName, namespace)
    folder = QtWidgets.QFileDialog.getExistingDirectory(parent, msg, lastPath)
    if folder:
        setPluginSetting(settingName, folder, namespace)
    return folder
Executes a lengthy tasks in a separate thread and displays a waiting dialog if needed.
Sets the cursor to wait cursor while the task is running.
This function does not provide any support for progress indication
:param func: The function to execute.
:param message: The message to display in the wait dialog. If not passed, the dialog won't be shown
def execute(func, message = None):
    '''
    Executes a lengthy task in a separate thread and displays a waiting dialog if needed.
    Sets the cursor to wait cursor while the task is running.
    This function does not provide any support for progress indication
    :param func: The function to execute.
    :param message: The message to display in the wait dialog. If not passed, the dialog won't be shown
    :return: whatever ``func`` returns (raises whatever the thread raised)
    '''
    global _dialog
    cursor = QtWidgets.QApplication.overrideCursor()
    waitCursor = (cursor is not None and cursor.shape() == QtCore.Qt.WaitCursor)
    dialogCreated = False
    try:
        QtCore.QCoreApplication.processEvents()
        if not waitCursor:
            QtWidgets.QApplication.setOverrideCursor(QtGui.QCursor(QtCore.Qt.WaitCursor))
        if message is not None:
            t = ExecutorThread(func)
            loop = QtCore.QEventLoop()
            t.finished.connect(loop.exit, QtCore.Qt.QueuedConnection)
            if _dialog is None:
                dialogCreated = True
                # FIX: QProgressDialog lives in QtWidgets under Qt5 (it was in
                # QtGui only in Qt4); QtGui.QProgressDialog raises AttributeError
                # with the QtWidgets-based bindings used throughout this module.
                _dialog = QtWidgets.QProgressDialog(message, "Running", 0, 0, iface.mainWindow())
                _dialog.setWindowTitle("Running")
                _dialog.setWindowModality(QtCore.Qt.WindowModal)
                _dialog.setMinimumDuration(1000)
                # Set a determinate range first, then max=0 to switch the dialog
                # into indeterminate "busy" mode.
                _dialog.setMaximum(100)
                _dialog.setValue(0)
                _dialog.setMaximum(0)
                _dialog.setCancelButton(None)
            else:
                # Nested execute() call: reuse the existing dialog, remembering
                # its label so it can be restored afterwards.
                oldText = _dialog.labelText()
                _dialog.setLabelText(message)
            QtWidgets.QApplication.processEvents()
            t.start()
            loop.exec_(flags = QtCore.QEventLoop.ExcludeUserInputEvents)
            if t.exception is not None:
                raise t.exception
            return t.returnValue
        else:
            return func()
    finally:
        if message is not None:
            if dialogCreated:
                _dialog.reset()
                _dialog = None
            else:
                _dialog.setLabelText(oldText)
        if not waitCursor:
            QtWidgets.QApplication.restoreOverrideCursor()
        QtCore.QCoreApplication.processEvents()
Context manager used to temporarily disable auto_sync.
This is useful when performing bulk updates on objects - when
you may not want to flood the indexing process.
>>> with disable_search_updates():
... for obj in model.objects.all():
... obj.save()
The function works by temporarily removing the apps._on_model_save
signal handler from the model.post_save signal receivers, and then
restoring them after.
def disable_search_updates():
    """
    Context manager used to temporarily disable auto_sync.

    This is useful when performing bulk updates on objects - when
    you may not want to flood the indexing process.

    >>> with disable_search_updates():
    ...     for obj in model.objects.all():
    ...         obj.save()

    The function works by temporarily removing the apps._on_model_save
    signal handler from the model.post_save signal receivers, and then
    restoring them after.
    """
    _receivers = signals.post_save.receivers.copy()
    signals.post_save.receivers = _strip_on_model_save()
    try:
        yield
    finally:
        # FIX: restore the original receivers even if the wrapped block raises,
        # otherwise auto_sync would stay disabled for the rest of the process.
        signals.post_save.receivers = _receivers
Make a network request by calling QgsNetworkAccessManager.
redirections argument is ignored and is here only for httplib2 compatibility.
def request(self, url, method="GET", body=None, headers=None, redirections=DEFAULT_MAX_REDIRECTS, connection_type=None, blocking=True):
    """
    Make a network request by calling QgsNetworkAccessManager.
    redirections argument is ignored and is here only for httplib2 compatibility.

    :param url: URL to call (will be unquoted before use)
    :param method: HTTP verb; 'delete' is mapped to QNetworkAccessManager's
        deleteResource(), every other verb to the lower-cased method name
    :param body: request payload for POST/PUT; file-like objects are read,
        str is encoded to bytes
    :param headers: optional dict of request headers
    :param connection_type: unused, kept for httplib2 compatibility
    :param blocking: when False, returns (None, None) immediately and the
        caller must listen for the reply signals itself
    :return: (http_call_result, content) tuple in blocking mode
    """
    self.msg_log(u'http_call request: {0}'.format(url))
    self.blocking_mode = blocking
    req = QNetworkRequest()
    # Avoid double quoting from QUrl
    url = urllib.parse.unquote(url)
    req.setUrl(QUrl(url))
    if headers is not None:
        # This fixes a weird error with compressed content not being correctly
        # inflated.
        # If you set the header on the QNetworkRequest you are basically telling
        # QNetworkAccessManager "I know what I'm doing, please don't do any content
        # encoding processing".
        # See: https://bugs.webkit.org/show_bug.cgi?id=63696#c1
        try:
            del headers['Accept-Encoding']
        except KeyError:
            pass
        for k, v in list(headers.items()):
            self.msg_log("Setting header %s to %s" % (k, v))
            req.setRawHeader(k.encode(), v.encode())
    if self.authid:
        self.msg_log("Update request w/ authid: {0}".format(self.authid))
        self.auth_manager().updateNetworkRequest(req, self.authid)
    # Only one in-flight reply per instance: close any still-running one.
    if self.reply is not None and self.reply.isRunning():
        self.reply.close()
    if method.lower() == 'delete':
        # QNetworkAccessManager exposes DELETE as deleteResource(), not delete()
        func = getattr(QgsNetworkAccessManager.instance(), 'deleteResource')
    else:
        func = getattr(QgsNetworkAccessManager.instance(), method.lower())
    # Calling the server ...
    # Let's log the whole call for debugging purposes:
    self.msg_log("Sending %s request to %s" % (method.upper(), req.url().toString()))
    self.on_abort = False
    headers = {str(h): str(req.rawHeader(h)) for h in req.rawHeaderList()}
    for k, v in list(headers.items()):
        self.msg_log("%s: %s" % (k, v))
    if method.lower() in ['post', 'put']:
        # Normalize the body: file-like -> bytes, str -> bytes.
        if isinstance(body, io.IOBase):
            body = body.read()
        if isinstance(body, str):
            body = body.encode()
        self.reply = func(req, body)
    else:
        self.reply = func(req)
    if self.authid:
        self.msg_log("Update reply w/ authid: {0}".format(self.authid))
        self.auth_manager().updateNetworkReply(self.reply, self.authid)
    # necessary to trap local timeout managed by QgsNetworkAccessManager
    # calling QgsNetworkAccessManager::abortRequest
    QgsNetworkAccessManager.instance().requestTimedOut.connect(self.requestTimedOut)
    self.reply.sslErrors.connect(self.sslErrors)
    self.reply.finished.connect(self.replyFinished)
    self.reply.downloadProgress.connect(self.downloadProgress)
    # block if blocking mode otherwise return immediately
    # it's up to the caller to manage listeners in case of no blocking mode
    if not self.blocking_mode:
        return (None, None)
    # Call and block on a local event loop until the reply finishes.
    self.el = QEventLoop()
    self.reply.finished.connect(self.el.quit)
    # Catch all exceptions (and clean up requests)
    try:
        self.el.exec_(QEventLoop.ExcludeUserInputEvents)
    except Exception as e:
        raise e
    if self.reply:
        self.reply.finished.disconnect(self.el.quit)
    # emit exception in case of error; http_call_result is filled in by the
    # reply handlers (replyFinished / requestTimedOut) before we get here.
    if not self.http_call_result.ok:
        if self.http_call_result.exception and not self.exception_class:
            raise self.http_call_result.exception
        else:
            raise self.exception_class(self.http_call_result.reason)
    return (self.http_call_result, self.http_call_result.content)
Trap the timeout. In Async mode requestTimedOut is called after replyFinished
def requestTimedOut(self, reply):
    """Trap the timeout. In Async mode requestTimedOut is called after replyFinished"""
    # Record the timeout so the blocking caller can raise it afterwards.
    timeout_error = RequestsExceptionTimeout("Timeout error")
    self.http_call_result.exception = timeout_error
    self.exception_class = RequestsExceptionTimeout
Handle SSL errors, logging them if debug is on and ignoring them
if disable_ssl_certificate_validation is set.
def sslErrors(self, ssl_errors):
    """
    Handle SSL errors, logging them if debug is on and ignoring them
    if disable_ssl_certificate_validation is set.
    """
    for err in (ssl_errors or []):
        self.msg_log("SSL Error: %s" % err.errorString())
    if self.disable_ssl_certificate_validation:
        self.reply.ignoreSslErrors()
Handle request to cancel HTTP call
def abort(self):
    """
    Handle request to cancel HTTP call
    """
    reply = self.reply
    # Only a reply that is still in flight can be aborted.
    if reply and reply.isRunning():
        self.on_abort = True
        reply.abort()
Return all the loaded layers. Filters by name (optional) first and then type (optional)
:param name: (optional) name of layer to return..
:param type: (optional) The QgsMapLayer type of layer to return. Accepts a single value or a list of them
:return: List of loaded layers. If name given will return all layers with matching name.
def mapLayers(name=None, types=None):
    """
    Return all the loaded layers. Filters by name (optional) first and then type (optional)
    :param name: (optional) regular expression matched against layer names.
    :param types: (optional) The QgsMapLayer type of layer to return. Accepts a single value or a list of them
    :return: List of loaded layers. If name given will return all layers with matching name.
    """
    if types is not None and not isinstance(types, list):
        types = [types]
    layers = _layerreg.mapLayers().values()
    if not (name or types):
        # FIX: return a list for a consistent return type - mapLayers().values()
        # is a dict view, while the filtered branch returns a list.
        return list(layers)
    matched = []
    if name:
        matched = [layer for layer in layers if re.match(name, layer.name())]
    if types:
        # NOTE: a layer matching both the name and type filter appears twice;
        # this mirrors the original additive-filter behaviour.
        matched += [layer for layer in layers if layer.type() in types]
    return matched
Add one or several layers to the QGIS session and layer registry.
:param layer: The layer object or list with layers to add the QGIS layer registry and session.
:param loadInLegend: True if this layer should be added to the legend.
:return: The added layer
def addLayer(layer, loadInLegend=True):
    """
    Add one or several layers to the QGIS session and layer registry.
    :param layer: The layer object or list with layers to add the QGIS layer registry and session.
    :param loadInLegend: True if this layer should be added to the legend.
    :return: The added layer(s), always as a list.
    """
    # Normalize the argument to a list so a single layer and a list of
    # layers can be handled uniformly.
    layers = layer if hasattr(layer, "__iter__") else [layer]
    _layerreg.addMapLayers(layers, loadInLegend)
    return layers
Tries to add a layer from layer object
Same as the addLayer method, but it does not ask for CRS, regardless of current
configuration in QGIS settings
def addLayerNoCrsDialog(layer, loadInLegend=True):
    '''
    Tries to add a layer from layer object
    Same as the addLayer method, but it does not ask for CRS, regardless of current
    configuration in QGIS settings
    '''
    settings = QSettings()
    prjSetting = settings.value('/Projections/defaultBehaviour')
    settings.setValue('/Projections/defaultBehaviour', '')
    # QGIS3 spells the same setting with the American spelling
    prjSetting3 = settings.value('/Projections/defaultBehavior')
    settings.setValue('/Projections/defaultBehavior', '')
    try:
        layer = addLayer(layer, loadInLegend)
    finally:
        # FIX: restore the user's CRS-prompt settings even if adding the layer
        # raises, so a failure doesn't permanently silence the CRS dialog.
        settings.setValue('/Projections/defaultBehaviour', prjSetting)
        settings.setValue('/Projections/defaultBehavior', prjSetting3)
    return layer
return layer |
Creates a new vector layer
:param filename: The filename to store the file. The extensions determines the type of file.
If extension is not among the supported ones, a shapefile will be created and the file will
get an added '.shp' to its path.
If the filename is None, a memory layer will be created
:param fields: the fields to add to the layer. Accepts a QgsFields object or a list of tuples (field_name, field_type)
Accepted field types are basic Python types str, float, int and bool
:param geometryType: The type of geometry of the layer to create.
:param crs: The crs of the layer to create. Accepts a QgsCoordinateSystem object or a string with the CRS authId.
:param encoding: The layer encoding
def newVectorLayer(filename, fields, geometryType, crs, encoding="utf-8"):
    '''
    Creates a new vector layer
    :param filename: The filename to store the file. The extension determines the type of file.
    If extension is not among the supported ones, a shapefile will be created and the file will
    get an added '.shp' to its path.
    If the filename is None, a memory layer will be created
    :param fields: the fields to add to the layer. Accepts a QgsFields object or a list of tuples (field_name, field_type)
    Accepted field types are basic Python types str, float, int and bool
    :param geometryType: The type of geometry of the layer to create.
    :param crs: The crs of the layer to create. Accepts a QgsCoordinateSystem object or a string with the CRS authId.
    :param encoding: The layer encoding
    '''
    # FIX: 'basestring' and 'unicode' only exist on Python 2 and raise
    # NameError on Python 3 (this module uses Python-3-only urllib.parse
    # elsewhere); 'str' is the correct name on Python 3.
    if isinstance(crs, str):
        crs = QgsCoordinateReferenceSystem(crs)
    if filename is None:
        # No filename: build a memory-provider URI instead of writing a file.
        uri = GEOM_TYPE_MAP[geometryType]
        if crs.isValid():
            uri += '?crs=' + crs.authid() + '&'
        fieldsdesc = ['field=' + f for f in fields]
        fieldsstring = '&'.join(fieldsdesc)
        uri += fieldsstring
        layer = QgsVectorLayer(uri, "mem_layer", 'memory')
    else:
        # Map file extensions to OGR driver codes from the supported formats.
        formats = QgsVectorFileWriter.supportedFiltersAndFormats()
        OGRCodes = {}
        for (key, value) in formats.items():
            extension = str(key)
            extension = extension[extension.find('*.') + 2:]
            extension = extension[:extension.find(' ')]
            OGRCodes[extension] = value
        extension = os.path.splitext(filename)[1][1:]
        if extension not in OGRCodes:
            # Unsupported extension: fall back to a shapefile.
            extension = 'shp'
            filename = filename + '.shp'
        if isinstance(fields, QgsFields):
            qgsfields = fields
        else:
            qgsfields = QgsFields()
            for field in fields:
                qgsfields.append(_toQgsField(field))
        QgsVectorFileWriter(filename, encoding, qgsfields,
                            geometryType, crs, OGRCodes[extension])
        layer = QgsVectorLayer(filename, os.path.basename(filename), 'ogr')
    return layer
Returns the layer from the current project with the passed name
Raises WrongLayerNameException if no layer with that name is found
If several layers with that name exist, only the first one is returned
def layerFromName(name):
    '''
    Returns the layer from the current project with the passed name
    Raises WrongLayerNameException if no layer with that name is found
    If several layers with that name exist, only the first one is returned
    '''
    for candidate in _layerreg.mapLayers().values():
        if candidate.name() == name:
            return candidate
    raise WrongLayerNameException()
Returns the layer from the current project with the passed source
Raises WrongLayerSourceException if no layer with that source is found
def layerFromSource(source):
    '''
    Returns the layer from the current project with the passed source
    Raises WrongLayerSourceException if no layer with that source is found
    '''
    for candidate in _layerreg.mapLayers().values():
        if candidate.source() == source:
            return candidate
    raise WrongLayerSourceException()
Tries to load a layer from the given file
:param filename: the path to the file to load.
:param name: the name to use for adding the layer to the current project.
If not passed or None, it will use the filename basename
def loadLayer(filename, name = None, provider=None):
    '''
    Tries to load a layer from the given file
    :param filename: the path to the file to load.
    :param name: the name to use for adding the layer to the current project.
    If not passed or None, it will use the filename basename
    :param provider: (optional) data provider key ('ogr', 'gdal', ...); when
    None the layer is tried as vector first, then as raster.
    '''
    name = name or os.path.splitext(os.path.basename(filename))[0]
    if provider != 'gdal':  # QGIS3 crashes if opening a raster as vector ... this needs further investigations
        qgslayer = QgsVectorLayer(filename, name, provider or "ogr")
    if provider == 'gdal' or not qgslayer.isValid():
        qgslayer = QgsRasterLayer(filename, name, provider or "gdal")
        if not qgslayer.isValid():
            # FIX: 'unicode' is undefined on Python 3; use str instead.
            raise RuntimeError('Could not load layer: ' + str(filename))
    return qgslayer
Tries to load a layer from the given file
Same as the loadLayer method, but it does not ask for CRS, regardless of current
configuration in QGIS settings
def loadLayerNoCrsDialog(filename, name=None, provider=None):
    '''
    Tries to load a layer from the given file
    Same as the loadLayer method, but it does not ask for CRS, regardless of current
    configuration in QGIS settings
    '''
    settings = QSettings()
    prjSetting = settings.value('/Projections/defaultBehaviour')
    settings.setValue('/Projections/defaultBehaviour', '')
    # QGIS3 spells the same setting with the American spelling
    prjSetting3 = settings.value('/Projections/defaultBehavior')
    settings.setValue('/Projections/defaultBehavior', '')
    try:
        layer = loadLayer(filename, name, provider)
    finally:
        # FIX: restore the user's CRS-prompt settings even if loading fails,
        # so a bad file doesn't permanently silence the CRS dialog.
        settings.setValue('/Projections/defaultBehaviour', prjSetting)
        settings.setValue('/Projections/defaultBehavior', prjSetting3)
    return layer
Adds a 'open settings...' menu to the plugin menu.
This method should be called from the initGui() method of the plugin
:param menuName: The name of the plugin menu in which the settings menu is to be added
:param parentMenuFunction: a function from QgisInterface to indicate where to put the container plugin menu.
If not passed, it uses addPluginToMenu
def addSettingsMenu(menuName, parentMenuFunction=None):
    '''
    Adds a 'open settings...' menu to the plugin menu.
    This method should be called from the initGui() method of the plugin

    :param menuName: The name of the plugin menu in which the settings menu is to be added
    :param parentMenuFunction: a function from QgisInterface to indicate where to put the container plugin menu.
    If not passed, it uses addPluginToMenu
    '''
    global _settingActions
    parentMenuFunction = parentMenuFunction or iface.addPluginToMenu
    namespace = _callerName().split(".")[0]
    action = QAction(
        QgsApplication.getThemeIcon('/mActionOptions.svg'),
        "Plugin Settings...",
        iface.mainWindow())
    action.setObjectName(namespace + "settings")
    action.triggered.connect(lambda: openSettingsDialog(namespace))
    parentMenuFunction(menuName, action)
    # Keep a reference so the action can be removed on plugin unload.
    _settingActions[menuName] = action
Opens a dialog to enter parameters.
Parameters are passed as a list of Parameter objects
Returns a dict with param names as keys and param values as values
Returns None if the dialog was cancelled
def openParametersDialog(params, title=None):
    '''
    Opens a dialog to enter parameters.
    Parameters are passed as a list of Parameter objects
    Returns a dict with param names as keys and param values as values
    Returns None if the dialog was cancelled
    '''
    QApplication.setOverrideCursor(QCursor(Qt.ArrowCursor))
    try:
        dlg = ParametersDialog(params, title)
        dlg.exec_()
    finally:
        # FIX: always restore the override cursor, even if constructing or
        # executing the dialog raises - otherwise the arrow cursor leaks.
        QApplication.restoreOverrideCursor()
    return dlg.values
Rebuild search index.
def do_index_command(self, index, **options):
    """Rebuild search index by deleting, recreating and repopulating it."""
    if options["interactive"]:
        logger.warning("This will permanently delete the index '%s'.", index)
        if not self._confirm_action():
            logger.warning(
                "Aborting rebuild of index '%s' at user's request.", index
            )
            return
    try:
        delete_result = delete_index(index)
    except TransportError:
        # A missing index is fine - we are about to recreate it anyway.
        logger.info("Index %s does not exist, cannot be deleted.", index)
        delete_result = {}
    return {
        "delete": delete_result,
        "create": create_index(index),
        "update": update_index(index),
    }
Run do_index_command on each specified index and log the output.
def handle(self, *args, **options):
    """Run do_index_command on each specified index and log the output."""
    for index in options.pop("indexes"):
        result = {}
        try:
            result = self.do_index_command(index, **options)
        except TransportError as ex:
            logger.warning("ElasticSearch threw an error: %s", ex)
            result = {"index": index, "status": ex.status_code, "reason": ex.error}
        finally:
            # Log whatever we have, whether the command succeeded or not.
            logger.info(result)
Creates a template.
:param Name: Name of template
:param Subject: The content to use for the Subject when this template is used to send email.
:param HtmlBody: The content to use for the HtmlBody when this template is used to send email.
:param TextBody: The content to use for the TextBody when this template is used to send email.
:return:
def create(self, Name, Subject, HtmlBody=None, TextBody=None, Alias=None):
    """
    Creates a template.

    :param Name: Name of template
    :param Subject: The content to use for the Subject when this template is used to send email.
    :param HtmlBody: The content to use for the HtmlBody when this template is used to send email.
    :param TextBody: The content to use for the TextBody when this template is used to send email.
    :param Alias: An optional string to identify this template.
    :return: The created template instance.
    """
    assert TextBody or HtmlBody, "Provide either email TextBody or HtmlBody or both"
    data = {"Name": Name, "Subject": Subject, "HtmlBody": HtmlBody, "TextBody": TextBody, "Alias": Alias}
    return self._init_instance(self.call("POST", "/templates", data=data))
Returns simple console logger.
def get_logger(name, verbosity, stream):
    """
    Returns simple console logger.
    """
    level_map = {0: DEFAULT_LOGGING_LEVEL, 1: logging.INFO, 2: logging.DEBUG}
    logger = logging.getLogger(name)
    logger.setLevel(level_map.get(min(2, verbosity), DEFAULT_LOGGING_LEVEL))
    # Replace any previously configured handlers so output goes only to `stream`.
    handler = logging.StreamHandler(stream)
    handler.setLevel(logging.DEBUG)
    handler.setFormatter(logging.Formatter(LOG_FORMAT))
    logger.handlers = [handler]
    return logger
Helper method for instantiating PostmarkClient from dict-like objects.
def from_config(cls, config, prefix="postmark_", is_uppercase=False):
    """
    Helper method for instantiating PostmarkClient from dict-like objects.
    """
    kwargs = {}
    for arg in get_args(cls):
        # Config keys are the argument names with a prefix, in a fixed case.
        key = (prefix + arg).upper() if is_uppercase else (prefix + arg).lower()
        if key in config:
            kwargs[arg] = config[key]
    return cls(**kwargs)
Split a container into n-sized chunks.
def chunks(container, n):
    """
    Split a container into n-sized chunks (the last one may be shorter).
    """
    start = 0
    while start < len(container):
        yield container[start : start + n]
        start += n
Helper to iterate over remote data via count & offset pagination.
def sizes(count, offset=0, max_chunk=500):
    """
    Helper to iterate over remote data via count & offset pagination.
    """
    if count is None:
        # Unknown total: page forever in max_chunk steps; the caller stops us.
        while True:
            yield max_chunk, offset
            offset += max_chunk
    else:
        remaining = count
        while remaining:
            step = min(remaining, max_chunk)
            remaining = max(0, remaining - max_chunk)
            yield step, offset
            offset += step
Constructs appropriate exception from list of responses and raises it.
def raise_for_response(self, responses):
    """
    Constructs appropriate exception from list of responses and raises it.
    """
    messages = [self.client.format_exception_message(item) for item in responses]
    # A single failure is reported verbatim; several are joined into one list.
    if len(messages) == 1:
        combined = messages[0]
    else:
        combined = "[%s]" % ", ".join(messages)
    raise PostmarkerException(combined)
Converts list to string with comma separated values. For string is no-op.
def list_to_csv(value):
    """
    Converts list to string with comma separated values. For string is no-op.
    """
    if not isinstance(value, (list, tuple, set)):
        return value
    return ",".join(value)
Converts incoming attachment into dictionary.
def prepare_attachments(attachment):
    """
    Converts incoming attachment into dictionary.

    Accepted forms:
      - tuple: (name, content, content_type[, content_id])
      - MIMEBase instance: payload, filename and content type are extracted
      - str: treated as a path to a file to read and base64-encode
      - anything else is passed through unchanged (assumed already a dict)
    """
    if isinstance(attachment, tuple):
        result = {"Name": attachment[0], "Content": attachment[1], "ContentType": attachment[2]}
        # Optional 4th element is the Content-ID for inline attachments.
        if len(attachment) == 4:
            result["ContentID"] = attachment[3]
    elif isinstance(attachment, MIMEBase):
        payload = attachment.get_payload()
        content_type = attachment.get_content_type()
        # Special case for message/rfc822
        # Even if RFC implies such attachments being not base64-encoded,
        # Postmark requires all attachments to be encoded in this way
        if content_type == "message/rfc822" and not isinstance(payload, str):
            payload = b64encode(payload[0].get_payload(decode=True)).decode()
        result = {
            "Name": attachment.get_filename() or "attachment.txt",
            "Content": payload,
            "ContentType": content_type,
        }
        content_id = attachment.get("Content-ID")
        if content_id:
            # Strip the RFC 2392 angle brackets around the Content-ID.
            if content_id.startswith("<") and content_id.endswith(">"):
                content_id = content_id[1:-1]
            # Inline attachments are referenced as "cid:..." from the HTML body.
            if (attachment.get("Content-Disposition") or "").startswith("inline"):
                content_id = "cid:%s" % content_id
            result["ContentID"] = content_id
    elif isinstance(attachment, str):
        # A plain string is a file path: read and base64-encode its contents.
        content_type = guess_content_type(attachment)
        filename = os.path.basename(attachment)
        with open(attachment, "rb") as fd:
            data = fd.read()
        result = {"Name": filename, "Content": b64encode(data).decode("utf-8"), "ContentType": content_type}
    else:
        result = attachment
    return result
Additionally encodes headers.
:return:
def as_dict(self):
    """
    Additionally encodes headers, recipient lists and attachments.
    :return: dict in the shape expected by the Postmark API.
    """
    data = super(BaseEmail, self).as_dict()
    # The API expects headers as a list of {"Name": ..., "Value": ...} pairs.
    data["Headers"] = [
        {"Name": header_name, "Value": header_value}
        for header_name, header_value in data["Headers"].items()
    ]
    for recipient_field in ("To", "Cc", "Bcc"):
        if recipient_field in data:
            data[recipient_field] = list_to_csv(data[recipient_field])
    data["Attachments"] = [prepare_attachments(item) for item in data["Attachments"]]
    return data
Attaches given binary data.
:param bytes content: Binary data to be attached.
:param str filename:
:return: None.
def attach_binary(self, content, filename):
    """
    Attaches given binary data.

    :param bytes content: Binary data to be attached.
    :param str filename: Name under which the attachment will appear.
    :return: None.
    """
    self.attach({
        "Name": filename,
        "Content": b64encode(content).decode("utf-8"),
        # Content type is inferred from the filename extension.
        "ContentType": guess_content_type(filename),
    })
Instantiates ``Email`` instance from ``MIMEText`` instance.
:param message: ``email.mime.text.MIMEText`` instance.
:param manager: :py:class:`EmailManager` instance.
:return: :py:class:`Email`
def from_mime(cls, message, manager):
    """
    Instantiates ``Email`` instance from ``MIMEText`` instance.

    :param message: ``email.mime.text.MIMEText`` instance.
    :param manager: :py:class:`EmailManager` instance.
    :return: :py:class:`Email`
    """
    text, html, attachments = deconstruct_multipart(message)
    # Decode all the address/subject headers in one pass.
    headers = {
        field: prepare_header(message[field])
        for field in ("Subject", "From", "To", "Cc", "Bcc", "Reply-To")
    }
    return cls(
        manager=manager,
        From=headers["From"],
        To=headers["To"],
        TextBody=text,
        HtmlBody=html,
        Subject=headers["Subject"],
        Cc=headers["Cc"],
        Bcc=headers["Bcc"],
        ReplyTo=headers["Reply-To"],
        Attachments=attachments,
        Tag=getattr(message, "tag", None),
    )
Converts all available emails to dictionaries.
:return: List of dictionaries.
def as_dict(self, **extra):
    """
    Converts all available emails to dictionaries.

    :return: List of dictionaries.
    """
    return [self._construct_email(item, **extra) for item in self.emails]
Converts incoming data to properly structured dictionary.
def _construct_email(self, email, **extra):
    """
    Normalizes ``email`` (dict, MIME message or Email) into an API-ready dict.
    """
    if isinstance(email, dict):
        instance = Email(manager=self._manager, **email)
    elif isinstance(email, (MIMEText, MIMEMultipart)):
        instance = Email.from_mime(email, self._manager)
    elif isinstance(email, Email):
        instance = email
    else:
        raise ValueError
    instance._update(extra)
    return instance.as_dict()
Sends email batch.
:return: Information about sent emails.
:rtype: `list`
def send(self, **extra):
    """
    Sends email batch.

    :return: Information about sent emails.
    :rtype: `list`
    """
    prepared = self.as_dict(**extra)
    # The API limits batch size, so send in MAX_SIZE slices and flatten
    # the per-batch response lists into a single list.
    results = []
    for batch in chunks(prepared, self.MAX_SIZE):
        results.extend(self._manager._send_batch(*batch))
    return results
Sends a single email.
:param message: :py:class:`Email` or ``email.mime.text.MIMEText`` instance.
:param str From: The sender email address.
:param To: Recipient's email address.
Multiple recipients could be specified as a list or string with comma separated values.
:type To: str or list
:param Cc: Cc recipient's email address.
Multiple Cc recipients could be specified as a list or string with comma separated values.
:type Cc: str or list
:param Bcc: Bcc recipient's email address.
Multiple Bcc recipients could be specified as a list or string with comma separated values.
:type Bcc: str or list
:param str Subject: Email subject.
:param str Tag: Email tag.
:param str HtmlBody: HTML email message.
:param str TextBody: Plain text email message.
:param str ReplyTo: Reply To override email address.
:param dict Headers: Dictionary of custom headers to include.
:param bool TrackOpens: Activate open tracking for this email.
:param str TrackLinks: Activate link tracking for links in the HTML or Text bodies of this email.
:param list Attachments: List of attachments.
:return: Information about sent email.
:rtype: `dict`
def send(
    self,
    message=None,
    From=None,
    To=None,
    Cc=None,
    Bcc=None,
    Subject=None,
    Tag=None,
    HtmlBody=None,
    TextBody=None,
    Metadata=None,
    ReplyTo=None,
    Headers=None,
    TrackOpens=None,
    TrackLinks="None",
    Attachments=None,
):
    """
    Sends a single email.

    :param message: :py:class:`Email` or ``email.mime.text.MIMEText`` instance.
        Mutually exclusive with the field-by-field parameters below.
    :param str From: The sender email address.
    :param To: Recipient's email address (list or comma-separated string).
    :type To: str or list
    :param Cc: Cc recipient's email address (list or comma-separated string).
    :type Cc: str or list
    :param Bcc: Bcc recipient's email address (list or comma-separated string).
    :type Bcc: str or list
    :param str Subject: Email subject.
    :param str Tag: Email tag.
    :param str HtmlBody: HTML email message.
    :param str TextBody: Plain text email message.
    :param str ReplyTo: Reply To override email address.
    :param dict Headers: Dictionary of custom headers to include.
    :param bool TrackOpens: Activate open tracking for this email.
    :param str TrackLinks: Activate link tracking for links in the HTML or Text bodies of this email.
    :param list Attachments: List of attachments.
    :return: Information about sent email.
    :rtype: `dict`
    """
    assert not (message and (From or To)), "You should specify either message or From and To parameters"
    assert TrackLinks in ("None", "HtmlAndText", "HtmlOnly", "TextOnly")
    if message is None:
        # Build the Email from the individual fields.
        message = self.Email(
            From=From,
            To=To,
            Cc=Cc,
            Bcc=Bcc,
            Subject=Subject,
            Tag=Tag,
            HtmlBody=HtmlBody,
            TextBody=TextBody,
            Metadata=Metadata,
            ReplyTo=ReplyTo,
            Headers=Headers,
            TrackOpens=TrackOpens,
            TrackLinks=TrackLinks,
            Attachments=Attachments,
        )
    elif isinstance(message, (MIMEText, MIMEMultipart)):
        # Convert a standard-library MIME message into an Email.
        message = Email.from_mime(message, self)
    elif not isinstance(message, Email):
        raise TypeError("message should be either Email or MIMEText or MIMEMultipart instance")
    return message.send()
Constructs :py:class:`Email` instance.
:return: :py:class:`Email`
def Email(
    self,
    From,
    To,
    Cc=None,
    Bcc=None,
    Subject=None,
    Tag=None,
    HtmlBody=None,
    TextBody=None,
    Metadata=None,
    ReplyTo=None,
    Headers=None,
    TrackOpens=None,
    TrackLinks="None",
    Attachments=None,
):
    """
    Constructs an :py:class:`Email` instance bound to this manager.

    :return: :py:class:`Email`
    """
    # Forward every field to the Email class, binding it to this manager so
    # the instance can later be sent through the same client.
    return Email(
        manager=self,
        From=From,
        To=To,
        Cc=Cc,
        Bcc=Bcc,
        Subject=Subject,
        Tag=Tag,
        HtmlBody=HtmlBody,
        TextBody=TextBody,
        Metadata=Metadata,
        ReplyTo=ReplyTo,
        Headers=Headers,
        TrackOpens=TrackOpens,
        TrackLinks=TrackLinks,
        Attachments=Attachments,
    )
Constructs :py:class:`EmailTemplate` instance.
:return: :py:class:`EmailTemplate`
def EmailTemplate(
    self,
    TemplateId,
    TemplateModel,
    From,
    To,
    TemplateAlias=None,
    Cc=None,
    Bcc=None,
    Subject=None,
    Tag=None,
    ReplyTo=None,
    Headers=None,
    TrackOpens=None,
    TrackLinks="None",
    Attachments=None,
    InlineCss=True,
):
    """
    Constructs an :py:class:`EmailTemplate` instance bound to this manager.

    :return: :py:class:`EmailTemplate`
    """
    # Forward every field to the EmailTemplate class, binding it to this
    # manager so the instance can later be sent through the same client.
    return EmailTemplate(
        manager=self,
        TemplateId=TemplateId,
        TemplateAlias=TemplateAlias,
        TemplateModel=TemplateModel,
        From=From,
        To=To,
        Cc=Cc,
        Bcc=Bcc,
        Subject=Subject,
        Tag=Tag,
        ReplyTo=ReplyTo,
        Headers=Headers,
        TrackOpens=TrackOpens,
        TrackLinks=TrackLinks,
        Attachments=Attachments,
        InlineCss=InlineCss,
    )
Activates the bounce instance and updates it with the latest data.
:return: Activation status.
:rtype: `str`
def activate(self):
    """
    Activates the bounce instance and updates it with the latest data.

    :return: Activation status.
    :rtype: `str`
    """
    payload = self._manager.activate(self.ID)
    # Refresh this instance with the server's view of the bounce.
    self._update(payload["Bounce"])
    return payload["Message"]
Returns many bounces.
:param int count: Number of bounces to return per request.
:param int offset: Number of bounces to skip.
:param str type: Filter by type of bounce.
:param bool inactive: Filter by emails that were deactivated by Postmark due to the bounce.
:param str emailFilter: Filter by email address.
:param str tag: Filter by tag.
:param str messageID: Filter by messageID.
:param date fromdate: Filter messages starting from the date specified (inclusive).
:param date todate: Filter messages up to the date specified (inclusive).
:return: A list of :py:class:`Bounce` instances.
:rtype: `list`
def all(
    self,
    count=500,
    offset=0,
    type=None,
    inactive=None,
    emailFilter=None,
    tag=None,
    messageID=None,
    fromdate=None,
    todate=None,
):
    """
    Returns many bounces.

    :param int count: Number of bounces to return per request.
    :param int offset: Number of bounces to skip.
    :param str type: Filter by type of bounce.
    :param bool inactive: Filter by emails that were deactivated by Postmark due to the bounce.
    :param str emailFilter: Filter by email address.
    :param str tag: Filter by tag.
    :param str messageID: Filter by messageID.
    :param date fromdate: Filter messages starting from the date specified (inclusive).
    :param date todate: Filter messages up to the date specified (inclusive).
    :return: A list of :py:class:`Bounce` instances.
    :rtype: `list`
    """
    filters = dict(
        count=count,
        offset=offset,
        type=type,
        inactive=inactive,
        emailFilter=emailFilter,
        tag=tag,
        messageID=messageID,
        fromdate=fromdate,
        todate=todate,
    )
    responses = self.call_many("GET", "/bounces/", **filters)
    return self.expand_responses(responses, "Bounces")
Helper to support handy dictionaries merging on all Python versions.
def update_kwargs(self, kwargs, count, offset):
    """
    Helper to support handy dictionaries merging on all Python versions.
    """
    # Write the pagination values under this manager's own key names.
    kwargs[self.count_key] = count
    kwargs[self.offset_key] = offset
    return kwargs
Gets a brief overview of statistics for all of your outbound email.
def overview(self, tag=None, fromdate=None, todate=None):
    """
    Gets a brief overview of statistics for all of your outbound email.
    """
    filters = {"tag": tag, "fromdate": fromdate, "todate": todate}
    return self.call("GET", "/stats/outbound", **filters)
Gets a total count of emails you’ve sent out.
def sends(self, tag=None, fromdate=None, todate=None):
    """Get a total count of emails that have been sent out."""
    params = {"tag": tag, "fromdate": fromdate, "todate": todate}
    return self.call("GET", "/stats/outbound/sends", **params)
Gets total counts of emails you’ve sent out that have been returned as bounced.
def bounces(self, tag=None, fromdate=None, todate=None):
    """Get total counts of sent emails that were returned as bounced."""
    params = {"tag": tag, "fromdate": fromdate, "todate": todate}
    return self.call("GET", "/stats/outbound/bounces", **params)
Gets a total count of recipients who have marked your email as spam.
def spam(self, tag=None, fromdate=None, todate=None):
    """Get a total count of recipients who marked your email as spam."""
    params = {"tag": tag, "fromdate": fromdate, "todate": todate}
    return self.call("GET", "/stats/outbound/spam", **params)
Gets a total count of emails you’ve sent with open tracking or link tracking enabled.
def tracked(self, tag=None, fromdate=None, todate=None):
    """Get a total count of emails sent with open tracking or link tracking enabled."""
    params = {"tag": tag, "fromdate": fromdate, "todate": todate}
    return self.call("GET", "/stats/outbound/tracked", **params)
Gets total counts of recipients who opened your emails.
This is only recorded when open tracking is enabled for that email.
def opens(self, tag=None, fromdate=None, todate=None):
    """
    Get total counts of recipients who opened your emails.

    Only recorded when open tracking is enabled for that email.
    """
    params = {"tag": tag, "fromdate": fromdate, "todate": todate}
    return self.call("GET", "/stats/outbound/opens", **params)
Gets an overview of the platforms used to open your emails.
This is only recorded when open tracking is enabled for that email.
def opens_platforms(self, tag=None, fromdate=None, todate=None):
    """
    Get an overview of the platforms used to open your emails.

    Only recorded when open tracking is enabled for that email.
    """
    params = {"tag": tag, "fromdate": fromdate, "todate": todate}
    return self.call("GET", "/stats/outbound/opens/platforms", **params)
Gets an overview of the email clients used to open your emails.
This is only recorded when open tracking is enabled for that email.
def emailclients(self, tag=None, fromdate=None, todate=None):
    """
    Get an overview of the email clients used to open your emails.

    Only recorded when open tracking is enabled for that email.
    """
    params = {"tag": tag, "fromdate": fromdate, "todate": todate}
    return self.call("GET", "/stats/outbound/opens/emailclients", **params)
Gets the length of time that recipients read emails along with counts for each time.
This is only recorded when open tracking is enabled for that email.
Read time tracking stops at 20 seconds, so any read times above that will appear in the 20s+ field.
def readtimes(self, tag=None, fromdate=None, todate=None):
    """
    Get how long recipients read emails, with counts for each duration.

    Only recorded when open tracking is enabled for that email.
    Read time tracking stops at 20 seconds; longer reads land in the 20s+ field.
    """
    params = {"tag": tag, "fromdate": fromdate, "todate": todate}
    return self.call("GET", "/stats/outbound/opens/readtimes", **params)
Gets total counts of unique links that were clicked.
def clicks(self, tag=None, fromdate=None, todate=None):
    """Get total counts of unique links that were clicked."""
    params = {"tag": tag, "fromdate": fromdate, "todate": todate}
    return self.call("GET", "/stats/outbound/clicks", **params)
Gets an overview of the browsers used to open links in your emails.
This is only recorded when Link Tracking is enabled for that email.
def browserfamilies(self, tag=None, fromdate=None, todate=None):
    """
    Get an overview of the browsers used to open links in your emails.

    Only recorded when Link Tracking is enabled for that email.
    """
    params = {"tag": tag, "fromdate": fromdate, "todate": todate}
    return self.call("GET", "/stats/outbound/clicks/browserfamilies", **params)
Gets an overview of the browser platforms used to open your emails.
This is only recorded when Link Tracking is enabled for that email.
def clicks_platforms(self, tag=None, fromdate=None, todate=None):
    """
    Get an overview of the browser platforms used to open your emails.

    Only recorded when Link Tracking is enabled for that email.
    """
    params = {"tag": tag, "fromdate": fromdate, "todate": todate}
    return self.call("GET", "/stats/outbound/clicks/platforms", **params)
Gets an overview of which part of the email links were clicked from (HTML or Text).
This is only recorded when Link Tracking is enabled for that email.
def location(self, tag=None, fromdate=None, todate=None):
    """
    Get which part of the email links were clicked from (HTML or Text).

    Only recorded when Link Tracking is enabled for that email.
    """
    params = {"tag": tag, "fromdate": fromdate, "todate": todate}
    return self.call("GET", "/stats/outbound/clicks/location", **params)
Return the global line rate of the coverage report. If the
`filename` file is given, return the line rate of the file.
def line_rate(self, filename=None):
    """
    Return the global line rate of the coverage report. If the
    `filename` file is given, return the line rate of the file.
    """
    # Fall back to the report root element when no file is requested.
    element = (
        self.xml
        if filename is None
        else self._get_class_element_by_filename(filename)
    )
    return float(element.attrib['line-rate'])
Return the global branch rate of the coverage report. If the
`filename` file is given, return the branch rate of the file.
def branch_rate(self, filename=None):
    """
    Return the global branch rate of the coverage report. If the
    `filename` file is given, return the branch rate of the file.
    """
    # Fall back to the report root element when no file is requested.
    element = (
        self.xml
        if filename is None
        else self._get_class_element_by_filename(filename)
    )
    return float(element.attrib['branch-rate'])
Return a list of uncovered line numbers for each of the missed
statements found for the file `filename`.
def missed_statements(self, filename):
    """
    Return a list of uncovered line numbers for each of the missed
    statements found for the file `filename`.
    """
    class_element = self._get_class_element_by_filename(filename)
    # Select <line> elements with zero hits, i.e. never executed.
    zero_hit_lines = class_element.xpath('./lines/line[@hits=0]')
    # Renamed the loop variable from ambiguous `l` (E741) for readability.
    return [int(line.attrib['number']) for line in zero_hit_lines]
Return a list of tuples `(lineno, status)` of all the lines found in
the Cobertura report for the given file `filename` where `lineno` is
the line number and `status` is coverage status of the line which can
be either `True` (line hit) or `False` (line miss).
def line_statuses(self, filename):
    """
    Return a list of tuples `(lineno, status)` of all the lines found in
    the Cobertura report for the given file `filename` where `lineno` is
    the line number and `status` is coverage status of the line which can
    be either `True` (line hit) or `False` (line miss).
    """
    # A line counts as covered whenever its hit count is non-zero.
    return [
        (int(element.attrib['number']), element.attrib['hits'] != '0')
        for element in self._get_lines_by_filename(filename)
    ]
Return a list of extrapolated uncovered line numbers for the
file `filename` according to `Cobertura.line_statuses`.
def missed_lines(self, filename):
    """
    Return a list of extrapolated uncovered line numbers for the
    file `filename` according to `Cobertura.line_statuses`.
    """
    extrapolated = extrapolate_coverage(self.line_statuses(filename))
    return [lineno for lineno, hit in extrapolated if hit is False]
Return a list of namedtuple `Line` for each line of code found in the
source file with the given `filename`.
def file_source(self, filename):
    """
    Return a list of namedtuple `Line` for each line of code found in the
    source file with the given `filename`.
    """
    result = []
    try:
        # Open first so a missing file short-circuits to the fallback
        # before any report data is consulted.
        with self.filesystem.open(filename) as source_file:
            statuses = dict(self.line_statuses(filename))
            for lineno, source in enumerate(source_file, start=1):
                result.append(Line(lineno, source, statuses.get(lineno), None))
    except self.filesystem.FileNotFound as exc:
        # Surface the missing path as a single placeholder line.
        result.append(Line(0, '%s not found' % exc.path, None, None))
    return result
Return the total number of uncovered statements for the file
`filename`. If `filename` is not given, return the total
number of uncovered statements for all files.
def total_misses(self, filename=None):
    """
    Return the total number of uncovered statements for the file
    `filename`. If `filename` is not given, return the total
    number of uncovered statements for all files.
    """
    if filename is not None:
        return len(self.missed_statements(filename))
    # Idiomatic sum over all files; also avoids shadowing the
    # `filename` parameter with the loop variable as before.
    return sum(len(self.missed_statements(f)) for f in self.files())
Return the total number of covered statements for the file
`filename`. If `filename` is not given, return the total
number of covered statements for all files.
def total_hits(self, filename=None):
    """
    Return the total number of covered statements for the file
    `filename`. If `filename` is not given, return the total
    number of covered statements for all files.
    """
    if filename is not None:
        return len(self.hit_statements(filename))
    # Idiomatic sum over all files; also avoids shadowing the
    # `filename` parameter with the loop variable as before.
    return sum(len(self.hit_statements(f)) for f in self.files())
Return the total number of statements for the file
`filename`. If `filename` is not given, return the total
number of statements for all files.
def total_statements(self, filename=None):
    """
    Return the total number of statements for the file
    `filename`. If `filename` is not given, return the total
    number of statements for all files.
    """
    if filename is not None:
        return len(self._get_lines_by_filename(filename))
    # Idiomatic sum over all files; also avoids shadowing the
    # `filename` parameter with the loop variable as before.
    return sum(len(self._get_lines_by_filename(f)) for f in self.files())
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.