text stringlengths 81 112k |
|---|
API call: delete all entries in a logger via a DELETE request
See
https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.logs/delete
:type client: :class:`~google.cloud.logging.client.Client` or
``NoneType``
:param client: the client to use. If not passed, falls back to the
``client`` stored on the current logger.
def delete(self, client=None):
    """API call: delete all entries in this logger via a DELETE request.

    See
    https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.logs/delete

    :type client: :class:`~google.cloud.logging.client.Client` or
                  ``NoneType``
    :param client: (Optional) the client to use; if not passed, falls back
                   to the ``client`` stored on the current logger.
    """
    api_client = self._require_client(client)
    api_client.logging_api.logger_delete(self.project, self.name)
Return a page of log entries.
See
https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries/list
:type projects: list of strings
:param projects: project IDs to include. If not passed,
defaults to the project bound to the client.
:type filter_: str
:param filter_:
a filter expression. See
https://cloud.google.com/logging/docs/view/advanced_filters
:type order_by: str
:param order_by: One of :data:`~google.cloud.logging.ASCENDING`
or :data:`~google.cloud.logging.DESCENDING`.
:type page_size: int
:param page_size:
Optional. The maximum number of entries in each page of results
from this request. Non-positive values are ignored. Defaults
to a sensible value set by the API.
:type page_token: str
:param page_token:
Optional. If present, return the next batch of entries, using
the value, which must correspond to the ``nextPageToken`` value
returned in the previous response. Deprecated: use the ``pages``
property of the returned iterator instead of manually passing
the token.
:rtype: :class:`~google.api_core.page_iterator.Iterator`
:returns: Iterator of log entries accessible to the current logger.
See :class:`~google.cloud.logging.entries.LogEntry`.
def list_entries(
    self,
    projects=None,
    filter_=None,
    order_by=None,
    page_size=None,
    page_token=None,
):
    """Return a page of log entries scoped to this logger.

    See
    https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries/list

    :type projects: list of strings
    :param projects: project IDs to include. If not passed,
                     defaults to the project bound to the client.

    :type filter_: str
    :param filter_: a filter expression. See
                    https://cloud.google.com/logging/docs/view/advanced_filters

    :type order_by: str
    :param order_by: One of :data:`~google.cloud.logging.ASCENDING`
                     or :data:`~google.cloud.logging.DESCENDING`.

    :type page_size: int
    :param page_size: Optional. Maximum number of entries per page.
                      Non-positive values are ignored.

    :type page_token: str
    :param page_token: Optional. Token for the next batch of entries.
                       Deprecated: use the ``pages`` property of the
                       returned iterator instead of passing this manually.

    :rtype: :class:`~google.api_core.page_iterator.Iterator`
    :returns: Iterator of log entries accessible to the current logger.
              See :class:`~google.cloud.logging.entries.LogEntry`.
    """
    # Always constrain results to this logger; AND with any caller filter.
    log_filter = "logName=%s" % (self.full_name,)
    combined = log_filter if filter_ is None else "%s AND %s" % (filter_, log_filter)
    return self.client.list_entries(
        projects=projects,
        filter_=combined,
        order_by=order_by,
        page_size=page_size,
        page_token=page_token,
    )
Add a text entry to be logged during :meth:`commit`.
:type text: str
:param text: the text entry
:type kw: dict
:param kw: (optional) additional keyword arguments for the entry.
See :class:`~google.cloud.logging.entries.LogEntry`.
def log_text(self, text, **kw):
    """Queue a text entry to be sent during :meth:`commit`.

    :type text: str
    :param text: the text entry

    :type kw: dict
    :param kw: (optional) additional keyword arguments for the entry.
               See :class:`~google.cloud.logging.entries.LogEntry`.
    """
    entry = TextEntry(payload=text, **kw)
    self.entries.append(entry)
Add a struct entry to be logged during :meth:`commit`.
:type info: dict
:param info: the struct entry
:type kw: dict
:param kw: (optional) additional keyword arguments for the entry.
See :class:`~google.cloud.logging.entries.LogEntry`.
def log_struct(self, info, **kw):
    """Queue a struct entry to be sent during :meth:`commit`.

    :type info: dict
    :param info: the struct entry

    :type kw: dict
    :param kw: (optional) additional keyword arguments for the entry.
               See :class:`~google.cloud.logging.entries.LogEntry`.
    """
    entry = StructEntry(payload=info, **kw)
    self.entries.append(entry)
Add a protobuf entry to be logged during :meth:`commit`.
:type message: protobuf message
:param message: the protobuf entry
:type kw: dict
:param kw: (optional) additional keyword arguments for the entry.
See :class:`~google.cloud.logging.entries.LogEntry`.
def log_proto(self, message, **kw):
    """Queue a protobuf entry to be sent during :meth:`commit`.

    :type message: protobuf message
    :param message: the protobuf entry

    :type kw: dict
    :param kw: (optional) additional keyword arguments for the entry.
               See :class:`~google.cloud.logging.entries.LogEntry`.
    """
    entry = ProtobufEntry(payload=message, **kw)
    self.entries.append(entry)
Send saved log entries as a single API call.
:type client: :class:`~google.cloud.logging.client.Client` or
``NoneType``
:param client: the client to use. If not passed, falls back to the
``client`` stored on the current batch.
def commit(self, client=None):
    """Send all queued log entries in a single API call.

    :type client: :class:`~google.cloud.logging.client.Client` or
                  ``NoneType``
    :param client: (Optional) the client to use; if not passed, falls back
                   to the ``client`` stored on the current batch.
    """
    if client is None:
        client = self.client

    write_kwargs = {"logger_name": self.logger.full_name}
    resource = self.resource
    if resource is not None:
        write_kwargs["resource"] = resource._to_dict()
    labels = self.logger.labels
    if labels is not None:
        write_kwargs["labels"] = labels

    client.logging_api.write_entries(
        [entry.to_api_repr() for entry in self.entries], **write_kwargs
    )
    # Empty the batch in place so callers holding a reference see it drain.
    del self.entries[:]
Fully-qualified name of this variable.
Example:
``projects/my-project/configs/my-config``
:rtype: str
:returns: The full name based on project and config names.
:raises: :class:`ValueError` if the config is missing a name.
def full_name(self):
    """Fully-qualified name of this config.

    Example: ``projects/my-project/configs/my-config``

    :rtype: str
    :returns: The full name based on project and config names.

    :raises: :class:`ValueError` if the config is missing a name.
    """
    name = self.name
    if not name:
        raise ValueError("Missing config name.")
    return "projects/%s/configs/%s" % (self._client.project, name)
Update properties from resource in body of ``api_response``
:type api_response: dict
:param api_response: response returned from an API call
def _set_properties(self, api_response):
    """Replace stored properties with the resource in ``api_response``.

    :type api_response: dict
    :param api_response: response returned from an API call
    """
    self._properties.clear()
    cleaned = dict(api_response)
    # The config's short name is derived from the resource's full name.
    if "name" in cleaned:
        self.name = config_name_from_full_name(cleaned.pop("name"))
    self._properties.update(cleaned)
API call: reload the config via a ``GET`` request.
This method will reload the newest data for the config.
See
https://cloud.google.com/deployment-manager/runtime-configurator/reference/rest/v1beta1/projects.configs/get
:type client: :class:`google.cloud.runtimeconfig.client.Client`
:param client:
(Optional) The client to use. If not passed, falls back to the
client stored on the current config.
def reload(self, client=None):
    """API call: refresh this config via a ``GET`` request.

    Fetches the newest data for the config.

    See
    https://cloud.google.com/deployment-manager/runtime-configurator/reference/rest/v1beta1/projects.configs/get

    :type client: :class:`google.cloud.runtimeconfig.client.Client`
    :param client: (Optional) The client to use. If not passed, falls back
                   to the client stored on the current config.
    """
    client = self._require_client(client)
    # A missing config surfaces as a NotFound raised by the API request.
    response = client._connection.api_request(method="GET", path=self.path)
    self._set_properties(api_response=response)
API call: get a variable via a ``GET`` request.
This will return None if the variable doesn't exist::
>>> from google.cloud import runtimeconfig
>>> client = runtimeconfig.Client()
>>> config = client.config('my-config')
>>> print(config.get_variable('variable-name'))
<Variable: my-config, variable-name>
>>> print(config.get_variable('does-not-exist'))
None
:type variable_name: str
:param variable_name: The name of the variable to retrieve.
:type client: :class:`~google.cloud.runtimeconfig.client.Client`
:param client:
(Optional) The client to use. If not passed, falls back to the
``client`` stored on the current config.
:rtype: :class:`google.cloud.runtimeconfig.variable.Variable` or None
:returns: The variable object if it exists, otherwise None.
def get_variable(self, variable_name, client=None):
    """API call: fetch a variable via a ``GET`` request.

    Returns None if the variable doesn't exist::

        >>> from google.cloud import runtimeconfig
        >>> client = runtimeconfig.Client()
        >>> config = client.config('my-config')
        >>> print(config.get_variable('variable-name'))
        <Variable: my-config, variable-name>
        >>> print(config.get_variable('does-not-exist'))
        None

    :type variable_name: str
    :param variable_name: The name of the variable to retrieve.

    :type client: :class:`~google.cloud.runtimeconfig.client.Client`
    :param client: (Optional) The client to use. If not passed, falls back
                   to the ``client`` stored on the current config.

    :rtype: :class:`google.cloud.runtimeconfig.variable.Variable` or None
    :returns: The variable object if it exists, otherwise None.
    """
    client = self._require_client(client)
    variable = Variable(config=self, name=variable_name)
    try:
        variable.reload(client=client)
    except NotFound:
        # Absence is an expected outcome, not an error.
        return None
    return variable
API call: list variables for this config.
This only lists variable names, not the values.
See
https://cloud.google.com/deployment-manager/runtime-configurator/reference/rest/v1beta1/projects.configs.variables/list
:type page_size: int
:param page_size:
Optional. The maximum number of variables in each page of results
from this request. Non-positive values are ignored. Defaults
to a sensible value set by the API.
:type page_token: str
:param page_token:
Optional. If present, return the next batch of variables, using
the value, which must correspond to the ``nextPageToken`` value
returned in the previous response. Deprecated: use the ``pages``
property of the returned iterator instead of manually passing
the token.
:type client: :class:`~google.cloud.runtimeconfig.client.Client`
:param client:
(Optional) The client to use. If not passed, falls back to the
``client`` stored on the current config.
:rtype: :class:`~google.api_core.page_iterator.Iterator`
:returns:
Iterator of :class:`~google.cloud.runtimeconfig.variable.Variable`
belonging to this project.
def list_variables(self, page_size=None, page_token=None, client=None):
    """API call: list the variables for this config.

    Only variable names are returned, not values.

    See
    https://cloud.google.com/deployment-manager/runtime-configurator/reference/rest/v1beta1/projects.configs.variables/list

    :type page_size: int
    :param page_size: Optional. Maximum number of variables per page.
                      Non-positive values are ignored.

    :type page_token: str
    :param page_token: Optional. Token for the next batch of variables.
                       Deprecated: use the ``pages`` property of the
                       returned iterator instead of passing this manually.

    :type client: :class:`~google.cloud.runtimeconfig.client.Client`
    :param client: (Optional) The client to use. If not passed, falls back
                   to the ``client`` stored on the current config.

    :rtype: :class:`~google.api_core.page_iterator.Iterator`
    :returns: Iterator of
              :class:`~google.cloud.runtimeconfig.variable.Variable`
              belonging to this project.
    """
    client = self._require_client(client)
    variables_path = "%s/variables" % (self.path,)
    iterator = page_iterator.HTTPIterator(
        client=client,
        api_request=client._connection.api_request,
        path=variables_path,
        item_to_value=_item_to_variable,
        items_key="variables",
        page_token=page_token,
        max_results=page_size,
    )
    # The runtimeconfig API spells the page-size query parameter "pageSize".
    iterator._MAX_RESULTS = "pageSize"
    iterator.config = self
    return iterator
Base64-decode value
def _bytes_from_json(value, field):
    """Decode a base64-encoded cell value, honoring NULL-ability."""
    if not _not_null(value, field):
        return None
    return base64.standard_b64decode(_to_bytes(value))
Coerce 'value' to a datetime, if set or not nullable.
Args:
value (str): The timestamp.
field (.SchemaField): The field corresponding to the value.
Returns:
Optional[datetime.datetime]: The parsed datetime object from
``value`` if the ``field`` is not null (otherwise it is
:data:`None`).
def _timestamp_query_param_from_json(value, field):
    """Coerce 'value' to an aware UTC datetime, if set or not nullable.

    Args:
        value (str): The timestamp.
        field (.SchemaField): The field corresponding to the value.

    Returns:
        Optional[datetime.datetime]: The parsed datetime object from
        ``value`` if the ``field`` is not null (otherwise it is
        :data:`None`).
    """
    if not _not_null(value, field):
        return None
    # Canonical formats for timestamps in BigQuery are flexible. See:
    # g.co/cloud/bigquery/docs/reference/standard-sql/data-types#timestamp-type
    # Normalize before parsing: the date/time separator may be 'T' or ' ',
    # and UTC may be spelled 'Z' or '+00:00'.
    normalized = value.replace(" ", "T", 1)
    normalized = normalized.replace("Z", "").replace("+00:00", "")
    has_micros = "." in normalized
    fmt = _RFC3339_MICROS_NO_ZULU if has_micros else _RFC3339_NO_FRACTION
    return datetime.datetime.strptime(normalized, fmt).replace(tzinfo=UTC)
Coerce 'value' to a datetime, if set or not nullable.
Args:
value (str): The timestamp.
field (.SchemaField): The field corresponding to the value.
Returns:
Optional[datetime.datetime]: The parsed datetime object from
``value`` if the ``field`` is not null (otherwise it is
:data:`None`).
def _datetime_from_json(value, field):
    """Coerce 'value' to a naive datetime, if set or not nullable.

    Args:
        value (str): The timestamp.
        field (.SchemaField): The field corresponding to the value.

    Returns:
        Optional[datetime.datetime]: The parsed datetime object from
        ``value`` if the ``field`` is not null (otherwise it is
        :data:`None`).
    """
    if not _not_null(value, field):
        return None
    # Fractional seconds select the microsecond-aware format.
    fmt = _RFC3339_MICROS_NO_ZULU if "." in value else _RFC3339_NO_FRACTION
    return datetime.datetime.strptime(value, fmt)
Coerce 'value' to a datetime date, if set or not nullable
def _time_from_json(value, field):
    """Coerce 'value' to a :class:`datetime.time`, if set or not nullable."""
    if not _not_null(value, field):
        return None
    # Format is selected purely by string length:
    # 8 -> HH:MM:SS, 15 -> HH:MM:SS.micros
    formats = {8: _TIMEONLY_WO_MICROS, 15: _TIMEONLY_W_MICROS}
    fmt = formats.get(len(value))
    if fmt is None:
        raise ValueError("Unknown time format: {}".format(value))
    return datetime.datetime.strptime(value, fmt).time()
Coerce 'value' to a mapping, if set or not nullable.
def _record_from_json(value, field):
    """Coerce a record cell to a dict keyed by subfield name, if not null."""
    if not _not_null(value, field):
        return None
    record = {}
    # Each cell in value["f"] pairs positionally with a schema subfield.
    for subfield, cell in zip(field.fields, value["f"]):
        converter = _CELLDATA_FROM_JSON[subfield.field_type]
        if subfield.mode == "REPEATED":
            converted = [converter(item["v"], subfield) for item in cell["v"]]
        else:
            converted = converter(cell["v"], subfield)
        record[subfield.name] = converted
    return record
Convert JSON row data to row with appropriate types.
Note: ``row['f']`` and ``schema`` are presumed to be of the same length.
:type row: dict
:param row: A JSON response row to be converted.
:type schema: tuple
:param schema: A tuple of
:class:`~google.cloud.bigquery.schema.SchemaField`.
:rtype: tuple
:returns: A tuple of data converted to native types.
def _row_tuple_from_json(row, schema):
    """Convert JSON row data to a tuple with appropriate types.

    Note: ``row['f']`` and ``schema`` are presumed to be of the same length.

    :type row: dict
    :param row: A JSON response row to be converted.

    :type schema: tuple
    :param schema: A tuple of
                   :class:`~google.cloud.bigquery.schema.SchemaField`.

    :rtype: tuple
    :returns: A tuple of data converted to native types.
    """
    cells = []
    for field, cell in zip(schema, row["f"]):
        converter = _CELLDATA_FROM_JSON[field.field_type]
        if field.mode == "REPEATED":
            converted = [converter(item["v"], field) for item in cell["v"]]
        else:
            converted = converter(cell["v"], field)
        cells.append(converted)
    return tuple(cells)
Convert JSON row data to rows with appropriate types.
def _rows_from_json(values, schema):
    """Convert a list of JSON row resources to typed Row objects."""
    from google.cloud import bigquery

    field_to_index = _field_to_index_mapping(schema)
    return [
        bigquery.Row(_row_tuple_from_json(resource, schema), field_to_index)
        for resource in values
    ]
Coerce 'value' to a JSON-compatible representation.
def _decimal_to_json(value):
"""Coerce 'value' to a JSON-compatible representation."""
if isinstance(value, decimal.Decimal):
value = str(value)
return value |
Coerce 'value' to an JSON-compatible representation.
def _bytes_to_json(value):
"""Coerce 'value' to an JSON-compatible representation."""
if isinstance(value, bytes):
value = base64.standard_b64encode(value).decode("ascii")
return value |
Coerce 'value' to an JSON-compatible representation.
This version returns the string representation used in query parameters.
def _timestamp_to_json_parameter(value):
    """Return the query-parameter string form of a timestamp value.

    This version returns the string representation used in query parameters.
    """
    if not isinstance(value, datetime.datetime):
        return value
    if value.tzinfo not in (None, UTC):
        # Normalize to UTC and drop tzinfo so isoformat() stays bare.
        value = value.replace(tzinfo=None) - value.utcoffset()
    return "%s %s+00:00" % (value.date().isoformat(), value.time().isoformat())
Coerce 'value' to an JSON-compatible representation.
This version returns floating-point seconds value used in row data.
def _timestamp_to_json_row(value):
    """Return the row-data form of a timestamp: float seconds since epoch."""
    if isinstance(value, datetime.datetime):
        return _microseconds_from_datetime(value) * 1e-6
    return value
Coerce 'value' to an JSON-compatible representation.
def _datetime_to_json(value):
    """Return a JSON-compatible form of a datetime (RFC 3339, no zone)."""
    if isinstance(value, datetime.datetime):
        return value.strftime(_RFC3339_MICROS_NO_ZULU)
    return value
Coerce 'value' to an JSON-compatible representation.
def _date_to_json(value):
"""Coerce 'value' to an JSON-compatible representation."""
if isinstance(value, datetime.date):
value = value.isoformat()
return value |
Coerce 'value' to an JSON-compatible representation.
def _time_to_json(value):
"""Coerce 'value' to an JSON-compatible representation."""
if isinstance(value, datetime.time):
value = value.isoformat()
return value |
Maps a field and value to a JSON-safe value.
Args:
field ( \
:class:`~google.cloud.bigquery.schema.SchemaField`, \
):
The SchemaField to use for type conversion and field name.
row_value (any):
Value to be converted, based on the field's type.
Returns:
any:
A JSON-serializable object.
def _scalar_field_to_json(field, row_value):
    """Map a scalar field and value to a JSON-safe value.

    Args:
        field ( \
            :class:`~google.cloud.bigquery.schema.SchemaField`, \
        ):
            The SchemaField to use for type conversion and field name.
        row_value (any):
            Value to be converted, based on the field's type.

    Returns:
        any:
            A JSON-serializable object.
    """
    converter = _SCALAR_VALUE_TO_JSON_ROW.get(field.field_type)
    # Types without a registered converter (e.g. STRING) pass through as-is.
    return row_value if converter is None else converter(row_value)
Convert a repeated/array field to its JSON representation.
Args:
field ( \
:class:`~google.cloud.bigquery.schema.SchemaField`, \
):
The SchemaField to use for type conversion and field name. The
field mode must equal ``REPEATED``.
row_value (Sequence[any]):
A sequence of values to convert to JSON-serializable values.
Returns:
List[any]:
A list of JSON-serializable objects.
def _repeated_field_to_json(field, row_value):
    """Convert a repeated/array field to its JSON representation.

    Args:
        field ( \
            :class:`~google.cloud.bigquery.schema.SchemaField`, \
        ):
            The SchemaField to use for type conversion and field name. The
            field mode must equal ``REPEATED``.
        row_value (Sequence[any]):
            A sequence of values to convert to JSON-serializable values.

    Returns:
        List[any]:
            A list of JSON-serializable objects.
    """
    # Clone the field with mode NULLABLE so each element can be converted
    # as if it were a top-level value of the same type.
    item_field = copy.deepcopy(field)
    item_field._mode = "NULLABLE"
    return [_field_to_json(item_field, item) for item in row_value]
Convert a record/struct field to its JSON representation.
Args:
fields ( \
Sequence[:class:`~google.cloud.bigquery.schema.SchemaField`], \
):
The :class:`~google.cloud.bigquery.schema.SchemaField`s of the
record's subfields to use for type conversion and field names.
row_value (Union[Tuple[Any], Mapping[str, Any]):
A tuple or dictionary to convert to JSON-serializable values.
Returns:
Mapping[str, any]:
A JSON-serializable dictionary.
def _record_field_to_json(fields, row_value):
    """Convert a record/struct field to its JSON representation.

    Args:
        fields ( \
            Sequence[:class:`~google.cloud.bigquery.schema.SchemaField`], \
        ):
            The :class:`~google.cloud.bigquery.schema.SchemaField`s of the
            record's subfields to use for type conversion and field names.
        row_value (Union[Tuple[Any], Mapping[str, Any]):
            A tuple or dictionary to convert to JSON-serializable values.

    Returns:
        Mapping[str, any]:
            A JSON-serializable dictionary.
    """
    record = {}
    if isinstance(row_value, dict):
        # Mapping input: look subvalues up by subfield name (missing -> None).
        for subfield in fields:
            record[subfield.name] = _field_to_json(
                subfield, row_value.get(subfield.name)
            )
    else:
        # Sequence input: subvalues pair positionally with subfields.
        for index, subfield in enumerate(fields):
            record[subfield.name] = _field_to_json(subfield, row_value[index])
    return record
Convert a field into JSON-serializable values.
Args:
field ( \
:class:`~google.cloud.bigquery.schema.SchemaField`, \
):
The SchemaField to use for type conversion and field name.
row_value (Union[ \
Sequence[list], \
any, \
]):
Row data to be inserted. If the SchemaField's mode is
REPEATED, assume this is a list. If not, the type
is inferred from the SchemaField's field_type.
Returns:
any:
A JSON-serializable object.
def _field_to_json(field, row_value):
    """Convert a field into JSON-serializable values.

    Args:
        field ( \
            :class:`~google.cloud.bigquery.schema.SchemaField`, \
        ):
            The SchemaField to use for type conversion and field name.
        row_value (Union[ \
            Sequence[list], \
            any, \
        ]):
            Row data to be inserted. If the SchemaField's mode is
            REPEATED, assume this is a list. If not, the type
            is inferred from the SchemaField's field_type.

    Returns:
        any:
            A JSON-serializable object.
    """
    if row_value is None:
        return None

    # Dispatch on mode first (REPEATED wraps any type), then on field type.
    if field.mode == "REPEATED":
        return _repeated_field_to_json(field, row_value)
    elif field.field_type == "RECORD":
        return _record_field_to_json(field.fields, row_value)
    else:
        return _scalar_field_to_json(field, row_value)
Convert snake case string to camel case.
def _snake_to_camel_case(value):
"""Convert snake case string to camel case."""
words = value.split("_")
return words[0] + "".join(map(str.capitalize, words[1:])) |
Get a nested value from a dictionary.
This method works like ``dict.get(key)``, but for nested values.
Arguments:
container (dict):
A dictionary which may contain other dictionaries as values.
keys (iterable):
A sequence of keys to attempt to get the value for. Each item in
the sequence represents a deeper nesting. The first key is for
the top level. If there is a dictionary there, the second key
attempts to get the value within that, and so on.
default (object):
(Optional) Value to be returned if any of the keys are not found.
Defaults to ``None``.
Examples:
Get a top-level value (equivalent to ``container.get('key')``).
>>> _get_sub_prop({'key': 'value'}, ['key'])
'value'
Get a top-level value, providing a default (equivalent to
``container.get('key', default='default')``).
>>> _get_sub_prop({'nothere': 123}, ['key'], default='not found')
'not found'
Get a nested value.
>>> _get_sub_prop({'key': {'subkey': 'value'}}, ['key', 'subkey'])
'value'
Returns:
object: The value if present or the default.
def _get_sub_prop(container, keys, default=None):
"""Get a nested value from a dictionary.
This method works like ``dict.get(key)``, but for nested values.
Arguments:
container (dict):
A dictionary which may contain other dictionaries as values.
keys (iterable):
A sequence of keys to attempt to get the value for. Each item in
the sequence represents a deeper nesting. The first key is for
the top level. If there is a dictionary there, the second key
attempts to get the value within that, and so on.
default (object):
(Optional) Value to returned if any of the keys are not found.
Defaults to ``None``.
Examples:
Get a top-level value (equivalent to ``container.get('key')``).
>>> _get_sub_prop({'key': 'value'}, ['key'])
'value'
Get a top-level value, providing a default (equivalent to
``container.get('key', default='default')``).
>>> _get_sub_prop({'nothere': 123}, ['key'], default='not found')
'not found'
Get a nested value.
>>> _get_sub_prop({'key': {'subkey': 'value'}}, ['key', 'subkey'])
'value'
Returns:
object: The value if present or the default.
"""
sub_val = container
for key in keys:
if key not in sub_val:
return default
sub_val = sub_val[key]
return sub_val |
Set a nested value in a dictionary.
Arguments:
container (dict):
A dictionary which may contain other dictionaries as values.
keys (iterable):
A sequence of keys to attempt to set the value for. Each item in
the sequence represents a deeper nesting. The first key is for
the top level. If there is a dictionary there, the second key
attempts to get the value within that, and so on.
value (object): Value to set within the container.
Examples:
Set a top-level value (equivalent to ``container['key'] = 'value'``).
>>> container = {}
>>> _set_sub_prop(container, ['key'], 'value')
>>> container
{'key': 'value'}
Set a nested value.
>>> container = {}
>>> _set_sub_prop(container, ['key', 'subkey'], 'value')
>>> container
{'key': {'subkey': 'value'}}
Replace a nested value.
>>> container = {'key': {'subkey': 'prev'}}
>>> _set_sub_prop(container, ['key', 'subkey'], 'new')
>>> container
{'key': {'subkey': 'new'}}
def _set_sub_prop(container, keys, value):
"""Set a nested value in a dictionary.
Arguments:
container (dict):
A dictionary which may contain other dictionaries as values.
keys (iterable):
A sequence of keys to attempt to set the value for. Each item in
the sequence represents a deeper nesting. The first key is for
the top level. If there is a dictionary there, the second key
attempts to get the value within that, and so on.
value (object): Value to set within the container.
Examples:
Set a top-level value (equivalent to ``container['key'] = 'value'``).
>>> container = {}
>>> _set_sub_prop(container, ['key'], 'value')
>>> container
{'key': 'value'}
Set a nested value.
>>> container = {}
>>> _set_sub_prop(container, ['key', 'subkey'], 'value')
>>> container
{'key': {'subkey': 'value'}}
Replace a nested value.
>>> container = {'key': {'subkey': 'prev'}}
>>> _set_sub_prop(container, ['key', 'subkey'], 'new')
>>> container
{'key': {'subkey': 'new'}}
"""
sub_val = container
for key in keys[:-1]:
if key not in sub_val:
sub_val[key] = {}
sub_val = sub_val[key]
sub_val[keys[-1]] = value |
Remove a nested key from a dictionary.
Arguments:
container (dict):
A dictionary which may contain other dictionaries as values.
keys (iterable):
A sequence of keys to attempt to clear the value for. Each item in
the sequence represents a deeper nesting. The first key is for
the top level. If there is a dictionary there, the second key
attempts to get the value within that, and so on.
Examples:
Remove a top-level value (equivalent to ``del container['key']``).
>>> container = {'key': 'value'}
>>> _del_sub_prop(container, ['key'])
>>> container
{}
Remove a nested value.
>>> container = {'key': {'subkey': 'value'}}
>>> _del_sub_prop(container, ['key', 'subkey'])
>>> container
{'key': {}}
def _del_sub_prop(container, keys):
"""Remove a nested key fro a dictionary.
Arguments:
container (dict):
A dictionary which may contain other dictionaries as values.
keys (iterable):
A sequence of keys to attempt to clear the value for. Each item in
the sequence represents a deeper nesting. The first key is for
the top level. If there is a dictionary there, the second key
attempts to get the value within that, and so on.
Examples:
Remove a top-level value (equivalent to ``del container['key']``).
>>> container = {'key': 'value'}
>>> _del_sub_prop(container, ['key'])
>>> container
{}
Remove a nested value.
>>> container = {'key': {'subkey': 'value'}}
>>> _del_sub_prop(container, ['key', 'subkey'])
>>> container
{'key': {}}
"""
sub_val = container
for key in keys[:-1]:
if key not in sub_val:
sub_val[key] = {}
sub_val = sub_val[key]
if keys[-1] in sub_val:
del sub_val[keys[-1]] |
Build a resource based on a ``_properties`` dictionary, filtered by
``filter_fields``, which follow the name of the Python object.
def _build_resource_from_properties(obj, filter_fields):
"""Build a resource based on a ``_properties`` dictionary, filtered by
``filter_fields``, which follow the name of the Python object.
"""
partial = {}
for filter_field in filter_fields:
api_field = obj._PROPERTY_TO_API_FIELD.get(filter_field)
if api_field is None and filter_field not in obj._properties:
raise ValueError("No property %s" % filter_field)
elif api_field is not None:
partial[api_field] = obj._properties.get(api_field)
else:
# allows properties that are not defined in the library
# and properties that have the same name as API resource key
partial[filter_field] = obj._properties[filter_field]
return partial |
Sample ID: go/samples-tracker/1510
def get_model(client, model_id):
    """Sample ID: go/samples-tracker/1510

    Documentation sample: fetch a BigQuery ML model by its fully-qualified
    ID and print the model ID together with its friendly name.  The region
    tag below marks the span extracted into the published docs, so the
    code must stay verbatim.
    """
    # [START bigquery_get_model]
    from google.cloud import bigquery

    # TODO(developer): Construct a BigQuery client object.
    # client = bigquery.Client()

    # TODO(developer): Set model_id to the ID of the model to fetch.
    # model_id = 'your-project.your_dataset.your_model'

    model = client.get_model(model_id)

    full_model_id = "{}.{}.{}".format(model.project, model.dataset_id, model.model_id)
    friendly_name = model.friendly_name
    print(
        "Got model '{}' with friendly_name '{}'.".format(full_model_id, friendly_name)
    )
Make sure a "Reference" database ID is empty.
:type database_id: unicode
:param database_id: The ``database_id`` field from a "Reference" protobuf.
:raises: :exc:`ValueError` if the ``database_id`` is not empty.
def _check_database_id(database_id):
"""Make sure a "Reference" database ID is empty.
:type database_id: unicode
:param database_id: The ``database_id`` field from a "Reference" protobuf.
:raises: :exc:`ValueError` if the ``database_id`` is not empty.
"""
if database_id != u"":
msg = _DATABASE_ID_TEMPLATE.format(database_id)
raise ValueError(msg) |
Add the ID or name from an element to a list.
:type flat_path: list
:param flat_path: List of accumulated path parts.
:type element_pb: :class:`._app_engine_key_pb2.Path.Element`
:param element_pb: The element containing ID or name.
:type empty_allowed: bool
:param empty_allowed: Indicates if neither ID or name need be set. If
:data:`False`, then **exactly** one of them must be.
:raises: :exc:`ValueError` if 0 or 2 of ID/name are set (unless
``empty_allowed=True`` and 0 are set).
def _add_id_or_name(flat_path, element_pb, empty_allowed):
"""Add the ID or name from an element to a list.
:type flat_path: list
:param flat_path: List of accumulated path parts.
:type element_pb: :class:`._app_engine_key_pb2.Path.Element`
:param element_pb: The element containing ID or name.
:type empty_allowed: bool
:param empty_allowed: Indicates if neither ID or name need be set. If
:data:`False`, then **exactly** one of them must be.
:raises: :exc:`ValueError` if 0 or 2 of ID/name are set (unless
``empty_allowed=True`` and 0 are set).
"""
id_ = element_pb.id
name = element_pb.name
# NOTE: Below 0 and the empty string are the "null" values for their
# respective types, indicating that the value is unset.
if id_ == 0:
if name == u"":
if not empty_allowed:
raise ValueError(_EMPTY_ELEMENT)
else:
flat_path.append(name)
else:
if name == u"":
flat_path.append(id_)
else:
msg = _BAD_ELEMENT_TEMPLATE.format(id_, name)
raise ValueError(msg) |
Convert a legacy "Path" protobuf to a flat path.
For example
Element {
type: "parent"
id: 59
}
Element {
type: "child"
name: "naem"
}
would convert to ``('parent', 59, 'child', 'naem')``.
:type path_pb: :class:`._app_engine_key_pb2.Path`
:param path_pb: Legacy protobuf "Path" object (from a "Reference").
:rtype: tuple
:returns: The path parts from ``path_pb``.
def _get_flat_path(path_pb):
    """Convert a legacy "Path" protobuf to a flat path tuple.

    For example

        Element {
          type: "parent"
          id: 59
        }
        Element {
          type: "child"
          name: "naem"
        }

    would convert to ``('parent', 59, 'child', 'naem')``.

    :type path_pb: :class:`._app_engine_key_pb2.Path`
    :param path_pb: Legacy protobuf "Path" object (from a "Reference").

    :rtype: tuple
    :returns: The path parts from ``path_pb``.
    """
    elements = path_pb.element
    final = len(elements) - 1
    flat = []
    for position, element_pb in enumerate(elements):
        flat.append(element_pb.type)
        # Only the trailing element may be partial (i.e. have neither an
        # ID nor a name).
        _add_id_or_name(flat, element_pb, position == final)
    return tuple(flat)
Convert a tuple of ints and strings into a legacy "Path".
.. note:
This assumes, but does not verify, that each entry in
``dict_path`` is valid (i.e. doesn't have more than one
key out of "name" / "id").
:type dict_path: list
:param dict_path: The "structured" path for a key, i.e. it
is a list of dictionaries, each of which has
"kind" and one of "name" / "id" as keys.
:rtype: :class:`._app_engine_key_pb2.Path`
:returns: The legacy path corresponding to ``dict_path``.
def _to_legacy_path(dict_path):
    """Convert a "structured" path into a legacy "Path" protobuf.

    .. note:

        This assumes, but does not verify, that each entry in
        ``dict_path`` is valid (i.e. doesn't have more than one
        key out of "name" / "id").

    :type dict_path: list
    :param dict_path: The "structured" path for a key, i.e. it
                      is a list of dictionaries, each of which has
                      "kind" and one of "name" / "id" as keys.

    :rtype: :class:`._app_engine_key_pb2.Path`
    :returns: The legacy path corresponding to ``dict_path``.
    """

    def _element(part):
        # Build keyword arguments for a single path element; at most one
        # of "id" / "name" is set.
        kwargs = {"type": part["kind"]}
        if "id" in part:
            kwargs["id"] = part["id"]
        elif "name" in part:
            kwargs["name"] = part["name"]
        return _app_engine_key_pb2.Path.Element(**kwargs)

    return _app_engine_key_pb2.Path(
        element=[_element(part) for part in dict_path]
    )
Parses positional arguments into key path with kinds and IDs.
:type path_args: tuple
:param path_args: A tuple from positional arguments. Should be
alternating list of kinds (string) and ID/name
parts (int or string).
:rtype: :class:`list` of :class:`dict`
:returns: A list of key parts with kind and ID or name set.
:raises: :class:`ValueError` if there are no ``path_args``, if one of
the kinds is not a string or if one of the IDs/names is not
a string or an integer.
def _parse_path(path_args):
    """Parses positional arguments into key path with kinds and IDs.

    :type path_args: tuple
    :param path_args: A tuple from positional arguments. Should be
                      alternating list of kinds (string) and ID/name
                      parts (int or string).

    :rtype: :class:`list` of :class:`dict`
    :returns: A list of key parts with kind and ID or name set.
    :raises: :class:`ValueError` if there are no ``path_args``, if one of
             the kinds is not a string or if one of the IDs/names is not
             a string or an integer.
    """
    if not path_args:
        raise ValueError("Key path must not be empty.")

    kinds = path_args[::2]
    ids_or_names = path_args[1::2]
    # Sentinel used to pad an odd-length (partial) path out to even length.
    unset = object()
    if len(path_args) % 2 == 1:
        ids_or_names += (unset,)

    result = []
    for kind, id_or_name in zip(kinds, ids_or_names):
        if not isinstance(kind, six.string_types):
            raise ValueError(kind, "Kind was not a string.")
        part = {"kind": kind}
        if isinstance(id_or_name, six.string_types):
            part["name"] = id_or_name
        elif isinstance(id_or_name, six.integer_types):
            part["id"] = id_or_name
        elif id_or_name is not unset:
            raise ValueError(id_or_name, "ID/name was not a string or integer.")
        result.append(part)
    return result
Sets protected data by combining raw data set from the constructor.
If a ``_parent`` is set, updates the ``_flat_path`` and sets the
``_namespace`` and ``_project`` if not already set.
:rtype: :class:`list` of :class:`dict`
:returns: A list of key parts with kind and ID or name set.
:raises: :class:`ValueError` if the parent key is not complete.
def _combine_args(self):
"""Sets protected data by combining raw data set from the constructor.
If a ``_parent`` is set, updates the ``_flat_path`` and sets the
``_namespace`` and ``_project`` if not already set.
:rtype: :class:`list` of :class:`dict`
:returns: A list of key parts with kind and ID or name set.
:raises: :class:`ValueError` if the parent key is not complete.
"""
child_path = self._parse_path(self._flat_path)
if self._parent is not None:
if self._parent.is_partial:
raise ValueError("Parent key must be complete.")
# We know that _parent.path() will return a copy.
child_path = self._parent.path + child_path
self._flat_path = self._parent.flat_path + self._flat_path
if (
self._namespace is not None
and self._namespace != self._parent.namespace
):
raise ValueError("Child namespace must agree with parent's.")
self._namespace = self._parent.namespace
if self._project is not None and self._project != self._parent.project:
raise ValueError("Child project must agree with parent's.")
self._project = self._parent.project
return child_path |
Duplicates the Key.
Most attributes are simple types, so don't require copying. Other
attributes like ``parent`` are long-lived and so we re-use them.
:rtype: :class:`google.cloud.datastore.key.Key`
:returns: A new ``Key`` instance with the same data as the current one.
def _clone(self):
"""Duplicates the Key.
Most attributes are simple types, so don't require copying. Other
attributes like ``parent`` are long-lived and so we re-use them.
:rtype: :class:`google.cloud.datastore.key.Key`
:returns: A new ``Key`` instance with the same data as the current one.
"""
cloned_self = self.__class__(
*self.flat_path, project=self.project, namespace=self.namespace
)
# If the current parent has already been set, we re-use
# the same instance
cloned_self._parent = self._parent
return cloned_self |
Creates new key from existing partial key by adding final ID/name.
:type id_or_name: str or integer
:param id_or_name: ID or name to be added to the key.
:rtype: :class:`google.cloud.datastore.key.Key`
:returns: A new ``Key`` instance with the same data as the current one
and an extra ID or name added.
:raises: :class:`ValueError` if the current key is not partial or if
``id_or_name`` is not a string or integer.
def completed_key(self, id_or_name):
    """Creates new key from existing partial key by adding final ID/name.

    :type id_or_name: str or integer
    :param id_or_name: ID or name to be added to the key.

    :rtype: :class:`google.cloud.datastore.key.Key`
    :returns: A new ``Key`` instance with the same data as the current one
              and an extra ID or name added.
    :raises: :class:`ValueError` if the current key is not partial or if
             ``id_or_name`` is not a string or integer.
    """
    if not self.is_partial:
        raise ValueError("Only a partial key can be completed.")

    # Decide which field of the final path element is being filled in.
    if isinstance(id_or_name, six.string_types):
        final_field = "name"
    elif isinstance(id_or_name, six.integer_types):
        final_field = "id"
    else:
        raise ValueError(id_or_name, "ID/name was not a string or integer.")

    completed = self._clone()
    completed._path[-1][final_field] = id_or_name
    completed._flat_path += (id_or_name,)
    return completed
Return a protobuf corresponding to the key.
:rtype: :class:`.entity_pb2.Key`
:returns: The protobuf representing the key.
def to_protobuf(self):
    """Return a protobuf corresponding to the key.

    :rtype: :class:`.entity_pb2.Key`
    :returns: The protobuf representing the key.
    """
    key_pb = _entity_pb2.Key()
    key_pb.partition_id.project_id = self.project
    if self.namespace:
        key_pb.partition_id.namespace_id = self.namespace

    for part in self.path:
        element = key_pb.path.add()
        # Only assign the fields that are actually present; at most one
        # of "id" / "name" is set on a given path part.
        for attr in ("kind", "id", "name"):
            if attr in part:
                setattr(element, attr, part[attr])
    return key_pb
Convert to a base64-encoded urlsafe string for App Engine.
This is intended to work with the "legacy" representation of a
datastore "Key" used within Google App Engine (a so-called
"Reference"). The returned string can be used as the ``urlsafe``
argument to ``ndb.Key(urlsafe=...)``. The base64 encoded values
will have padding removed.
.. note::
The string returned by ``to_legacy_urlsafe`` is equivalent, but
not identical, to the string returned by ``ndb``. The location
prefix may need to be specified to obtain identical urlsafe
keys.
:type location_prefix: str
:param location_prefix: The location prefix of an App Engine project
ID. Often this value is 's~', but may also be
'e~', or other location prefixes currently
unknown.
:rtype: bytes
:returns: A bytestring containing the key encoded as URL-safe base64.
def to_legacy_urlsafe(self, location_prefix=None):
    """Convert to a base64-encoded urlsafe string for App Engine.

    This is intended to work with the "legacy" representation of a
    datastore "Key" used within Google App Engine (a so-called
    "Reference"). The returned string can be used as the ``urlsafe``
    argument to ``ndb.Key(urlsafe=...)``. The base64 encoded values
    will have padding removed.

    .. note::

        The string returned by ``to_legacy_urlsafe`` is equivalent, but
        not identical, to the string returned by ``ndb``. The location
        prefix may need to be specified to obtain identical urlsafe
        keys.

    :type location_prefix: str
    :param location_prefix: The location prefix of an App Engine project
                            ID. Often this value is 's~', but may also be
                            'e~', or other location prefixes currently
                            unknown.

    :rtype: bytes
    :returns: A bytestring containing the key encoded as URL-safe base64.
    """
    if location_prefix is None:
        app = self.project
    else:
        app = location_prefix + self.project
    reference = _app_engine_key_pb2.Reference(
        app=app,
        path=_to_legacy_path(self._path),  # Avoid the copy made by ``.path``.
        name_space=self.namespace,
    )
    raw_bytes = reference.SerializeToString()
    # Strip the "=" padding to match App Engine's urlsafe format.
    return base64.urlsafe_b64encode(raw_bytes).strip(b"=")
Convert urlsafe string to :class:`~google.cloud.datastore.key.Key`.
This is intended to work with the "legacy" representation of a
datastore "Key" used within Google App Engine (a so-called
"Reference"). This assumes that ``urlsafe`` was created within an App
Engine app via something like ``ndb.Key(...).urlsafe()``.
:type urlsafe: bytes or unicode
:param urlsafe: The base64 encoded (ASCII) string corresponding to a
datastore "Key" / "Reference".
:rtype: :class:`~google.cloud.datastore.key.Key`.
:returns: The key corresponding to ``urlsafe``.
def from_legacy_urlsafe(cls, urlsafe):
    """Convert urlsafe string to :class:`~google.cloud.datastore.key.Key`.

    This is intended to work with the "legacy" representation of a
    datastore "Key" used within Google App Engine (a so-called
    "Reference"). This assumes that ``urlsafe`` was created within an App
    Engine app via something like ``ndb.Key(...).urlsafe()``.

    :type urlsafe: bytes or unicode
    :param urlsafe: The base64 encoded (ASCII) string corresponding to a
                    datastore "Key" / "Reference".

    :rtype: :class:`~google.cloud.datastore.key.Key`.
    :returns: The key corresponding to ``urlsafe``.
    """
    urlsafe = _to_bytes(urlsafe, encoding="ascii")
    # Restore the "=" padding stripped by ``to_legacy_urlsafe``.
    urlsafe += b"=" * (-len(urlsafe) % 4)
    raw_bytes = base64.urlsafe_b64decode(urlsafe)

    reference = _app_engine_key_pb2.Reference()
    reference.ParseFromString(raw_bytes)

    project = _clean_app(reference.app)
    namespace = _get_empty(reference.name_space, u"")
    _check_database_id(reference.database_id)
    flat_path = _get_flat_path(reference.path)
    return cls(*flat_path, project=project, namespace=namespace)
Creates a parent key for the current path.
Extracts all but the last element in the key path and creates a new
key, while still matching the namespace and the project.
:rtype: :class:`google.cloud.datastore.key.Key` or :class:`NoneType`
:returns: A new ``Key`` instance, whose path consists of all but the
last element of current path. If the current key has only
one path element, returns ``None``.
def _make_parent(self):
"""Creates a parent key for the current path.
Extracts all but the last element in the key path and creates a new
key, while still matching the namespace and the project.
:rtype: :class:`google.cloud.datastore.key.Key` or :class:`NoneType`
:returns: A new ``Key`` instance, whose path consists of all but the
last element of current path. If the current key has only
one path element, returns ``None``.
"""
if self.is_partial:
parent_args = self.flat_path[:-1]
else:
parent_args = self.flat_path[:-2]
if parent_args:
return self.__class__(
*parent_args, project=self.project, namespace=self.namespace
) |
The parent of the current key.
:rtype: :class:`google.cloud.datastore.key.Key` or :class:`NoneType`
:returns: A new ``Key`` instance, whose path consists of all but the
last element of current path. If the current key has only
one path element, returns ``None``.
def parent(self):
    """The parent of the current key.

    :rtype: :class:`google.cloud.datastore.key.Key` or :class:`NoneType`
    :returns: A new ``Key`` instance, whose path consists of all but the
              last element of current path. If the current key has only
              one path element, returns ``None``.
    """
    # Computed lazily and cached on first access.
    parent = self._parent
    if parent is None:
        parent = self._parent = self._make_parent()
    return parent
Factory: construct a record set given its API representation
:type resource: dict
:param resource: record sets representation returned from the API
:type zone: :class:`google.cloud.dns.zone.ManagedZone`
:param zone: A zone which holds one or more record sets.
:rtype: :class:`google.cloud.dns.zone.ResourceRecordSet`
:returns: RRS parsed from ``resource``.
def from_api_repr(cls, resource, zone):
    """Factory: construct a record set given its API representation

    :type resource: dict
    :param resource: record sets representation returned from the API

    :type zone: :class:`google.cloud.dns.zone.ManagedZone`
    :param zone: A zone which holds one or more record sets.

    :rtype: :class:`google.cloud.dns.zone.ResourceRecordSet`
    :returns: RRS parsed from ``resource``.
    """
    # ``ttl`` arrives as a string from the API; coerce to int.
    return cls(
        resource["name"],
        resource["type"],
        int(resource["ttl"]),
        resource["rrdatas"],
        zone=zone,
    )
The entry point for the worker thread.
Pulls pending log entries off the queue and writes them in batches to
the Cloud Logger.
def _thread_main(self):
    """The entry point for the worker thread.

    Pulls pending log entries off the queue and writes them in batches to
    the Cloud Logger.

    Runs until a ``_WORKER_TERMINATOR`` sentinel is pulled off the queue;
    items retrieved in the same batch as the sentinel are still committed
    before the thread exits.
    """
    _LOGGER.debug("Background thread started.")
    quit_ = False
    while True:
        batch = self._cloud_logger.batch()
        # Drain up to ``max_batch_size`` items off the queue; presumably
        # ``_get_many`` blocks up to ``max_latency`` — confirm against its
        # definition elsewhere in this module.
        items = _get_many(
            self._queue,
            max_items=self._max_batch_size,
            max_latency=self._max_latency,
        )
        for item in items:
            if item is _WORKER_TERMINATOR:
                quit_ = True
                # Continue processing items, don't break, try to process
                # all items we got back before quitting.
            else:
                batch.log_struct(**item)
        self._safely_commit_batch(batch)
        # Mark every retrieved item (including the terminator) as done so
        # that any ``queue.join()`` callers are unblocked.
        for _ in range(len(items)):
            self._queue.task_done()
        if quit_:
            break
    _LOGGER.debug("Background thread exited gracefully.")
Starts the background thread.
Additionally, this registers a handler for process exit to attempt
to send any pending log entries before shutdown.
def start(self):
    """Starts the background thread.

    Additionally, this registers a handler for process exit to attempt
    to send any pending log entries before shutdown.

    If the worker thread is already running, this is a no-op.
    """
    with self._operational_lock:
        if self.is_alive:
            return
        worker = threading.Thread(
            target=self._thread_main, name=_WORKER_THREAD_NAME
        )
        # Daemonize so a hung worker cannot keep the process alive.
        worker.daemon = True
        self._thread = worker
        self._thread.start()
        # Try to flush any pending entries at interpreter shutdown.
        atexit.register(self._main_thread_terminated)
Signals the background thread to stop.
This does not terminate the background thread. It simply queues the
stop signal. If the main process exits before the background thread
processes the stop signal, it will be terminated without finishing
work. The ``grace_period`` parameter will give the background
thread some time to finish processing before this function returns.
:type grace_period: float
:param grace_period: If specified, this method will block up to this
many seconds to allow the background thread to finish work before
returning.
:rtype: bool
:returns: True if the thread terminated. False if the thread is still
running.
def stop(self, grace_period=None):
    """Signals the background thread to stop.

    This does not terminate the background thread. It simply queues the
    stop signal. If the main process exits before the background thread
    processes the stop signal, it will be terminated without finishing
    work. The ``grace_period`` parameter will give the background
    thread some time to finish processing before this function returns.

    :type grace_period: float
    :param grace_period: If specified, this method will block up to this
        many seconds to allow the background thread to finish work before
        returning.

    :rtype: bool
    :returns: True if the thread terminated. False if the thread is still
        running.
    """
    if not self.is_alive:
        return True

    with self._operational_lock:
        self._queue.put_nowait(_WORKER_TERMINATOR)
        if grace_period is not None:
            print("Waiting up to %d seconds." % (grace_period,), file=sys.stderr)
        self._thread.join(timeout=grace_period)

        # Capture liveness *before* disowning the thread: once
        # ``self._thread`` is cleared, ``is_alive`` reports False whether
        # or not the thread actually exited.
        terminated = not self.is_alive
        self._thread = None
        return terminated
Callback that attempts to send pending logs before termination.
def _main_thread_terminated(self):
"""Callback that attempts to send pending logs before termination."""
if not self.is_alive:
return
if not self._queue.empty():
print(
"Program shutting down, attempting to send %d queued log "
"entries to Stackdriver Logging..." % (self._queue.qsize(),),
file=sys.stderr,
)
if self.stop(self._grace_period):
print("Sent all pending logs.", file=sys.stderr)
else:
print(
"Failed to send %d pending logs." % (self._queue.qsize(),),
file=sys.stderr,
) |
Queues a log entry to be written by the background thread.
:type record: :class:`logging.LogRecord`
:param record: Python log record that the handler was called with.
:type message: str
:param message: The message from the ``LogRecord`` after being
formatted by the associated log formatters.
:type resource: :class:`~google.cloud.logging.resource.Resource`
:param resource: (Optional) Monitored resource of the entry
:type labels: dict
:param labels: (Optional) Mapping of labels for the entry.
:type trace: str
:param trace: (optional) traceid to apply to the logging entry.
:type span_id: str
:param span_id: (optional) span_id within the trace for the log entry.
Specify the trace parameter if span_id is set.
def enqueue(
    self, record, message, resource=None, labels=None, trace=None, span_id=None
):
    """Queues a log entry to be written by the background thread.

    :type record: :class:`logging.LogRecord`
    :param record: Python log record that the handler was called with.

    :type message: str
    :param message: The message from the ``LogRecord`` after being
                    formatted by the associated log formatters.

    :type resource: :class:`~google.cloud.logging.resource.Resource`
    :param resource: (Optional) Monitored resource of the entry

    :type labels: dict
    :param labels: (Optional) Mapping of labels for the entry.

    :type trace: str
    :param trace: (optional) traceid to apply to the logging entry.

    :type span_id: str
    :param span_id: (optional) span_id within the trace for the log entry.
                    Specify the trace parameter if span_id is set.
    """
    entry = {
        "info": {"message": message, "python_logger": record.name},
        "severity": record.levelname,
        "resource": resource,
        "labels": labels,
        "trace": trace,
        "span_id": span_id,
    }
    self._queue.put_nowait(entry)
Overrides Transport.send().
:type record: :class:`logging.LogRecord`
:param record: Python log record that the handler was called with.
:type message: str
:param message: The message from the ``LogRecord`` after being
formatted by the associated log formatters.
:type resource: :class:`~google.cloud.logging.resource.Resource`
:param resource: (Optional) Monitored resource of the entry.
:type labels: dict
:param labels: (Optional) Mapping of labels for the entry.
:type trace: str
:param trace: (optional) traceid to apply to the logging entry.
:type span_id: str
:param span_id: (optional) span_id within the trace for the log entry.
Specify the trace parameter if span_id is set.
def send(
    self, record, message, resource=None, labels=None, trace=None, span_id=None
):
    """Overrides Transport.send().

    Delegates to the background worker; transmission happens
    asynchronously on the worker thread.

    :type record: :class:`logging.LogRecord`
    :param record: Python log record that the handler was called with.

    :type message: str
    :param message: The message from the ``LogRecord`` after being
                    formatted by the associated log formatters.

    :type resource: :class:`~google.cloud.logging.resource.Resource`
    :param resource: (Optional) Monitored resource of the entry.

    :type labels: dict
    :param labels: (Optional) Mapping of labels for the entry.

    :type trace: str
    :param trace: (optional) traceid to apply to the logging entry.

    :type span_id: str
    :param span_id: (optional) span_id within the trace for the log entry.
                    Specify the trace parameter if span_id is set.
    """
    self.worker.enqueue(
        record, message, resource=resource, labels=labels, trace=trace, span_id=span_id
    )
Lex a field path into tokens (including dots).
Args:
path (str): field path to be lexed.
Returns:
List(str): tokens
def _tokenize_field_path(path):
    """Lex a field path into tokens (including dots).

    Args:
        path (str): field path to be lexed.
    Yields:
        str: tokens
    Raises:
        ValueError: if any text remains that no token pattern matches.
    """
    position = 0
    match = TOKENS_REGEX.match(path)
    while match is not None:
        yield match.group(match.lastgroup)
        position = match.end()
        match = TOKENS_REGEX.match(path, position)
    # Any text left over after the final token is malformed input.
    if position != len(path):
        raise ValueError("Path {} not consumed, residue: {}".format(path, path[position:]))
Split a field path into valid elements (without dots).
Args:
path (str): field path to be lexed.
Returns:
List(str): tokens
Raises:
ValueError: if the path does not match the elements-interspersed-
with-dots pattern.
def split_field_path(path):
    """Split a field path into valid elements (without dots).

    Args:
        path (str): field path to be lexed.
    Returns:
        List(str): tokens
    Raises:
        ValueError: if the path does not match the elements-interspersed-
                    with-dots pattern.
    """
    if not path:
        return []

    elements = []
    expect_dot = False
    for token in _tokenize_field_path(path):
        is_dot = token == "."
        # Tokens must strictly alternate: element, dot, element, dot, ...
        if is_dot != expect_dot:
            raise ValueError("Invalid path: {}".format(path))
        if not is_dot:
            elements.append(token)
        expect_dot = not expect_dot

    # A trailing dot (or an empty token stream) is malformed.
    if not expect_dot or not elements:
        raise ValueError("Invalid path: {}".format(path))
    return elements
Parse a **field path** into a list of nested field names.
See :func:`field_path` for more on **field paths**.
Args:
api_repr (str):
The unique Firestore api representation which consists of
either simple or UTF-8 field names. It cannot exceed
1500 bytes, and cannot be empty. Simple field names match
``'^[_a-zA-Z][_a-zA-Z0-9]*$'``. All other field names are
escaped by surrounding them with backticks.
Returns:
List[str, ...]: The list of field names in the field path.
def parse_field_path(api_repr):
    """Parse a **field path** into a list of nested field names.

    See :func:`field_path` for more on **field paths**.

    Args:
        api_repr (str):
            The unique Firestore api representation which consists of
            either simple or UTF-8 field names. It cannot exceed
            1500 bytes, and cannot be empty. Simple field names match
            ``'^[_a-zA-Z][_a-zA-Z0-9]*$'``. All other field names are
            escaped by surrounding them with backticks.

    Returns:
        List[str, ...]: The list of field names in the field path.
    """
    # code dredged back up from
    # https://github.com/googleapis/google-cloud-python/pull/5109/files
    field_names = []
    for field_name in split_field_path(api_repr):
        if field_name[0] == "`" and field_name[-1] == "`":
            # Quoted (non-simple) name: strip the backticks and unescape.
            field_name = (
                field_name[1:-1]
                .replace(_ESCAPED_BACKTICK, _BACKTICK)
                .replace(_ESCAPED_BACKSLASH, _BACKSLASH)
            )
        field_names.append(field_name)
    return field_names
Create a **field path** from a list of nested field names.
A **field path** is a ``.``-delimited concatenation of the field
names. It is used to represent a nested field. For example,
in the data
.. code-block: python
data = {
'aa': {
'bb': {
'cc': 10,
},
},
}
the field path ``'aa.bb.cc'`` represents that data stored in
``data['aa']['bb']['cc']``.
Args:
field_names (Iterable[str, ...]): The list of field names.
Returns:
str: The ``.``-delimited field path.
def render_field_path(field_names):
    """Create a **field path** from a list of nested field names.

    A **field path** is a ``.``-delimited concatenation of the field
    names. It is used to represent a nested field. For example,
    in the data

    .. code-block: python

        data = {
            'aa': {
                'bb': {
                    'cc': 10,
                },
            },
        }

    the field path ``'aa.bb.cc'`` represents that data stored in
    ``data['aa']['bb']['cc']``.

    Args:
        field_names (Iterable[str, ...]): The list of field names.

    Returns:
        str: The ``.``-delimited field path.
    """
    rendered = []
    for name in field_names:
        match = _SIMPLE_FIELD_NAME.match(name)
        if match and match.group(0) == name:
            # Simple names need no quoting.
            rendered.append(name)
        else:
            # Escape backslashes and backticks, then wrap in backticks.
            quoted = name.replace(_BACKSLASH, _ESCAPED_BACKSLASH).replace(
                _BACKTICK, _ESCAPED_BACKTICK
            )
            rendered.append(_BACKTICK + quoted + _BACKTICK)
    return _FIELD_PATH_DELIMITER.join(rendered)
Get a (potentially nested) value from a dictionary.
If the data is nested, for example:
.. code-block:: python
>>> data
{
'top1': {
'middle2': {
'bottom3': 20,
'bottom4': 22,
},
'middle5': True,
},
'top6': b'\x00\x01 foo',
}
a **field path** can be used to access the nested data. For
example:
.. code-block:: python
>>> get_nested_value('top1', data)
{
'middle2': {
'bottom3': 20,
'bottom4': 22,
},
'middle5': True,
}
>>> get_nested_value('top1.middle2', data)
{
'bottom3': 20,
'bottom4': 22,
}
>>> get_nested_value('top1.middle2.bottom3', data)
20
See :meth:`~.firestore_v1beta1.client.Client.field_path` for
more information on **field paths**.
Args:
field_path (str): A field path (``.``-delimited list of
field names).
data (Dict[str, Any]): The (possibly nested) data.
Returns:
Any: (A copy of) the value stored for the ``field_path``.
Raises:
KeyError: If the ``field_path`` does not match nested data.
def get_nested_value(field_path, data):
    """Get a (potentially nested) value from a dictionary.

    A **field path** (a ``.``-delimited list of field names) can be used
    to address nested data. For example:

    .. code-block:: python

        >>> data = {'top1': {'middle2': {'bottom3': 20}}}
        >>> get_nested_value('top1.middle2.bottom3', data)
        20
        >>> get_nested_value('top1.middle2', data)
        {'bottom3': 20}

    See :meth:`~.firestore_v1beta1.client.Client.field_path` for
    more information on **field paths**.

    Args:
        field_path (str): A field path (``.``-delimited list of
            field names).
        data (Dict[str, Any]): The (possibly nested) data.

    Returns:
        Any: (A copy of) the value stored for the ``field_path``.

    Raises:
        KeyError: If the ``field_path`` does not match nested data.
    """
    field_names = parse_field_path(field_path)

    nested_data = data
    for index, field_name in enumerate(field_names):
        # Guard: we can only descend through mapping types.
        if not isinstance(nested_data, collections_abc.Mapping):
            partial = render_field_path(field_names[:index])
            raise KeyError(_FIELD_PATH_WRONG_TYPE.format(partial, field_name))
        if field_name not in nested_data:
            if index == 0:
                # Missing at the top level: no partial path to report.
                raise KeyError(_FIELD_PATH_MISSING_TOP.format(field_name))
            partial = render_field_path(field_names[:index])
            raise KeyError(_FIELD_PATH_MISSING_KEY.format(field_name, partial))
        nested_data = nested_data[field_name]
    return nested_data
Factory: create a FieldPath from the string formatted per the API.
Args:
api_repr (str): a string path, with non-identifier elements quoted
It cannot exceed 1500 characters, and cannot be empty.
Returns:
(:class:`FieldPath`) An instance parsed from ``api_repr``.
Raises:
ValueError if the parsing fails
def from_api_repr(cls, api_repr):
    """Factory: create a FieldPath from the string formatted per the API.

    Args:
        api_repr (str): a string path, with non-identifier elements quoted
            It cannot exceed 1500 characters, and cannot be empty.
    Returns:
        (:class:`FieldPath`) An instance parsed from ``api_repr``.
    Raises:
        ValueError if the parsing fails
    """
    stripped = api_repr.strip()
    if not stripped:
        raise ValueError("Field path API representation cannot be empty.")
    return cls(*parse_field_path(stripped))
Factory: create a FieldPath from a unicode string representation.
This method splits on the character `.` and disallows the
characters `~*/[]`. To create a FieldPath whose components have
those characters, call the constructor.
Args:
path_string (str): A unicode string which cannot contain
`~*/[]` characters, cannot exceed 1500 bytes, and cannot be empty.
Returns:
(:class:`FieldPath`) An instance parsed from ``path_string``.
def from_string(cls, path_string):
    """Factory: create a FieldPath from a unicode string representation.

    This method splits on the character `.` and disallows the
    characters `~*/[]`. To create a FieldPath whose components have
    those characters, call the constructor.

    Args:
        path_string (str): A unicode string which cannot contain
            `~*/[]` characters, cannot exceed 1500 bytes, and cannot be empty.

    Returns:
        (:class:`FieldPath`) An instance parsed from ``path_string``.
    """
    try:
        return cls.from_api_repr(path_string)
    except ValueError:
        # Fall back to naive splitting on '.', validating each element.
        parts = path_string.split(".")
        for part in parts:
            if not part:
                raise ValueError("Empty element")
            if _LEADING_ALPHA_INVALID.match(part):
                raise ValueError(
                    "Non-alphanum char in element with leading alpha: {}".format(part)
                )
        return FieldPath(*parts)
Check whether ``other`` is an ancestor.
Returns:
(bool) True IFF ``other`` is an ancestor or equal to ``self``,
else False.
def eq_or_parent(self, other):
    """Check whether ``other`` is an ancestor.

    Returns:
        (bool) True IFF ``other`` is an ancestor or equal to ``self``,
        else False.
    """
    # Two paths are equal-or-ancestor-related exactly when their common
    # prefix (of the shorter length) matches.
    shorter = min(len(self.parts), len(other.parts))
    return self.parts[:shorter] == other.parts[:shorter]
Return field paths for all parents.
Returns: Set[:class:`FieldPath`]
def lineage(self):
    """Return field paths for all parents.

    Returns: Set[:class:`FieldPath`]
    """
    # Each proper prefix of ``parts`` identifies one ancestor path.
    # The built-in ``range`` behaves identically here on Python 2 and 3,
    # so the ``six.moves.range`` compatibility shim is unnecessary.
    indexes = range(1, len(self.parts))
    return {FieldPath(*self.parts[:index]) for index in indexes}
Gets the latest state of a long-running operation.
Clients can use this method to poll the operation result at intervals
as recommended by the API service.
Example:
>>> from google.api_core import operations_v1
>>> api = operations_v1.OperationsClient()
>>> name = ''
>>> response = api.get_operation(name)
Args:
name (str): The name of the operation resource.
retry (google.api_core.retry.Retry): The retry strategy to use
when invoking the RPC. If unspecified, the default retry from
the client configuration will be used. If ``None``, then this
method will not retry the RPC at all.
timeout (float): The amount of time in seconds to wait for the RPC
to complete. Note that if ``retry`` is used, this timeout
applies to each individual attempt and the overall time it
takes for this method to complete may be longer. If
unspecified, the default timeout in the client
configuration is used. If ``None``, then the RPC method will
not time out.
Returns:
google.longrunning.operations_pb2.Operation: The state of the
operation.
Raises:
google.api_core.exceptions.GoogleAPICallError: If an error occurred
while invoking the RPC, the appropriate ``GoogleAPICallError``
subclass will be raised.
def get_operation(
    self, name, retry=gapic_v1.method.DEFAULT, timeout=gapic_v1.method.DEFAULT
):
    """Fetch the latest state of a long-running operation.

    Clients can call this method at intervals recommended by the API
    service to poll the operation result.

    Example:
        >>> from google.api_core import operations_v1
        >>> api = operations_v1.OperationsClient()
        >>> name = ''
        >>> response = api.get_operation(name)

    Args:
        name (str): The name of the operation resource.
        retry (google.api_core.retry.Retry): The retry strategy for the
            RPC. Unspecified means the client configuration's default;
            ``None`` disables retries entirely.
        timeout (float): Seconds to wait for the RPC. With ``retry``,
            applies per attempt, so the overall call may take longer.
            Unspecified means the client configuration's default;
            ``None`` disables the timeout.

    Returns:
        google.longrunning.operations_pb2.Operation: The state of the
        operation.

    Raises:
        google.api_core.exceptions.GoogleAPICallError: If an error
            occurred while invoking the RPC, the appropriate
            ``GoogleAPICallError`` subclass will be raised.
    """
    # Build the request inline and delegate to the wrapped transport call.
    return self._get_operation(
        operations_pb2.GetOperationRequest(name=name),
        retry=retry,
        timeout=timeout,
    )
Lists operations that match the specified filter in the request.
Example:
>>> from google.api_core import operations_v1
>>> api = operations_v1.OperationsClient()
>>> name = ''
>>>
>>> # Iterate over all results
>>> for operation in api.list_operations(name):
>>> # process operation
>>> pass
>>>
>>> # Or iterate over results one page at a time
>>> iter = api.list_operations(name)
>>> for page in iter.pages:
>>> for operation in page:
>>> # process operation
>>> pass
Args:
name (str): The name of the operation collection.
filter_ (str): The standard list filter.
retry (google.api_core.retry.Retry): The retry strategy to use
when invoking the RPC. If unspecified, the default retry from
the client configuration will be used. If ``None``, then this
method will not retry the RPC at all.
timeout (float): The amount of time in seconds to wait for the RPC
to complete. Note that if ``retry`` is used, this timeout
applies to each individual attempt and the overall time it
takes for this method to complete may be longer. If
unspecified, the default timeout in the client
configuration is used. If ``None``, then the RPC method will
not time out.
Returns:
google.api_core.page_iterator.Iterator: An iterator that yields
:class:`google.longrunning.operations_pb2.Operation` instances.
Raises:
google.api_core.exceptions.MethodNotImplemented: If the server
does not support this method. Services are not required to
implement this method.
google.api_core.exceptions.GoogleAPICallError: If an error occurred
while invoking the RPC, the appropriate ``GoogleAPICallError``
subclass will be raised.
def list_operations(
    self,
    name,
    filter_,
    retry=gapic_v1.method.DEFAULT,
    timeout=gapic_v1.method.DEFAULT,
):
    """List operations matching the specified filter in the request.

    Example:
        >>> from google.api_core import operations_v1
        >>> api = operations_v1.OperationsClient()
        >>> name = ''
        >>>
        >>> # Iterate over all results
        >>> for operation in api.list_operations(name):
        >>>     # process operation
        >>>     pass
        >>>
        >>> # Or iterate over results one page at a time
        >>> iter = api.list_operations(name)
        >>> for page in iter.pages:
        >>>     for operation in page:
        >>>         # process operation
        >>>         pass

    Args:
        name (str): The name of the operation collection.
        filter_ (str): The standard list filter.
        retry (google.api_core.retry.Retry): The retry strategy for the
            RPC. Unspecified means the client configuration's default;
            ``None`` disables retries entirely.
        timeout (float): Seconds to wait for the RPC. With ``retry``,
            applies per attempt, so the overall call may take longer.
            Unspecified means the client configuration's default;
            ``None`` disables the timeout.

    Returns:
        google.api_core.page_iterator.Iterator: An iterator that yields
        :class:`google.longrunning.operations_pb2.Operation` instances.

    Raises:
        google.api_core.exceptions.MethodNotImplemented: If the server
            does not support this method. Services are not required to
            implement this method.
        google.api_core.exceptions.GoogleAPICallError: If an error
            occurred while invoking the RPC, the appropriate
            ``GoogleAPICallError`` subclass will be raised.
    """
    # Bind retry/timeout once so every page fetch uses the same policy.
    fetch_page = functools.partial(
        self._list_operations, retry=retry, timeout=timeout
    )
    list_request = operations_pb2.ListOperationsRequest(name=name, filter=filter_)
    # The GRPCIterator handles page tokens transparently for the caller.
    return page_iterator.GRPCIterator(
        client=None,
        method=fetch_page,
        request=list_request,
        items_field="operations",
        request_token_field="page_token",
        response_token_field="next_page_token",
    )
Starts asynchronous cancellation on a long-running operation.
The server makes a best effort to cancel the operation, but success is
not guaranteed. Clients can use :meth:`get_operation` or service-
specific methods to check whether the cancellation succeeded or whether
the operation completed despite cancellation. On successful
cancellation, the operation is not deleted; instead, it becomes an
operation with an ``Operation.error`` value with a
``google.rpc.Status.code`` of ``1``, corresponding to
``Code.CANCELLED``.
Example:
>>> from google.api_core import operations_v1
>>> api = operations_v1.OperationsClient()
>>> name = ''
>>> api.cancel_operation(name)
Args:
name (str): The name of the operation resource to be cancelled.
retry (google.api_core.retry.Retry): The retry strategy to use
when invoking the RPC. If unspecified, the default retry from
the client configuration will be used. If ``None``, then this
method will not retry the RPC at all.
timeout (float): The amount of time in seconds to wait for the RPC
to complete. Note that if ``retry`` is used, this timeout
applies to each individual attempt and the overall time it
takes for this method to complete may be longer. If
unspecified, the default timeout in the client
configuration is used. If ``None``, then the RPC method will
not time out.
Raises:
google.api_core.exceptions.MethodNotImplemented: If the server
does not support this method. Services are not required to
implement this method.
google.api_core.exceptions.GoogleAPICallError: If an error occurred
while invoking the RPC, the appropriate ``GoogleAPICallError``
subclass will be raised.
def cancel_operation(
    self, name, retry=gapic_v1.method.DEFAULT, timeout=gapic_v1.method.DEFAULT
):
    """Start asynchronous cancellation on a long-running operation.

    The server makes a best effort to cancel the operation, but success
    is not guaranteed. Clients can use :meth:`get_operation` or
    service-specific methods to check whether the cancellation succeeded
    or whether the operation completed despite cancellation. On
    successful cancellation, the operation is not deleted; instead, it
    becomes an operation with an ``Operation.error`` value with a
    ``google.rpc.Status.code`` of ``1``, corresponding to
    ``Code.CANCELLED``.

    Example:
        >>> from google.api_core import operations_v1
        >>> api = operations_v1.OperationsClient()
        >>> name = ''
        >>> api.cancel_operation(name)

    Args:
        name (str): The name of the operation resource to be cancelled.
        retry (google.api_core.retry.Retry): The retry strategy for the
            RPC. Unspecified means the client configuration's default;
            ``None`` disables retries entirely.
        timeout (float): Seconds to wait for the RPC. With ``retry``,
            applies per attempt, so the overall call may take longer.
            Unspecified means the client configuration's default;
            ``None`` disables the timeout.

    Raises:
        google.api_core.exceptions.MethodNotImplemented: If the server
            does not support this method. Services are not required to
            implement this method.
        google.api_core.exceptions.GoogleAPICallError: If an error
            occurred while invoking the RPC, the appropriate
            ``GoogleAPICallError`` subclass will be raised.
    """
    # Build the request inline; cancellation returns no payload.
    self._cancel_operation(
        operations_pb2.CancelOperationRequest(name=name),
        retry=retry,
        timeout=timeout,
    )
Deletes a long-running operation.
This method indicates that the client is no longer interested in the
operation result. It does not cancel the operation.
Example:
>>> from google.api_core import operations_v1
>>> api = operations_v1.OperationsClient()
>>> name = ''
>>> api.delete_operation(name)
Args:
name (str): The name of the operation resource to be deleted.
retry (google.api_core.retry.Retry): The retry strategy to use
when invoking the RPC. If unspecified, the default retry from
the client configuration will be used. If ``None``, then this
method will not retry the RPC at all.
timeout (float): The amount of time in seconds to wait for the RPC
to complete. Note that if ``retry`` is used, this timeout
applies to each individual attempt and the overall time it
takes for this method to complete may be longer. If
unspecified, the default timeout in the client
configuration is used. If ``None``, then the RPC method will
not time out.
Raises:
google.api_core.exceptions.MethodNotImplemented: If the server
does not support this method. Services are not required to
implement this method.
google.api_core.exceptions.GoogleAPICallError: If an error occurred
while invoking the RPC, the appropriate ``GoogleAPICallError``
subclass will be raised.
def delete_operation(
    self, name, retry=gapic_v1.method.DEFAULT, timeout=gapic_v1.method.DEFAULT
):
    """Delete a long-running operation.

    This method indicates that the client is no longer interested in the
    operation result. It does not cancel the operation.

    Example:
        >>> from google.api_core import operations_v1
        >>> api = operations_v1.OperationsClient()
        >>> name = ''
        >>> api.delete_operation(name)

    Args:
        name (str): The name of the operation resource to be deleted.
        retry (google.api_core.retry.Retry): The retry strategy for the
            RPC. Unspecified means the client configuration's default;
            ``None`` disables retries entirely.
        timeout (float): Seconds to wait for the RPC. With ``retry``,
            applies per attempt, so the overall call may take longer.
            Unspecified means the client configuration's default;
            ``None`` disables the timeout.

    Raises:
        google.api_core.exceptions.MethodNotImplemented: If the server
            does not support this method. Services are not required to
            implement this method.
        google.api_core.exceptions.GoogleAPICallError: If an error
            occurred while invoking the RPC, the appropriate
            ``GoogleAPICallError`` subclass will be raised.
    """
    # Build the request inline; deletion returns no payload.
    self._delete_operation(
        operations_pb2.DeleteOperationRequest(name=name),
        retry=retry,
        timeout=timeout,
    )
Extract the config name from a full resource name.
>>> config_name_from_full_name('projects/my-proj/configs/my-config')
"my-config"
:type full_name: str
:param full_name:
The full resource name of a config. The full resource name looks like
``projects/project-name/configs/config-name`` and is returned as the
``name`` field of a config resource. See
https://cloud.google.com/deployment-manager/runtime-configurator/reference/rest/v1beta1/projects.configs
:rtype: str
:returns: The config's short name, given its full resource name.
:raises: :class:`ValueError` if ``full_name`` is not the expected format
def config_name_from_full_name(full_name):
    """Extract the config name from a full resource name.

    >>> config_name_from_full_name('projects/my-proj/configs/my-config')
    "my-config"

    :type full_name: str
    :param full_name:
        The full resource name of a config. The full resource name looks like
        ``projects/project-name/configs/config-name`` and is returned as the
        ``name`` field of a config resource. See
        https://cloud.google.com/deployment-manager/runtime-configurator/reference/rest/v1beta1/projects.configs

    :rtype: str
    :returns: The config's short name, given its full resource name.
    :raises: :class:`ValueError` if ``full_name`` is not the expected format
    """
    # Validate the segment count explicitly so malformed input of any
    # length raises the descriptive ValueError below. Bare tuple
    # unpacking would raise an unhelpful "values to unpack" error when
    # the segment count differs from four.
    parts = full_name.split("/")
    if len(parts) != 4 or parts[0] != "projects" or parts[2] != "configs":
        raise ValueError(
            "Unexpected format of resource",
            full_name,
            'Expected "projects/{proj}/configs/{cfg}"',
        )
    return parts[3]
Extract the variable name from a full resource name.
>>> variable_name_from_full_name(
'projects/my-proj/configs/my-config/variables/var-name')
"var-name"
>>> variable_name_from_full_name(
'projects/my-proj/configs/my-config/variables/another/var/name')
"another/var/name"
:type full_name: str
:param full_name:
The full resource name of a variable. The full resource name looks like
``projects/prj-name/configs/cfg-name/variables/var-name`` and is
returned as the ``name`` field of a variable resource. See
https://cloud.google.com/deployment-manager/runtime-configurator/reference/rest/v1beta1/projects.configs.variables
:rtype: str
:returns: The variable's short name, given its full resource name.
:raises: :class:`ValueError` if ``full_name`` is not the expected format
def variable_name_from_full_name(full_name):
    """Extract the variable name from a full resource name.

    >>> variable_name_from_full_name(
        'projects/my-proj/configs/my-config/variables/var-name')
    "var-name"
    >>> variable_name_from_full_name(
        'projects/my-proj/configs/my-config/variables/another/var/name')
    "another/var/name"

    :type full_name: str
    :param full_name:
        The full resource name of a variable. The full resource name looks like
        ``projects/prj-name/configs/cfg-name/variables/var-name`` and is
        returned as the ``name`` field of a variable resource. See
        https://cloud.google.com/deployment-manager/runtime-configurator/reference/rest/v1beta1/projects.configs.variables

    :rtype: str
    :returns: The variable's short name, given its full resource name.
    :raises: :class:`ValueError` if ``full_name`` is not the expected format
    """
    # maxsplit=5 keeps any remaining '/' characters inside the variable
    # name itself (variable names may contain slashes). Validate the
    # segment count explicitly so malformed input of any length raises
    # the descriptive ValueError below instead of an unhelpful tuple
    # unpacking error.
    parts = full_name.split("/", 5)
    if (
        len(parts) != 6
        or parts[0] != "projects"
        or parts[2] != "configs"
        or parts[4] != "variables"
    ):
        raise ValueError(
            "Unexpected format of resource",
            full_name,
            'Expected "projects/{proj}/configs/{cfg}/variables/..."',
        )
    return parts[5]
Return the maximum value in this histogram.
If there are no values in the histogram at all, return 600.
Returns:
int: The maximum value in the histogram.
def max(self):
    """Return the maximum value in this histogram.

    If there are no values in the histogram at all, return 600.

    Returns:
        int: The maximum value in the histogram.
    """
    if not self._data:
        return 600
    # ``sorted`` is ascending, so the last key is the largest observed
    # value. This replaces the convoluted
    # ``next(iter(reversed(sorted(...))))`` chain with one clear pass.
    return sorted(self._data)[-1]
Return the minimum value in this histogram.
If there are no values in the histogram at all, return 10.
Returns:
int: The minimum value in the histogram.
def min(self):
    """Return the minimum value in this histogram.

    If there are no values in the histogram at all, return 10.

    Returns:
        int: The minimum value in the histogram.
    """
    if not self._data:
        return 10
    # ``sorted`` is ascending, so the first key is the smallest observed
    # value. This replaces the ``next(iter(sorted(...)))`` chain with a
    # direct index.
    return sorted(self._data)[0]
Add the value to this histogram.
Args:
value (int): The value. Values outside of ``10 <= x <= 600``
will be raised to ``10`` or reduced to ``600``.
def add(self, value):
    """Record ``value`` in this histogram.

    Args:
        value (int): The value. Values outside of ``10 <= x <= 600``
            will be raised to ``10`` or reduced to ``600``.
    """
    # Clamp out-of-range values into the supported [10, 600] interval.
    value = int(value)
    if value < 10:
        value = 10
    elif value > 600:
        value = 600
    # Tally the clamped value and bump the running total count.
    self._data[value] = self._data.get(value, 0) + 1
    self._len += 1
Return the value that is the Nth percentile in the histogram.
Args:
percent (Union[int, float]): The percentile being sought. The
default consumer implementations consistently use ``99``.
Returns:
int: The value corresponding to the requested percentile.
def percentile(self, percent):
    """Return the value at the Nth percentile of this histogram.

    Args:
        percent (Union[int, float]): The percentile being sought. The
            default consumer implementations consistently use ``99``.

    Returns:
        int: The value corresponding to the requested percentile.
    """
    # Anything at or above 100 is treated as the 100th percentile.
    if percent >= 100:
        percent = 100
    # Number of observations that lie above the requested percentile.
    remaining = len(self) - len(self) * (percent / 100)
    # Walk the buckets from largest to smallest, consuming observations
    # until the remainder drops below zero; that bucket holds the
    # percentile value.
    for bucket in sorted(self._data.keys(), reverse=True):
        remaining -= self._data[bucket]
        if remaining < 0:
            return bucket
    # Reaching here means the histogram holds no data at all; fall back
    # to 10 seconds.
    return 10
Return a fully-qualified job string.
def job_path(cls, project, location, job):
    """Return a fully-qualified job string."""
    # Expand the canonical resource template with the given identifiers.
    template = "projects/{project}/locations/{location}/jobs/{job}"
    return google.api_core.path_template.expand(
        template, project=project, location=location, job=job
    )
Creates a job.
Example:
>>> from google.cloud import scheduler_v1beta1
>>>
>>> client = scheduler_v1beta1.CloudSchedulerClient()
>>>
>>> parent = client.location_path('[PROJECT]', '[LOCATION]')
>>>
>>> # TODO: Initialize `job`:
>>> job = {}
>>>
>>> response = client.create_job(parent, job)
Args:
parent (str): Required.
The location name. For example:
``projects/PROJECT_ID/locations/LOCATION_ID``.
job (Union[dict, ~google.cloud.scheduler_v1beta1.types.Job]): Required.
The job to add. The user can optionally specify a name for the job in
``name``. ``name`` cannot be the same as an existing job. If a name is
not specified then the system will generate a random unique name that
will be returned (``name``) in the response.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.scheduler_v1beta1.types.Job`
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.scheduler_v1beta1.types.Job` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
def create_job(
    self,
    parent,
    job,
    retry=google.api_core.gapic_v1.method.DEFAULT,
    timeout=google.api_core.gapic_v1.method.DEFAULT,
    metadata=None,
):
    """
    Creates a job.

    Example:
        >>> from google.cloud import scheduler_v1beta1
        >>>
        >>> client = scheduler_v1beta1.CloudSchedulerClient()
        >>>
        >>> parent = client.location_path('[PROJECT]', '[LOCATION]')
        >>>
        >>> # TODO: Initialize `job`:
        >>> job = {}
        >>>
        >>> response = client.create_job(parent, job)

    Args:
        parent (str): Required.
            The location name. For example:
            ``projects/PROJECT_ID/locations/LOCATION_ID``.
        job (Union[dict, ~google.cloud.scheduler_v1beta1.types.Job]): Required.
            The job to add. The user can optionally specify a name for the job in
            ``name``. ``name`` cannot be the same as an existing job. If a name is
            not specified then the system will generate a random unique name that
            will be returned (``name``) in the response.

            If a dict is provided, it must be of the same form as the protobuf
            message :class:`~google.cloud.scheduler_v1beta1.types.Job`
        retry (Optional[google.api_core.retry.Retry]): A retry object used
            to retry requests. If ``None`` is specified, requests will not
            be retried.
        timeout (Optional[float]): The amount of time, in seconds, to wait
            for the request to complete. Note that if ``retry`` is
            specified, the timeout applies to each individual attempt.
        metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
            that is provided to the method.

    Returns:
        A :class:`~google.cloud.scheduler_v1beta1.types.Job` instance.

    Raises:
        google.api_core.exceptions.GoogleAPICallError: If the request
            failed for any reason.
        google.api_core.exceptions.RetryError: If the request failed due
            to a retryable error and retry attempts failed.
        ValueError: If the parameters are invalid.
    """
    # Wrap the transport method to add retry and timeout logic.
    # The wrapped callable is cached in ``_inner_api_calls`` so the
    # wrapping only happens on the first invocation per client.
    if "create_job" not in self._inner_api_calls:
        self._inner_api_calls[
            "create_job"
        ] = google.api_core.gapic_v1.method.wrap_method(
            self.transport.create_job,
            default_retry=self._method_configs["CreateJob"].retry,
            default_timeout=self._method_configs["CreateJob"].timeout,
            client_info=self._client_info,
        )
    request = cloudscheduler_pb2.CreateJobRequest(parent=parent, job=job)
    if metadata is None:
        metadata = []
    # Copy so the caller's sequence is never mutated by the append below.
    metadata = list(metadata)
    try:
        # NOTE(review): generated-code pattern — building this list of
        # tuples cannot itself raise AttributeError; the guard is kept
        # as emitted by the code generator.
        routing_header = [("parent", parent)]
    except AttributeError:
        pass
    else:
        # Attach the routing header so the request carries the parent
        # resource name as gRPC metadata.
        routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
            routing_header
        )
        metadata.append(routing_metadata)
    return self._inner_api_calls["create_job"](
        request, retry=retry, timeout=timeout, metadata=metadata
    )
A :class:`~google.cloud.bigquery.model.ModelReference` pointing to
this model.
Read-only.
Returns:
google.cloud.bigquery.model.ModelReference: pointer to this model.
def reference(self):
    """A :class:`~google.cloud.bigquery.model.ModelReference` pointing to
    this model.

    Read-only.

    Returns:
        google.cloud.bigquery.model.ModelReference: pointer to this model.
    """
    # Wrap this model's proto sub-message in a fresh reference object.
    model_ref = ModelReference()
    model_ref._proto = self._proto.model_reference
    return model_ref
Union[datetime.datetime, None]: Datetime at which the model was
created (:data:`None` until set from the server).
Read-only.
def created(self):
    """Union[datetime.datetime, None]: Datetime at which the model was
    created (:data:`None` until set from the server).

    Read-only.
    """
    millis = self._proto.creation_time
    if millis is None or millis == 0:
        # Zero is the proto default, i.e. the server never set the field.
        return None
    # The proto stores milliseconds; the helper expects microseconds.
    return google.cloud._helpers._datetime_from_microseconds(1000.0 * float(millis))
Union[datetime.datetime, None]: Datetime at which the model was last
modified (:data:`None` until set from the server).
Read-only.
def modified(self):
    """Union[datetime.datetime, None]: Datetime at which the model was last
    modified (:data:`None` until set from the server).

    Read-only.
    """
    millis = self._proto.last_modified_time
    if millis is None or millis == 0:
        # Zero is the proto default, i.e. the server never set the field.
        return None
    # The proto stores milliseconds; the helper expects microseconds.
    return google.cloud._helpers._datetime_from_microseconds(1000.0 * float(millis))
Union[datetime.datetime, None]: The datetime when this model
expires. If not present, the model will persist indefinitely. Expired
models will be deleted and their storage reclaimed.
def expires(self):
    """Union[datetime.datetime, None]: The datetime when this model
    expires. If not present, the model will persist indefinitely. Expired
    models will be deleted and their storage reclaimed.
    """
    millis = self._properties.get("expirationTime")
    if millis is None:
        return None
    # Stored as milliseconds-since-epoch; the helper takes microseconds.
    return google.cloud._helpers._datetime_from_microseconds(1000.0 * float(millis))
Factory: construct a model resource given its API representation
Args:
resource (Dict[str, object]):
Model resource representation from the API
Returns:
google.cloud.bigquery.model.Model: Model parsed from ``resource``.
def from_api_repr(cls, resource):
    """Factory: construct a model resource given its API representation

    Args:
        resource (Dict[str, object]):
            Model resource representation from the API

    Returns:
        google.cloud.bigquery.model.Model: Model parsed from ``resource``.
    """
    this = cls(None)
    # Convert from millis-from-epoch to timestamp well-known type.
    # TODO: Remove this hack once CL 238585470 hits prod.
    # Work on a deep copy so the caller's ``resource`` dict is never
    # mutated by the rewrite below.
    resource = copy.deepcopy(resource)
    for training_run in resource.get("trainingRuns", ()):
        start_time = training_run.get("startTime")
        if not start_time or "-" in start_time:  # Already right format?
            continue
        # Interpret the bare number as milliseconds-since-epoch and
        # rewrite it as an RFC 3339 timestamp so ``ParseDict`` accepts it.
        start_time = datetime_helpers.from_microseconds(1e3 * float(start_time))
        training_run["startTime"] = datetime_helpers.to_rfc3339(start_time)
    this._proto = json_format.ParseDict(resource, types.Model())
    for key in six.itervalues(cls._PROPERTY_TO_API_FIELD):
        # Leave missing keys unset. This allows us to use setdefault in the
        # getters where we want a default value other than None.
        if key in resource:
            this._properties[key] = resource[key]
    return this
str: URL path for the model's APIs.
def path(self):
    """str: URL path for the model's APIs."""
    # Same result as the %-formatting form, expressed with str.format.
    return "/projects/{}/datasets/{}/models/{}".format(
        self._proto.project_id,
        self._proto.dataset_id,
        self._proto.model_id,
    )
Factory: construct a model reference given its API representation
Args:
resource (Dict[str, object]):
Model reference representation returned from the API
Returns:
google.cloud.bigquery.model.ModelReference:
Model reference parsed from ``resource``.
def from_api_repr(cls, resource):
    """Factory: construct a model reference given its API representation

    Args:
        resource (Dict[str, object]):
            Model reference representation returned from the API

    Returns:
        google.cloud.bigquery.model.ModelReference:
            Model reference parsed from ``resource``.
    """
    model_ref = cls()
    # Let protobuf's JSON machinery validate and populate the message.
    model_ref._proto = json_format.ParseDict(resource, types.ModelReference())
    return model_ref
Construct a model reference from model ID string.
Args:
model_id (str):
A model ID in standard SQL format. If ``default_project``
is not specified, this must include a project ID, dataset
ID, and model ID, each separated by ``.``.
default_project (str):
Optional. The project ID to use when ``model_id`` does not
include a project ID.
Returns:
google.cloud.bigquery.model.ModelReference:
Model reference parsed from ``model_id``.
Raises:
ValueError:
If ``model_id`` is not a fully-qualified table ID in
standard SQL format.
def from_string(cls, model_id, default_project=None):
    """Construct a model reference from model ID string.

    Args:
        model_id (str):
            A model ID in standard SQL format. If ``default_project``
            is not specified, this must include a project ID, dataset
            ID, and model ID, each separated by ``.``.
        default_project (str):
            Optional. The project ID to use when ``model_id`` does not
            include a project ID.

    Returns:
        google.cloud.bigquery.model.ModelReference:
            Model reference parsed from ``model_id``.

    Raises:
        ValueError:
            If ``model_id`` is not a fully-qualified table ID in
            standard SQL format.
    """
    # Split the dotted ID into its three components, then reuse the
    # API-representation factory to build the reference.
    project, dataset_id, parsed_model_id = _helpers._parse_3_part_id(
        model_id, default_project=default_project, property_name="model_id"
    )
    return cls.from_api_repr(
        {
            "projectId": project,
            "datasetId": dataset_id,
            "modelId": parsed_model_id,
        }
    )
Leases tasks from a pull queue for ``lease_duration``.
This method is invoked by the worker to obtain a lease. The worker must
acknowledge the task via ``AcknowledgeTask`` after they have performed
the work associated with the task.
The ``payload`` is intended to store data that the worker needs to
perform the work associated with the task. To return the payloads in the
``response``, set ``response_view`` to ``FULL``.
A maximum of 10 qps of ``LeaseTasks`` requests are allowed per queue.
``RESOURCE_EXHAUSTED`` is returned when this limit is exceeded.
``RESOURCE_EXHAUSTED`` is also returned when
``max_tasks_dispatched_per_second`` is exceeded.
Example:
>>> from google.cloud import tasks_v2beta2
>>>
>>> client = tasks_v2beta2.CloudTasksClient()
>>>
>>> parent = client.queue_path('[PROJECT]', '[LOCATION]', '[QUEUE]')
>>>
>>> # TODO: Initialize `lease_duration`:
>>> lease_duration = {}
>>>
>>> response = client.lease_tasks(parent, lease_duration)
Args:
parent (str): Required.
The queue name. For example:
``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID``
lease_duration (Union[dict, ~google.cloud.tasks_v2beta2.types.Duration]): After the worker has successfully finished the work associated with the
task, the worker must call via ``AcknowledgeTask`` before the
``schedule_time``. Otherwise the task will be returned to a later
``LeaseTasks`` call so that another worker can retry it.
The maximum lease duration is 1 week. ``lease_duration`` will be
truncated to the nearest second.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.tasks_v2beta2.types.Duration`
max_tasks (int): The maximum number of tasks to lease.
The system will make a best effort to return as close to as
``max_tasks`` as possible.
The largest that ``max_tasks`` can be is 1000.
response_view (~google.cloud.tasks_v2beta2.types.View): The response\_view specifies which subset of the ``Task`` will be
returned.
By default response\_view is ``BASIC``; not all information is retrieved
by default because some data, such as payloads, might be desirable to
return only when needed because of its large size or because of the
sensitivity of data that it contains.
Authorization for ``FULL`` requires ``cloudtasks.tasks.fullView``
`Google IAM <https://cloud.google.com/iam/>`___ permission on the
``Task`` resource.
filter_ (str): ``filter`` can be used to specify a subset of tasks to lease.
When ``filter`` is set to ``tag=<my-tag>`` then the ``response`` will
contain only tasks whose ``tag`` is equal to ``<my-tag>``. ``<my-tag>``
must be less than 500 characters.
When ``filter`` is set to ``tag_function=oldest_tag()``, only tasks
which have the same tag as the task with the oldest ``schedule_time``
will be returned.
Grammar Syntax:
- ``filter = "tag=" tag | "tag_function=" function``
- ``tag = string``
- ``function = "oldest_tag()"``
The ``oldest_tag()`` function returns tasks which have the same tag as
the oldest task (ordered by schedule time).
SDK compatibility: Although the SDK allows tags to be either string or
`bytes <https://cloud.google.com/appengine/docs/standard/java/javadoc/com/google/appengine/api/taskqueue/TaskOptions.html#tag-byte:A->`__,
only UTF-8 encoded tags can be used in Cloud Tasks. Tag which aren't
UTF-8 encoded can't be used in the ``filter`` and the task's ``tag``
will be displayed as empty in Cloud Tasks.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.tasks_v2beta2.types.LeaseTasksResponse` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
def lease_tasks(
    self,
    parent,
    lease_duration,
    max_tasks=None,
    response_view=None,
    filter_=None,
    retry=google.api_core.gapic_v1.method.DEFAULT,
    timeout=google.api_core.gapic_v1.method.DEFAULT,
    metadata=None,
):
    """
    Leases tasks from a pull queue for ``lease_duration``.

    This method is invoked by the worker to obtain a lease. The worker
    must acknowledge the task via ``AcknowledgeTask`` after they have
    performed the work associated with the task.  To return task
    payloads in the ``response``, set ``response_view`` to ``FULL``.

    A maximum of 10 qps of ``LeaseTasks`` requests are allowed per
    queue. ``RESOURCE_EXHAUSTED`` is returned when this limit is
    exceeded, and also when ``max_tasks_dispatched_per_second`` is
    exceeded.

    Example:
        >>> from google.cloud import tasks_v2beta2
        >>>
        >>> client = tasks_v2beta2.CloudTasksClient()
        >>>
        >>> parent = client.queue_path('[PROJECT]', '[LOCATION]', '[QUEUE]')
        >>>
        >>> # TODO: Initialize `lease_duration`:
        >>> lease_duration = {}
        >>>
        >>> response = client.lease_tasks(parent, lease_duration)

    Args:
        parent (str): Required. The queue name. For example:
            ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID``
        lease_duration (Union[dict, ~google.cloud.tasks_v2beta2.types.Duration]):
            How long the lease lasts (truncated to the nearest second;
            the maximum is 1 week). The worker must call
            ``AcknowledgeTask`` before the ``schedule_time``, otherwise
            the task is returned to a later ``LeaseTasks`` call so that
            another worker can retry it.  If a dict is provided, it must
            be of the same form as the protobuf message
            :class:`~google.cloud.tasks_v2beta2.types.Duration`
        max_tasks (int): The maximum number of tasks to lease (at most
            1000). The system makes a best effort to return as close to
            ``max_tasks`` as possible.
        response_view (~google.cloud.tasks_v2beta2.types.View): Which
            subset of the ``Task`` is returned; ``BASIC`` by default.
            ``FULL`` requires the ``cloudtasks.tasks.fullView``
            `Google IAM <https://cloud.google.com/iam/>`___ permission
            on the ``Task`` resource.
        filter_ (str): Restricts which tasks are leased, e.g.
            ``tag=<my-tag>`` (tag must be less than 500 characters) or
            ``tag_function=oldest_tag()`` (tasks sharing the tag of the
            task with the oldest ``schedule_time``).  Only UTF-8 encoded
            tags can be used in the ``filter``.
        retry (Optional[google.api_core.retry.Retry]): A retry object used
            to retry requests. If ``None`` is specified, requests will not
            be retried.
        timeout (Optional[float]): The amount of time, in seconds, to wait
            for the request to complete. Note that if ``retry`` is
            specified, the timeout applies to each individual attempt.
        metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
            that is provided to the method.

    Returns:
        A :class:`~google.cloud.tasks_v2beta2.types.LeaseTasksResponse` instance.

    Raises:
        google.api_core.exceptions.GoogleAPICallError: If the request
            failed for any reason.
        google.api_core.exceptions.RetryError: If the request failed due
            to a retryable error and retry attempts failed.
        ValueError: If the parameters are invalid.
    """
    # Lazily wrap the transport method with retry/timeout logic and
    # cache the wrapped callable on the client instance.
    if "lease_tasks" not in self._inner_api_calls:
        method_config = self._method_configs["LeaseTasks"]
        self._inner_api_calls[
            "lease_tasks"
        ] = google.api_core.gapic_v1.method.wrap_method(
            self.transport.lease_tasks,
            default_retry=method_config.retry,
            default_timeout=method_config.timeout,
            client_info=self._client_info,
        )

    request = cloudtasks_pb2.LeaseTasksRequest(
        parent=parent,
        lease_duration=lease_duration,
        max_tasks=max_tasks,
        response_view=response_view,
        filter=filter_,
    )

    # Normalize user-supplied metadata to a mutable list.
    metadata = [] if metadata is None else list(metadata)
    # Attach a routing header so the request reaches the right backend.
    try:
        routing_header = [("parent", parent)]
    except AttributeError:
        pass
    else:
        metadata.append(
            google.api_core.gapic_v1.routing_header.to_grpc_metadata(
                routing_header
            )
        )

    return self._inner_api_calls["lease_tasks"](
        request, retry=retry, timeout=timeout, metadata=metadata
    )
Helper to format a LogRecord in Stackdriver fluentd format.
:rtype: str
:returns: JSON str to be written to the log file.
def format_stackdriver_json(record, message):
    """Helper to format a LogRecord in Stackdriver fluentd format.

    :type record: :class:`logging.LogRecord`
    :param record: the log record; its ``created`` timestamp, ``thread``
                   id, and ``levelname`` are folded into the payload.

    :type message: str
    :param message: the already-formatted log message.

    :rtype: str
    :returns: JSON str to be written to the log file.
    """
    # Split the epoch timestamp into whole seconds and the fractional part,
    # which Stackdriver expects as separate seconds/nanos fields.
    subsecond, second = math.modf(record.created)
    payload = {
        "message": message,
        "timestamp": {"seconds": int(second), "nanos": int(subsecond * 1e9)},
        "thread": record.thread,
        "severity": record.levelname,
    }
    return json.dumps(payload)
Get trace_id from flask request headers.
:rtype: str
:returns: TraceID in HTTP request headers.
def get_trace_id_from_flask():
    """Get trace_id from flask request headers.

    :rtype: str
    :returns: TraceID in HTTP request headers.
    """
    # Only usable when flask is importable and a request is in flight.
    if flask is not None and flask.request:
        header = flask.request.headers.get(_FLASK_TRACE_HEADER)
        if header is not None:
            # The header value is "<trace-id>/<span-id>;<options>".
            return header.split("/", 1)[0]
    return None
Get trace_id from webapp2 request headers.
:rtype: str
:returns: TraceID in HTTP request headers.
def get_trace_id_from_webapp2():
    """Get trace_id from webapp2 request headers.

    :rtype: str
    :returns: TraceID in HTTP request headers.
    """
    if webapp2 is None:
        return None

    try:
        # get_request() succeeds only inside a webapp2 request; otherwise
        # it raises AssertionError("Request global variable is not set").
        request = webapp2.get_request()
    except AssertionError:
        return None

    header = request.headers.get(_WEBAPP2_TRACE_HEADER)
    if header is not None:
        # The header value is "<trace-id>/<span-id>;<options>".
        return header.split("/", 1)[0]
    return None
Get trace_id from django request headers.
:rtype: str
:returns: TraceID in HTTP request headers.
def get_trace_id_from_django():
    """Get trace_id from django request headers.

    :rtype: str
    :returns: TraceID in HTTP request headers.
    """
    # The middleware stores the current request; None when not in a request.
    request = _get_django_request()
    if request is not None:
        header = request.META.get(_DJANGO_TRACE_HEADER)
        if header is not None:
            # The header value is "<trace-id>/<span-id>;<options>".
            return header.split("/", 1)[0]
    return None
Helper to get trace_id from web application request header.
:rtype: str
:returns: TraceID in HTTP request headers.
def get_trace_id():
    """Helper to get trace_id from web application request header.

    Tries django, flask, and webapp2 (in that order) and returns the
    first trace ID found, or ``None`` when no framework has an active
    request.

    :rtype: str
    :returns: TraceID in HTTP request headers.
    """
    # Lazily evaluate each framework checker so later ones are only
    # invoked when the earlier ones yield nothing.
    candidates = (
        checker()
        for checker in (
            get_trace_id_from_django,
            get_trace_id_from_flask,
            get_trace_id_from_webapp2,
        )
    )
    return next((trace_id for trace_id in candidates if trace_id is not None), None)
Return a fully-qualified group string.
def group_path(cls, project, group):
    """Return a fully-qualified group string."""
    template = "projects/{project}/groups/{group}"
    return google.api_core.path_template.expand(
        template, project=project, group=group
    )
Lists the existing groups.
Example:
>>> from google.cloud import monitoring_v3
>>>
>>> client = monitoring_v3.GroupServiceClient()
>>>
>>> name = client.project_path('[PROJECT]')
>>>
>>> # Iterate over all results
>>> for element in client.list_groups(name):
... # process element
... pass
>>>
>>>
>>> # Alternatively:
>>>
>>> # Iterate over results one page at a time
>>> for page in client.list_groups(name).pages:
... for element in page:
... # process element
... pass
Args:
name (str): The project whose groups are to be listed. The format is
``"projects/{project_id_or_number}"``.
children_of_group (str): A group name: ``"projects/{project_id_or_number}/groups/{group_id}"``.
Returns groups whose ``parentName`` field contains the group name. If no
groups have this parent, the results are empty.
ancestors_of_group (str): A group name: ``"projects/{project_id_or_number}/groups/{group_id}"``.
Returns groups that are ancestors of the specified group. The groups are
returned in order, starting with the immediate parent and ending with
the most distant ancestor. If the specified group has no immediate
parent, the results are empty.
descendants_of_group (str): A group name: ``"projects/{project_id_or_number}/groups/{group_id}"``.
Returns the descendants of the specified group. This is a superset of
the results returned by the ``childrenOfGroup`` filter, and includes
children-of-children, and so forth.
page_size (int): The maximum number of resources contained in the
underlying API response. If page streaming is performed per-
resource, this parameter does not affect the return value. If page
streaming is performed per-page, this determines the maximum number
of resources in a page.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.gax.PageIterator` instance. By default, this
is an iterable of :class:`~google.cloud.monitoring_v3.types.Group` instances.
This object can also be configured to iterate over the pages
of the response through the `options` parameter.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
def list_groups(
    self,
    name,
    children_of_group=None,
    ancestors_of_group=None,
    descendants_of_group=None,
    page_size=None,
    retry=google.api_core.gapic_v1.method.DEFAULT,
    timeout=google.api_core.gapic_v1.method.DEFAULT,
    metadata=None,
):
    """
    Lists the existing groups.

    Example:
        >>> from google.cloud import monitoring_v3
        >>>
        >>> client = monitoring_v3.GroupServiceClient()
        >>>
        >>> name = client.project_path('[PROJECT]')
        >>>
        >>> # Iterate over all results
        >>> for element in client.list_groups(name):
        ...     # process element
        ...     pass
        >>>
        >>> # Alternatively, iterate over results one page at a time
        >>> for page in client.list_groups(name).pages:
        ...     for element in page:
        ...         # process element
        ...         pass

    Args:
        name (str): The project whose groups are to be listed. The format is
            ``"projects/{project_id_or_number}"``.
        children_of_group (str): A group name:
            ``"projects/{project_id_or_number}/groups/{group_id}"``.
            Returns groups whose ``parentName`` field contains the group
            name. If no groups have this parent, the results are empty.
        ancestors_of_group (str): A group name:
            ``"projects/{project_id_or_number}/groups/{group_id}"``.
            Returns groups that are ancestors of the specified group,
            ordered from the immediate parent to the most distant
            ancestor. If the specified group has no immediate parent,
            the results are empty.
        descendants_of_group (str): A group name:
            ``"projects/{project_id_or_number}/groups/{group_id}"``.
            Returns the descendants of the specified group — a superset
            of the ``childrenOfGroup`` results including
            children-of-children, and so forth.
        page_size (int): The maximum number of resources contained in the
            underlying API response. If page streaming is performed
            per-page, this determines the maximum number of resources
            in a page.
        retry (Optional[google.api_core.retry.Retry]): A retry object used
            to retry requests. If ``None`` is specified, requests will not
            be retried.
        timeout (Optional[float]): The amount of time, in seconds, to wait
            for the request to complete. Note that if ``retry`` is
            specified, the timeout applies to each individual attempt.
        metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
            that is provided to the method.

    Returns:
        A :class:`~google.gax.PageIterator` instance. By default, this
        is an iterable of :class:`~google.cloud.monitoring_v3.types.Group`
        instances.  This object can also be configured to iterate over
        the pages of the response through the `options` parameter.

    Raises:
        google.api_core.exceptions.GoogleAPICallError: If the request
            failed for any reason.
        google.api_core.exceptions.RetryError: If the request failed due
            to a retryable error and retry attempts failed.
        ValueError: If the parameters are invalid.
    """
    # Wrap the transport method to add retry and timeout logic.
    if "list_groups" not in self._inner_api_calls:
        self._inner_api_calls[
            "list_groups"
        ] = google.api_core.gapic_v1.method.wrap_method(
            self.transport.list_groups,
            default_retry=self._method_configs["ListGroups"].retry,
            default_timeout=self._method_configs["ListGroups"].timeout,
            client_info=self._client_info,
        )

    # Sanity check: We have some fields which are mutually exclusive;
    # raise ValueError if more than one is sent.
    google.api_core.protobuf_helpers.check_oneof(
        children_of_group=children_of_group,
        ancestors_of_group=ancestors_of_group,
        descendants_of_group=descendants_of_group,
    )

    request = group_service_pb2.ListGroupsRequest(
        name=name,
        children_of_group=children_of_group,
        ancestors_of_group=ancestors_of_group,
        descendants_of_group=descendants_of_group,
        page_size=page_size,
    )

    # Normalize ``metadata`` exactly once (it was redundantly initialized
    # twice in the original) and attach the routing header.
    if metadata is None:
        metadata = []
    metadata = list(metadata)
    try:
        routing_header = [("name", name)]
    except AttributeError:
        pass
    else:
        routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
            routing_header
        )
        metadata.append(routing_metadata)

    iterator = google.api_core.page_iterator.GRPCIterator(
        client=None,
        method=functools.partial(
            self._inner_api_calls["list_groups"],
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        ),
        request=request,
        items_field="group",
        request_token_field="page_token",
        response_token_field="next_page_token",
    )
    return iterator
Returns a list of ``Voice`` supported for synthesis.
Example:
>>> from google.cloud import texttospeech_v1beta1
>>>
>>> client = texttospeech_v1beta1.TextToSpeechClient()
>>>
>>> response = client.list_voices()
Args:
language_code (str): Optional (but recommended)
`BCP-47 <https://www.rfc-editor.org/rfc/bcp/bcp47.txt>`__ language tag.
If specified, the ListVoices call will only return voices that can be
used to synthesize this language\_code. E.g. when specifying "en-NZ",
you will get supported "en-*" voices; when specifying "no", you will get
supported "no-*" (Norwegian) and "nb-*" (Norwegian Bokmal) voices;
specifying "zh" will also get supported "cmn-*" voices; specifying
"zh-hk" will also get supported "yue-\*" voices.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.texttospeech_v1beta1.types.ListVoicesResponse` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
def list_voices(
    self,
    language_code=None,
    retry=google.api_core.gapic_v1.method.DEFAULT,
    timeout=google.api_core.gapic_v1.method.DEFAULT,
    metadata=None,
):
    """
    Returns a list of ``Voice`` supported for synthesis.

    Example:
        >>> from google.cloud import texttospeech_v1beta1
        >>>
        >>> client = texttospeech_v1beta1.TextToSpeechClient()
        >>>
        >>> response = client.list_voices()

    Args:
        language_code (str): Optional (but recommended)
            `BCP-47 <https://www.rfc-editor.org/rfc/bcp/bcp47.txt>`__
            language tag.  When specified, only voices that can
            synthesize this language\_code are returned.  E.g. "en-NZ"
            returns supported "en-*" voices; "no" returns "no-*"
            (Norwegian) and "nb-*" (Norwegian Bokmal) voices; "zh" also
            returns supported "cmn-*" voices; "zh-hk" also returns
            supported "yue-\*" voices.
        retry (Optional[google.api_core.retry.Retry]): A retry object used
            to retry requests. If ``None`` is specified, requests will not
            be retried.
        timeout (Optional[float]): The amount of time, in seconds, to wait
            for the request to complete. Note that if ``retry`` is
            specified, the timeout applies to each individual attempt.
        metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
            that is provided to the method.

    Returns:
        A :class:`~google.cloud.texttospeech_v1beta1.types.ListVoicesResponse` instance.

    Raises:
        google.api_core.exceptions.GoogleAPICallError: If the request
            failed for any reason.
        google.api_core.exceptions.RetryError: If the request failed due
            to a retryable error and retry attempts failed.
        ValueError: If the parameters are invalid.
    """
    # Lazily wrap the transport method with retry/timeout logic and
    # cache the wrapped callable for subsequent calls.
    if "list_voices" not in self._inner_api_calls:
        method_config = self._method_configs["ListVoices"]
        self._inner_api_calls[
            "list_voices"
        ] = google.api_core.gapic_v1.method.wrap_method(
            self.transport.list_voices,
            default_retry=method_config.retry,
            default_timeout=method_config.timeout,
            client_info=self._client_info,
        )

    request = cloud_tts_pb2.ListVoicesRequest(language_code=language_code)
    return self._inner_api_calls["list_voices"](
        request, retry=retry, timeout=timeout, metadata=metadata
    )
Synthesizes speech synchronously: receive results after all text input
has been processed.
Example:
>>> from google.cloud import texttospeech_v1beta1
>>>
>>> client = texttospeech_v1beta1.TextToSpeechClient()
>>>
>>> # TODO: Initialize `input_`:
>>> input_ = {}
>>>
>>> # TODO: Initialize `voice`:
>>> voice = {}
>>>
>>> # TODO: Initialize `audio_config`:
>>> audio_config = {}
>>>
>>> response = client.synthesize_speech(input_, voice, audio_config)
Args:
input_ (Union[dict, ~google.cloud.texttospeech_v1beta1.types.SynthesisInput]): Required. The Synthesizer requires either plain text or SSML as input.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.texttospeech_v1beta1.types.SynthesisInput`
voice (Union[dict, ~google.cloud.texttospeech_v1beta1.types.VoiceSelectionParams]): Required. The desired voice of the synthesized audio.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.texttospeech_v1beta1.types.VoiceSelectionParams`
audio_config (Union[dict, ~google.cloud.texttospeech_v1beta1.types.AudioConfig]): Required. The configuration of the synthesized audio.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.texttospeech_v1beta1.types.AudioConfig`
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.texttospeech_v1beta1.types.SynthesizeSpeechResponse` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
def synthesize_speech(
    self,
    input_,
    voice,
    audio_config,
    retry=google.api_core.gapic_v1.method.DEFAULT,
    timeout=google.api_core.gapic_v1.method.DEFAULT,
    metadata=None,
):
    """
    Synthesizes speech synchronously: receive results after all text input
    has been processed.

    Example:
        >>> from google.cloud import texttospeech_v1beta1
        >>>
        >>> client = texttospeech_v1beta1.TextToSpeechClient()
        >>>
        >>> # TODO: Initialize `input_`:
        >>> input_ = {}
        >>>
        >>> # TODO: Initialize `voice`:
        >>> voice = {}
        >>>
        >>> # TODO: Initialize `audio_config`:
        >>> audio_config = {}
        >>>
        >>> response = client.synthesize_speech(input_, voice, audio_config)

    Args:
        input_ (Union[dict, ~google.cloud.texttospeech_v1beta1.types.SynthesisInput]):
            Required. Plain text or SSML to synthesize.  If a dict is
            provided, it must be of the same form as the protobuf message
            :class:`~google.cloud.texttospeech_v1beta1.types.SynthesisInput`
        voice (Union[dict, ~google.cloud.texttospeech_v1beta1.types.VoiceSelectionParams]):
            Required. The desired voice of the synthesized audio.  If a
            dict is provided, it must be of the same form as the protobuf
            message :class:`~google.cloud.texttospeech_v1beta1.types.VoiceSelectionParams`
        audio_config (Union[dict, ~google.cloud.texttospeech_v1beta1.types.AudioConfig]):
            Required. The configuration of the synthesized audio.  If a
            dict is provided, it must be of the same form as the protobuf
            message :class:`~google.cloud.texttospeech_v1beta1.types.AudioConfig`
        retry (Optional[google.api_core.retry.Retry]): A retry object used
            to retry requests. If ``None`` is specified, requests will not
            be retried.
        timeout (Optional[float]): The amount of time, in seconds, to wait
            for the request to complete. Note that if ``retry`` is
            specified, the timeout applies to each individual attempt.
        metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
            that is provided to the method.

    Returns:
        A :class:`~google.cloud.texttospeech_v1beta1.types.SynthesizeSpeechResponse` instance.

    Raises:
        google.api_core.exceptions.GoogleAPICallError: If the request
            failed for any reason.
        google.api_core.exceptions.RetryError: If the request failed due
            to a retryable error and retry attempts failed.
        ValueError: If the parameters are invalid.
    """
    # Lazily wrap the transport method with retry/timeout logic and
    # cache the wrapped callable for subsequent calls.
    if "synthesize_speech" not in self._inner_api_calls:
        method_config = self._method_configs["SynthesizeSpeech"]
        self._inner_api_calls[
            "synthesize_speech"
        ] = google.api_core.gapic_v1.method.wrap_method(
            self.transport.synthesize_speech,
            default_retry=method_config.retry,
            default_timeout=method_config.timeout,
            client_info=self._client_info,
        )

    request = cloud_tts_pb2.SynthesizeSpeechRequest(
        input=input_, voice=voice, audio_config=audio_config
    )
    return self._inner_api_calls["synthesize_speech"](
        request, retry=retry, timeout=timeout, metadata=metadata
    )
Raise AttributeError if the credentials are unsigned.
:type credentials: :class:`google.auth.credentials.Signing`
:param credentials: The credentials used to create a private key
for signing text.
:raises: :exc:`AttributeError` if credentials is not an instance
of :class:`google.auth.credentials.Signing`.
def ensure_signed_credentials(credentials):
    """Raise AttributeError if the credentials are unsigned.

    :type credentials: :class:`google.auth.credentials.Signing`
    :param credentials: The credentials used to create a private key
                        for signing text.

    :raises: :exc:`AttributeError` if credentials is not an instance
             of :class:`google.auth.credentials.Signing`.
    """
    if not isinstance(credentials, google.auth.credentials.Signing):
        auth_uri = (
            "https://google-cloud-python.readthedocs.io/en/latest/"
            "core/auth.html?highlight=authentication#setting-up-"
            "a-service-account"
        )
        # BUG FIX: the first string fragment lacked a trailing space, so the
        # rendered message ran sentences together ("credentials.the").
        raise AttributeError(
            "you need a private key to sign credentials. "
            "the credentials you are currently using %s "
            "just contains a token. see %s for more "
            "details." % (type(credentials), auth_uri)
        )
Gets query parameters for creating a signed URL.
:type credentials: :class:`google.auth.credentials.Signing`
:param credentials: The credentials used to create a private key
for signing text.
:type expiration: int or long
:param expiration: When the signed URL should expire.
:type string_to_sign: str
:param string_to_sign: The string to be signed by the credentials.
:raises: :exc:`AttributeError` if credentials is not an instance
of :class:`google.auth.credentials.Signing`.
:rtype: dict
:returns: Query parameters matching the signing credentials with a
signed payload.
def get_signed_query_params_v2(credentials, expiration, string_to_sign):
    """Gets query parameters for creating a signed URL.

    :type credentials: :class:`google.auth.credentials.Signing`
    :param credentials: The credentials used to create a private key
                        for signing text.

    :type expiration: int or long
    :param expiration: When the signed URL should expire.

    :type string_to_sign: str
    :param string_to_sign: The string to be signed by the credentials.

    :raises: :exc:`AttributeError` if credentials is not an instance
             of :class:`google.auth.credentials.Signing`.

    :rtype: dict
    :returns: Query parameters matching the signing credentials with a
              signed payload.
    """
    # Fail fast when the credentials cannot sign bytes.
    ensure_signed_credentials(credentials)
    signature = base64.b64encode(credentials.sign_bytes(string_to_sign))
    return {
        "GoogleAccessId": credentials.signer_email,
        "Expires": str(expiration),
        "Signature": signature,
    }
Convert 'expiration' to a number of seconds in the future.
:type expiration: Union[Integer, datetime.datetime, datetime.timedelta]
:param expiration: Point in time when the signed URL should expire.
:raises: :exc:`TypeError` when expiration is not a valid type.
:rtype: int
:returns: a timestamp as an absolute number of seconds since epoch.
def get_expiration_seconds_v2(expiration):
    """Convert 'expiration' to a number of seconds in the future.

    :type expiration: Union[Integer, datetime.datetime, datetime.timedelta]
    :param expiration: Point in time when the signed URL should expire.

    :raises: :exc:`TypeError` when expiration is not a valid type.

    :rtype: int
    :returns: a timestamp as an absolute number of seconds since epoch.
    """
    # A timedelta is relative: anchor it to the current UTC time.
    if isinstance(expiration, datetime.timedelta):
        expiration = NOW().replace(tzinfo=_helpers.UTC) + expiration

    # A datetime (including one produced above) becomes an epoch timestamp.
    if isinstance(expiration, datetime.datetime):
        expiration = _helpers._microseconds_from_datetime(expiration) // 10 ** 6

    if isinstance(expiration, six.integer_types):
        return expiration

    raise TypeError(
        "Expected an integer timestamp, datetime, or "
        "timedelta. Got %s" % type(expiration)
    )
Convert 'expiration' to a number of seconds offset from the current time.
:type expiration: Union[Integer, datetime.datetime, datetime.timedelta]
:param expiration: Point in time when the signed URL should expire.
:raises: :exc:`TypeError` when expiration is not a valid type.
:raises: :exc:`ValueError` when expiration is too large.
:rtype: Integer
:returns: seconds in the future when the signed URL will expire
def get_expiration_seconds_v4(expiration):
    """Convert 'expiration' to a number of seconds offset from the current time.

    :type expiration: Union[Integer, datetime.datetime, datetime.timedelta]
    :param expiration: Point in time when the signed URL should expire.

    :raises: :exc:`TypeError` when expiration is not a valid type.
    :raises: :exc:`ValueError` when expiration is too large.
    :rtype: Integer
    :returns: seconds in the future when the signed URL will expire
    """
    if not isinstance(expiration, _EXPIRATION_TYPES):
        raise TypeError(
            "Expected an integer timestamp, datetime, or "
            "timedelta. Got %s" % type(expiration)
        )

    now = NOW().replace(tzinfo=_helpers.UTC)

    if isinstance(expiration, six.integer_types):
        seconds = expiration

    if isinstance(expiration, datetime.datetime):
        if expiration.tzinfo is None:
            # Naive datetimes are interpreted as UTC.
            expiration = expiration.replace(tzinfo=_helpers.UTC)
        # Fall through to the timedelta branch below.
        expiration = expiration - now

    if isinstance(expiration, datetime.timedelta):
        seconds = int(expiration.total_seconds())

    if seconds > SEVEN_DAYS:
        # BUG FIX: the original used a "%d" placeholder with .format(), so
        # SEVEN_DAYS was never interpolated into the message.
        raise ValueError(
            "Max allowed expiration interval is seven days (%d seconds)"
            % (SEVEN_DAYS,)
        )
    return seconds
Canonicalize headers for signing.
See:
https://cloud.google.com/storage/docs/access-control/signed-urls#about-canonical-extension-headers
:type headers: Union[dict|List(Tuple(str,str))]
:param headers:
(Optional) Additional HTTP headers to be included as part of the
signed URLs. See:
https://cloud.google.com/storage/docs/xml-api/reference-headers
Requests using the signed URL *must* pass the specified header
(name and value) with each request for the URL.
:rtype: Tuple[List(str), List(Tuple(str, str))]
:returns: A pair of canonical ``name:value`` header strings and ordered
    (name, value) tuples, normalized / sorted per the URL referenced above.
def get_canonical_headers(headers):
    """Canonicalize headers for signing.

    See:
    https://cloud.google.com/storage/docs/access-control/signed-urls#about-canonical-extension-headers

    :type headers: Union[dict|List(Tuple(str,str))]
    :param headers:
        (Optional) Additional HTTP headers to be included as part of the
        signed URLs.  See:
        https://cloud.google.com/storage/docs/xml-api/reference-headers
        Requests using the signed URL *must* pass the specified header
        (name and value) with each request for the URL.

    :rtype: Tuple[List(str), List(Tuple(str, str))]
    :returns: A pair of canonical ``name:value`` header strings and ordered
              (name, value) tuples, normalized / sorted per the URL
              referenced above.  (The original docstring incorrectly
              claimed a single ``str`` return.)
    """
    if headers is None:
        headers = []
    elif isinstance(headers, dict):
        headers = list(headers.items())

    if not headers:
        return [], []

    # Header names are case-insensitive; repeated names are merged into a
    # single comma-separated value per the canonical-headers spec.
    normalized = collections.defaultdict(list)
    for key, val in headers:
        key = key.lower().strip()
        val = MULTIPLE_SPACES.sub(" ", val.strip())
        normalized[key].append(val)

    ordered_headers = sorted((key, ",".join(val)) for key, val in normalized.items())
    canonical_headers = ["{}:{}".format(*item) for item in ordered_headers]
    return canonical_headers, ordered_headers
Canonicalize method, resource
:type method: str
:param method: The HTTP verb that will be used when requesting the URL.
Defaults to ``'GET'``. If method is ``'RESUMABLE'`` then the
signature will additionally contain the `x-goog-resumable`
header, and the method changed to POST. See the signed URL
docs regarding this flow:
https://cloud.google.com/storage/docs/access-control/signed-urls
:type resource: str
:param resource: A pointer to a specific resource
(typically, ``/bucket-name/path/to/blob.txt``).
:type query_parameters: dict
:param query_parameters:
(Optional) Additional query parameters to be included as part of the
signed URLs. See:
https://cloud.google.com/storage/docs/xml-api/reference-headers#query
:type headers: Union[dict|List(Tuple(str,str))]
:param headers:
(Optional) Additional HTTP headers to be included as part of the
signed URLs. See:
https://cloud.google.com/storage/docs/xml-api/reference-headers
Requests using the signed URL *must* pass the specified header
(name and value) with each request for the URL.
:rtype: :class:_Canonical
:returns: Canonical method, resource, query_parameters, and headers.
def canonicalize(method, resource, query_parameters, headers):
    """Canonicalize method, resource, query parameters and headers for signing.

    :type method: str
    :param method: The HTTP verb that will be used when requesting the URL.
                   Defaults to ``'GET'``. If method is ``'RESUMABLE'`` then the
                   signature will additionally contain the `x-goog-resumable`
                   header, and the method changed to POST. See the signed URL
                   docs regarding this flow:
                   https://cloud.google.com/storage/docs/access-control/signed-urls

    :type resource: str
    :param resource: A pointer to a specific resource
                     (typically, ``/bucket-name/path/to/blob.txt``).

    :type query_parameters: dict
    :param query_parameters:
        (Optional) Additional query parameters to be included as part of the
        signed URLs.  See:
        https://cloud.google.com/storage/docs/xml-api/reference-headers#query

    :type headers: Union[dict|List(Tuple(str,str))]
    :param headers:
        (Optional) Additional HTTP headers to be included as part of the
        signed URLs.  See:
        https://cloud.google.com/storage/docs/xml-api/reference-headers
        Requests using the signed URL *must* pass the specified header
        (name and value) with each request for the URL.

    :rtype: :class:`_Canonical`
    :returns: Canonical method, resource, query_parameters, and headers.
    """
    headers, _ = get_canonical_headers(headers)
    # RESUMABLE is a pseudo-method: the actual HTTP verb is POST plus the
    # x-goog-resumable marker header.
    if method == "RESUMABLE":
        method = "POST"
        headers.append("x-goog-resumable:start")
    if query_parameters is None:
        return _Canonical(method, resource, [], headers)
    # Sort query parameters by lowercased name; empty/None values become "".
    normalized_qp = sorted(
        (key.lower(), value and value.strip() or "")
        for key, value in query_parameters.items()
    )
    encoded_qp = six.moves.urllib.parse.urlencode(normalized_qp)
    canonical_resource = "{}?{}".format(resource, encoded_qp)
    return _Canonical(method, canonical_resource, normalized_qp, headers)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.