def update_model(client, model_id):
    """Sample ID: go/samples-tracker/1533"""

    # [START bigquery_update_model_description]
    from google.cloud import bigquery

    # TODO(developer): Construct a BigQuery client object.
    # client = bigquery.Client()

    # TODO(developer): Set model_id to the ID of the model to fetch.
    # model_id = 'your-project.your_dataset.your_model'

    model = client.get_model(model_id)
    model.description = "This model was modified from a Python program."
    model = client.update_model(model, ["description"])

    full_model_id = "{}.{}.{}".format(model.project, model.dataset_id, model.model_id)
    print(
        "Updated model '{}' with description '{}'.".format(
            full_model_id, model.description
        )
    )
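For context, a minimal sketch of invoking this sample, assuming application default credentials are configured; the project, dataset, and model names below are placeholders:

from google.cloud import bigquery

client = bigquery.Client()
# Hypothetical fully-qualified model ID.
update_model(client, "my-project.my_dataset.my_model")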
def _pb_from_query(query):
    """Convert a Query instance to the corresponding protobuf.

    :type query: :class:`Query`
    :param query: The source query.

    :rtype: :class:`.query_pb2.Query`
    :returns: A protobuf that can be sent to the protobuf API.  N.b. that
              it does not contain "in-flight" fields for ongoing query
              executions (cursors, offset, limit).
    """
    pb = query_pb2.Query()

    for projection_name in query.projection:
        pb.projection.add().property.name = projection_name

    if query.kind:
        pb.kind.add().name = query.kind

    composite_filter = pb.filter.composite_filter
    composite_filter.op = query_pb2.CompositeFilter.AND

    if query.ancestor:
        ancestor_pb = query.ancestor.to_protobuf()

        # Filter on __key__ HAS_ANCESTOR == ancestor.
        ancestor_filter = composite_filter.filters.add().property_filter
        ancestor_filter.property.name = "__key__"
        ancestor_filter.op = query_pb2.PropertyFilter.HAS_ANCESTOR
        ancestor_filter.value.key_value.CopyFrom(ancestor_pb)

    for property_name, operator, value in query.filters:
        pb_op_enum = query.OPERATORS.get(operator)

        # Add the specific filter
        property_filter = composite_filter.filters.add().property_filter
        property_filter.property.name = property_name
        property_filter.op = pb_op_enum

        # Set the value to filter on based on the type.
        if property_name == "__key__":
            key_pb = value.to_protobuf()
            property_filter.value.key_value.CopyFrom(key_pb)
        else:
            helpers._set_protobuf_value(property_filter.value, value)

    if not composite_filter.filters:
        pb.ClearField("filter")

    for prop in query.order:
        property_order = pb.order.add()

        if prop.startswith("-"):
            property_order.property.name = prop[1:]
            property_order.direction = property_order.DESCENDING
        else:
            property_order.property.name = prop
            property_order.direction = property_order.ASCENDING

    for distinct_on_name in query.distinct_on:
        pb.distinct_on.add().name = distinct_on_name

    return pb
def namespace(self, value):
    """Update the query's namespace.

    :type value: str
    """
    if not isinstance(value, str):
        raise ValueError("Namespace must be a string")
    self._namespace = value
def kind(self, value):
    """Update the Kind of the Query.

    :type value: str
    :param value: updated kind for the query.

    .. note::

        The protobuf specification allows for ``kind`` to be repeated,
        but the current implementation returns an error if more than
        one value is passed.  If the back-end changes in the future to
        allow multiple values, this method will be updated to allow passing
        either a string or a sequence of strings.
    """
    if not isinstance(value, str):
        raise TypeError("Kind must be a string")
    self._kind = value
def ancestor(self, value):
    """Set the ancestor for the query.

    :type value: :class:`~google.cloud.datastore.key.Key`
    :param value: the new ancestor key
    """
    if not isinstance(value, Key):
        raise TypeError("Ancestor must be a Key")
    self._ancestor = value
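These setters are typically reached through the Query constructor rather than called directly; a minimal sketch, with placeholder kind, namespace, and ancestor values:

from google.cloud import datastore

client = datastore.Client()
ancestor_key = client.key("Family", "smith")  # placeholder ancestor key

# kind, namespace, and ancestor are validated by the setters above.
query = client.query(kind="Person", namespace="demo", ancestor=ancestor_key)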
def add_filter(self, property_name, operator, value):
    """Filter the query based on a property name, operator and a value.

    Expressions take the form of::

      .add_filter('<property>', '<operator>', <value>)

    where property is a property stored on the entity in the datastore
    and operator is one of ``OPERATORS``
    (ie, ``=``, ``<``, ``<=``, ``>``, ``>=``)::

      >>> from google.cloud import datastore
      >>> client = datastore.Client()
      >>> query = client.query(kind='Person')
      >>> query.add_filter('name', '=', 'James')
      >>> query.add_filter('age', '>', 50)

    :type property_name: str
    :param property_name: A property name.

    :type operator: str
    :param operator: One of ``=``, ``<``, ``<=``, ``>``, ``>=``.

    :type value: :class:`int`, :class:`str`, :class:`bool`,
                 :class:`float`, :class:`NoneType`,
                 :class:`datetime.datetime`,
                 :class:`google.cloud.datastore.key.Key`
    :param value: The value to filter on.

    :raises: :class:`ValueError` if ``operator`` is not one of the
             specified values, or if a filter names ``'__key__'`` but
             passes an invalid value (a key is required).
    """
    if self.OPERATORS.get(operator) is None:
        error_message = 'Invalid expression: "%s"' % (operator,)
        choices_message = "Please use one of: =, <, <=, >, >=."
        raise ValueError(error_message, choices_message)

    if property_name == "__key__" and not isinstance(value, Key):
        raise ValueError('Invalid key: "%s"' % value)

    self._filters.append((property_name, operator, value))
def projection(self, projection):
    """Set the fields returned by the query.

    :type projection: str or sequence of strings
    :param projection: Each value is a string giving the name of a
                       property to be included in the projection query.
    """
    if isinstance(projection, str):
        projection = [projection]
    self._projection[:] = projection
def order(self, value):
    """Set the fields used to sort query results.

    Sort fields will be applied in the order specified.

    :type value: str or sequence of strings
    :param value: Each value is a string giving the name of the
                  property on which to sort, optionally preceded by a
                  hyphen (-) to specify descending order.
                  Omitting the hyphen implies ascending order.
    """
    if isinstance(value, str):
        value = [value]
    self._order[:] = value
def distinct_on(self, value):
    """Set fields used to group query results.

    :type value: str or sequence of strings
    :param value: Each value is a string giving the name of a
                  property to use to group results together.
    """
    if isinstance(value, str):
        value = [value]
    self._distinct_on[:] = value
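Taken together, these three setters shape what the query returns; a minimal sketch using placeholder kind and property names:

from google.cloud import datastore

client = datastore.Client()
query = client.query(kind="Person")  # placeholder kind

query.projection = ["name", "age"]  # return only these properties
query.order = ["-age", "name"]      # descending age, then ascending name
query.distinct_on = ["name"]        # group results by name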
def fetch(
    self,
    limit=None,
    offset=0,
    start_cursor=None,
    end_cursor=None,
    client=None,
    eventual=False,
):
    """Execute the Query; return an iterator for the matching entities.

    For example::

      >>> from google.cloud import datastore
      >>> client = datastore.Client()
      >>> query = client.query(kind='Person')
      >>> query.add_filter('name', '=', 'Sally')
      >>> list(query.fetch())
      [<Entity object>, <Entity object>, ...]
      >>> list(query.fetch(1))
      [<Entity object>]

    :type limit: int
    :param limit: (Optional) limit passed through to the iterator.

    :type offset: int
    :param offset: (Optional) offset passed through to the iterator.

    :type start_cursor: bytes
    :param start_cursor: (Optional) cursor passed through to the iterator.

    :type end_cursor: bytes
    :param end_cursor: (Optional) cursor passed through to the iterator.

    :type client: :class:`google.cloud.datastore.client.Client`
    :param client: (Optional) client used to connect to datastore.
                   If not supplied, uses the query's value.

    :type eventual: bool
    :param eventual: (Optional) Defaults to strongly consistent (False).
                     Setting True will use eventual consistency, but cannot
                     be used inside a transaction or will raise ValueError.

    :rtype: :class:`Iterator`
    :returns: The iterator for the query.
    """
    if client is None:
        client = self._client

    return Iterator(
        self,
        client,
        limit=limit,
        offset=offset,
        start_cursor=start_cursor,
        end_cursor=end_cursor,
        eventual=eventual,
    )
def _build_protobuf(self):
    """Build a query protobuf.

    Relies on the current state of the iterator.

    :rtype: :class:`.query_pb2.Query`
    :returns: The query protobuf object for the current
              state of the iterator.
    """
    pb = _pb_from_query(self._query)

    start_cursor = self.next_page_token
    if start_cursor is not None:
        pb.start_cursor = base64.urlsafe_b64decode(start_cursor)

    end_cursor = self._end_cursor
    if end_cursor is not None:
        pb.end_cursor = base64.urlsafe_b64decode(end_cursor)

    if self.max_results is not None:
        pb.limit.value = self.max_results - self.num_results

    if start_cursor is None and self._offset is not None:
        # NOTE: We don't need to add an offset to the request protobuf
        #       if we are using an existing cursor, because the offset
        #       is only relative to the start of the result set, not
        #       relative to each page (this method is called per-page)
        pb.offset = self._offset

    return pb
def _process_query_results(self, response_pb):
    """Process the response from a datastore query.

    :type response_pb: :class:`.datastore_pb2.RunQueryResponse`
    :param response_pb: The protobuf response from a ``runQuery`` request.

    :rtype: iterable
    :returns: The next page of entity results.

    :raises ValueError: If ``more_results`` is an unexpected value.
    """
    self._skipped_results = response_pb.batch.skipped_results

    if response_pb.batch.more_results == _NO_MORE_RESULTS:
        self.next_page_token = None
    else:
        self.next_page_token = base64.urlsafe_b64encode(
            response_pb.batch.end_cursor
        )
    self._end_cursor = None

    if response_pb.batch.more_results == _NOT_FINISHED:
        self._more_results = True
    elif response_pb.batch.more_results in _FINISHED:
        self._more_results = False
    else:
        raise ValueError("Unexpected value returned for `more_results`.")

    return [result.entity for result in response_pb.batch.entity_results]
def _next_page(self):
    """Get the next page in the iterator.

    :rtype: :class:`~google.cloud.iterator.Page`
    :returns: The next page in the iterator (or :data:`None` if
              there are no pages left).
    """
    if not self._more_results:
        return None

    query_pb = self._build_protobuf()
    transaction = self.client.current_transaction
    if transaction is None:
        transaction_id = None
    else:
        transaction_id = transaction.id
    read_options = helpers.get_read_options(self._eventual, transaction_id)

    partition_id = entity_pb2.PartitionId(
        project_id=self._query.project, namespace_id=self._query.namespace
    )
    response_pb = self.client._datastore_api.run_query(
        self._query.project, partition_id, read_options, query=query_pb
    )
    entity_pbs = self._process_query_results(response_pb)
    return page_iterator.Page(self, entity_pbs, self.item_to_value)
def _make_write_pb(table, columns, values):
    """Helper for :meth:`Batch.insert` et aliae.

    :type table: str
    :param table: Name of the table to be modified.

    :type columns: list of str
    :param columns: Name of the table columns to be modified.

    :type values: list of lists
    :param values: Values to be modified.

    :rtype: :class:`google.cloud.spanner_v1.proto.mutation_pb2.Mutation.Write`
    :returns: Write protobuf
    """
    return Mutation.Write(
        table=table, columns=columns, values=_make_list_value_pbs(values)
    )
def update(self, table, columns, values):
    """Update one or more existing table rows.

    :type table: str
    :param table: Name of the table to be modified.

    :type columns: list of str
    :param columns: Name of the table columns to be modified.

    :type values: list of lists
    :param values: Values to be modified.
    """
    self._mutations.append(Mutation(update=_make_write_pb(table, columns, values)))
def delete(self, table, keyset):
    """Delete one or more table rows.

    :type table: str
    :param table: Name of the table to be modified.

    :type keyset: :class:`~google.cloud.spanner_v1.keyset.Keyset`
    :param keyset: Keys/ranges identifying rows to delete.
    """
    delete = Mutation.Delete(table=table, key_set=keyset._to_pb())
    self._mutations.append(Mutation(delete=delete))
def commit(self):
    """Commit mutations to the database.

    :rtype: datetime
    :returns: timestamp of the committed changes.
    """
    self._check_state()

    database = self._session._database
    api = database.spanner_api
    metadata = _metadata_with_prefix(database.name)
    txn_options = TransactionOptions(read_write=TransactionOptions.ReadWrite())
    response = api.commit(
        self._session.name,
        self._mutations,
        single_use_transaction=txn_options,
        metadata=metadata,
    )
    self.committed = _pb_timestamp_to_datetime(response.commit_timestamp)
    return self.committed
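In application code these mutation methods are usually reached through a database's batch context manager, which calls commit on exit; a minimal sketch, with placeholder instance, database, table, and key values:

from google.cloud import spanner

client = spanner.Client()
instance = client.instance("my-instance")    # placeholder instance ID
database = instance.database("my-database")  # placeholder database ID

with database.batch() as batch:
    batch.update(
        table="citizens",  # placeholder table and columns
        columns=("email", "age"),
        values=[("phred@example.com", 33)],
    )
    batch.delete("citizens", spanner.KeySet(keys=[("bharney@example.com",)]))
# The batch commits automatically when the `with` block exits.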
def dataset_exists(client, dataset_reference):
    """Return if a dataset exists.

    Args:
        client (google.cloud.bigquery.client.Client):
            A client to connect to the BigQuery API.
        dataset_reference (google.cloud.bigquery.dataset.DatasetReference):
            A reference to the dataset to look for.

    Returns:
        bool: ``True`` if the dataset exists, ``False`` otherwise.
    """
    from google.cloud.exceptions import NotFound

    try:
        client.get_dataset(dataset_reference)
        return True
    except NotFound:
        return False
def table_exists(client, table_reference):
    """Return if a table exists.

    Args:
        client (google.cloud.bigquery.client.Client):
            A client to connect to the BigQuery API.
        table_reference (google.cloud.bigquery.table.TableReference):
            A reference to the table to look for.

    Returns:
        bool: ``True`` if the table exists, ``False`` otherwise.
    """
    from google.cloud.exceptions import NotFound

    try:
        client.get_table(table_reference)
        return True
    except NotFound:
        return False
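A minimal sketch of using these two helpers together, assuming application default credentials; the project, dataset, and table IDs are placeholders:

from google.cloud import bigquery

client = bigquery.Client()
dataset_ref = bigquery.DatasetReference("my-project", "my_dataset")  # placeholders
table_ref = dataset_ref.table("my_table")  # placeholder table ID

if dataset_exists(client, dataset_ref):
    print("dataset found")
if not table_exists(client, table_ref):
    print("table is missing")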
def run_query(
    self,
    project_id,
    partition_id,
    read_options=None,
    query=None,
    gql_query=None,
    retry=google.api_core.gapic_v1.method.DEFAULT,
    timeout=google.api_core.gapic_v1.method.DEFAULT,
    metadata=None,
):
    """
    Queries for entities.

    Example:
        >>> from google.cloud import datastore_v1
        >>>
        >>> client = datastore_v1.DatastoreClient()
        >>>
        >>> # TODO: Initialize `project_id`:
        >>> project_id = ''
        >>>
        >>> # TODO: Initialize `partition_id`:
        >>> partition_id = {}
        >>>
        >>> response = client.run_query(project_id, partition_id)

    Args:
        project_id (str): The ID of the project against which to make the request.
        partition_id (Union[dict, ~google.cloud.datastore_v1.types.PartitionId]): Entities are partitioned into
            subsets, identified by a partition ID. Queries are scoped to a single
            partition. This partition ID is normalized with the standard default
            context partition ID.

            If a dict is provided, it must be of the same form as the protobuf
            message :class:`~google.cloud.datastore_v1.types.PartitionId`
        read_options (Union[dict, ~google.cloud.datastore_v1.types.ReadOptions]): The options for this query.

            If a dict is provided, it must be of the same form as the protobuf
            message :class:`~google.cloud.datastore_v1.types.ReadOptions`
        query (Union[dict, ~google.cloud.datastore_v1.types.Query]): The query to run.

            If a dict is provided, it must be of the same form as the protobuf
            message :class:`~google.cloud.datastore_v1.types.Query`
        gql_query (Union[dict, ~google.cloud.datastore_v1.types.GqlQuery]): The GQL query to run.

            If a dict is provided, it must be of the same form as the protobuf
            message :class:`~google.cloud.datastore_v1.types.GqlQuery`
        retry (Optional[google.api_core.retry.Retry]): A retry object used
            to retry requests. If ``None`` is specified, requests will
            not be retried.
        timeout (Optional[float]): The amount of time, in seconds, to wait
            for the request to complete. Note that if ``retry`` is
            specified, the timeout applies to each individual attempt.
        metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
            that is provided to the method.

    Returns:
        A :class:`~google.cloud.datastore_v1.types.RunQueryResponse` instance.

    Raises:
        google.api_core.exceptions.GoogleAPICallError: If the request
                failed for any reason.
        google.api_core.exceptions.RetryError: If the request failed due
                to a retryable error and retry attempts failed.
        ValueError: If the parameters are invalid.
    """
    # Wrap the transport method to add retry and timeout logic.
    if "run_query" not in self._inner_api_calls:
        self._inner_api_calls[
            "run_query"
        ] = google.api_core.gapic_v1.method.wrap_method(
            self.transport.run_query,
            default_retry=self._method_configs["RunQuery"].retry,
            default_timeout=self._method_configs["RunQuery"].timeout,
            client_info=self._client_info,
        )

    # Sanity check: We have some fields which are mutually exclusive;
    # raise ValueError if more than one is sent.
    google.api_core.protobuf_helpers.check_oneof(query=query, gql_query=gql_query)

    request = datastore_pb2.RunQueryRequest(
        project_id=project_id,
        partition_id=partition_id,
        read_options=read_options,
        query=query,
        gql_query=gql_query,
    )
    if metadata is None:
        metadata = []
    metadata = list(metadata)
    try:
        routing_header = [("project_id", project_id)]
    except AttributeError:
        pass
    else:
        routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
            routing_header
        )
        metadata.append(routing_metadata)

    return self._inner_api_calls["run_query"](
        request, retry=retry, timeout=timeout, metadata=metadata
    )
def commit(
    self,
    project_id,
    mode,
    mutations,
    transaction=None,
    retry=google.api_core.gapic_v1.method.DEFAULT,
    timeout=google.api_core.gapic_v1.method.DEFAULT,
    metadata=None,
):
    """
    Commits a transaction, optionally creating, deleting or modifying
    some entities.

    Example:
        >>> from google.cloud import datastore_v1
        >>> from google.cloud.datastore_v1 import enums
        >>>
        >>> client = datastore_v1.DatastoreClient()
        >>>
        >>> # TODO: Initialize `project_id`:
        >>> project_id = ''
        >>>
        >>> # TODO: Initialize `mode`:
        >>> mode = enums.CommitRequest.Mode.MODE_UNSPECIFIED
        >>>
        >>> # TODO: Initialize `mutations`:
        >>> mutations = []
        >>>
        >>> response = client.commit(project_id, mode, mutations)

    Args:
        project_id (str): The ID of the project against which to make the request.
        mode (~google.cloud.datastore_v1.types.Mode): The type of commit to perform. Defaults to ``TRANSACTIONAL``.
        mutations (list[Union[dict, ~google.cloud.datastore_v1.types.Mutation]]): The mutations to perform.

            When mode is ``TRANSACTIONAL``, mutations affecting a single entity
            are applied in order. The following sequences of mutations affecting
            a single entity are not permitted in a single ``Commit`` request:

            -  ``insert`` followed by ``insert``
            -  ``update`` followed by ``insert``
            -  ``upsert`` followed by ``insert``
            -  ``delete`` followed by ``update``

            When mode is ``NON_TRANSACTIONAL``, no two mutations may affect a
            single entity.

            If a dict is provided, it must be of the same form as the protobuf
            message :class:`~google.cloud.datastore_v1.types.Mutation`
        transaction (bytes): The identifier of the transaction associated with the commit. A
            transaction identifier is returned by a call to
            ``Datastore.BeginTransaction``.
        retry (Optional[google.api_core.retry.Retry]): A retry object used
            to retry requests. If ``None`` is specified, requests will
            not be retried.
        timeout (Optional[float]): The amount of time, in seconds, to wait
            for the request to complete. Note that if ``retry`` is
            specified, the timeout applies to each individual attempt.
        metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
            that is provided to the method.

    Returns:
        A :class:`~google.cloud.datastore_v1.types.CommitResponse` instance.

    Raises:
        google.api_core.exceptions.GoogleAPICallError: If the request
                failed for any reason.
        google.api_core.exceptions.RetryError: If the request failed due
                to a retryable error and retry attempts failed.
        ValueError: If the parameters are invalid.
    """
    # Wrap the transport method to add retry and timeout logic.
    if "commit" not in self._inner_api_calls:
        self._inner_api_calls[
            "commit"
        ] = google.api_core.gapic_v1.method.wrap_method(
            self.transport.commit,
            default_retry=self._method_configs["Commit"].retry,
            default_timeout=self._method_configs["Commit"].timeout,
            client_info=self._client_info,
        )

    # Sanity check: We have some fields which are mutually exclusive;
    # raise ValueError if more than one is sent.
    google.api_core.protobuf_helpers.check_oneof(transaction=transaction)

    request = datastore_pb2.CommitRequest(
        project_id=project_id,
        mode=mode,
        mutations=mutations,
        transaction=transaction,
    )
    if metadata is None:
        metadata = []
    metadata = list(metadata)
    try:
        routing_header = [("project_id", project_id)]
    except AttributeError:
        pass
    else:
        routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
            routing_header
        )
        metadata.append(routing_metadata)

    return self._inner_api_calls["commit"](
        request, retry=retry, timeout=timeout, metadata=metadata
    )
def get_gae_resource(self):
    """Return the GAE resource using the environment variables.

    :rtype: :class:`~google.cloud.logging.resource.Resource`
    :returns: Monitored resource for GAE.
    """
    gae_resource = Resource(
        type="gae_app",
        labels={
            "project_id": self.project_id,
            "module_id": self.module_id,
            "version_id": self.version_id,
        },
    )
    return gae_resource
def get_gae_labels(self):
    """Return the labels for GAE app.

    If the trace ID can be detected, it will be included as a label.
    Currently, no other labels are included.

    :rtype: dict
    :returns: Labels for GAE app.
    """
    gae_labels = {}

    trace_id = get_trace_id()
    if trace_id is not None:
        gae_labels[_TRACE_ID_LABEL] = trace_id

    return gae_labels
def emit(self, record):
    """Actually log the specified logging record.

    Overrides the default emit behavior of ``StreamHandler``.

    See https://docs.python.org/2/library/logging.html#handler-objects

    :type record: :class:`logging.LogRecord`
    :param record: The record to be logged.
    """
    message = super(AppEngineHandler, self).format(record)
    gae_labels = self.get_gae_labels()
    trace_id = (
        "projects/%s/traces/%s" % (self.project_id, gae_labels[_TRACE_ID_LABEL])
        if _TRACE_ID_LABEL in gae_labels
        else None
    )
    self.transport.send(
        record, message, resource=self.resource, labels=gae_labels, trace=trace_id
    )
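For context, a sketch of how this handler is typically wired into the standard logging machinery; this assumes the google-cloud-logging client library of the same era as this code, running inside an App Engine environment:

import logging

from google.cloud import logging as gcloud_logging
from google.cloud.logging.handlers import AppEngineHandler

client = gcloud_logging.Client()
handler = AppEngineHandler(client)

logger = logging.getLogger("my-app")  # placeholder logger name
logger.addHandler(handler)
logger.setLevel(logging.INFO)
logger.info("request handled")  # routed through AppEngineHandler.emit()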
def service_account_path(cls, project, service_account):
    """Return a fully-qualified service_account string."""
    return google.api_core.path_template.expand(
        "projects/{project}/serviceAccounts/{service_account}",
        project=project,
        service_account=service_account,
    )
def generate_access_token(
    self,
    name,
    scope,
    delegates=None,
    lifetime=None,
    retry=google.api_core.gapic_v1.method.DEFAULT,
    timeout=google.api_core.gapic_v1.method.DEFAULT,
    metadata=None,
):
    """
    Generates an OAuth 2.0 access token for a service account.

    Example:
        >>> from google.cloud import iam_credentials_v1
        >>>
        >>> client = iam_credentials_v1.IAMCredentialsClient()
        >>>
        >>> name = client.service_account_path('[PROJECT]', '[SERVICE_ACCOUNT]')
        >>>
        >>> # TODO: Initialize `scope`:
        >>> scope = []
        >>>
        >>> response = client.generate_access_token(name, scope)

    Args:
        name (str): The resource name of the service account for which the credentials are
            requested, in the following format:
            ``projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}``.
        scope (list[str]): Code to identify the scopes to be included in the OAuth 2.0 access
            token. See https://developers.google.com/identity/protocols/googlescopes
            for more information. At least one value required.
        delegates (list[str]): The sequence of service accounts in a delegation chain. Each service
            account must be granted the ``roles/iam.serviceAccountTokenCreator``
            role on its next service account in the chain. The last service
            account in the chain must be granted the
            ``roles/iam.serviceAccountTokenCreator`` role on the service account
            that is specified in the ``name`` field of the request.

            The delegates must have the following format:
            ``projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}``
        lifetime (Union[dict, ~google.cloud.iam_credentials_v1.types.Duration]): The desired lifetime duration of the access token in seconds.
            Must be set to a value less than or equal to 3600 (1 hour). If a value
            is not specified, the token's lifetime will be set to a default value
            of one hour.

            If a dict is provided, it must be of the same form as the protobuf
            message :class:`~google.cloud.iam_credentials_v1.types.Duration`
        retry (Optional[google.api_core.retry.Retry]): A retry object used
            to retry requests. If ``None`` is specified, requests will
            not be retried.
        timeout (Optional[float]): The amount of time, in seconds, to wait
            for the request to complete. Note that if ``retry`` is
            specified, the timeout applies to each individual attempt.
        metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
            that is provided to the method.

    Returns:
        A :class:`~google.cloud.iam_credentials_v1.types.GenerateAccessTokenResponse` instance.

    Raises:
        google.api_core.exceptions.GoogleAPICallError: If the request
                failed for any reason.
        google.api_core.exceptions.RetryError: If the request failed due
                to a retryable error and retry attempts failed.
        ValueError: If the parameters are invalid.
    """
    # Wrap the transport method to add retry and timeout logic.
    if "generate_access_token" not in self._inner_api_calls:
        self._inner_api_calls[
            "generate_access_token"
        ] = google.api_core.gapic_v1.method.wrap_method(
            self.transport.generate_access_token,
            default_retry=self._method_configs["GenerateAccessToken"].retry,
            default_timeout=self._method_configs["GenerateAccessToken"].timeout,
            client_info=self._client_info,
        )

    request = common_pb2.GenerateAccessTokenRequest(
        name=name, scope=scope, delegates=delegates, lifetime=lifetime
    )
    if metadata is None:
        metadata = []
    metadata = list(metadata)
    try:
        routing_header = [("name", name)]
    except AttributeError:
        pass
    else:
        routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
            routing_header
        )
        metadata.append(routing_metadata)

    return self._inner_api_calls["generate_access_token"](
        request, retry=retry, timeout=timeout, metadata=metadata
    )
def generate_id_token(
    self,
    name,
    audience,
    delegates=None,
    include_email=None,
    retry=google.api_core.gapic_v1.method.DEFAULT,
    timeout=google.api_core.gapic_v1.method.DEFAULT,
    metadata=None,
):
    """
    Generates an OpenID Connect ID token for a service account.

    Example:
        >>> from google.cloud import iam_credentials_v1
        >>>
        >>> client = iam_credentials_v1.IAMCredentialsClient()
        >>>
        >>> name = client.service_account_path('[PROJECT]', '[SERVICE_ACCOUNT]')
        >>>
        >>> # TODO: Initialize `audience`:
        >>> audience = ''
        >>>
        >>> response = client.generate_id_token(name, audience)

    Args:
        name (str): The resource name of the service account for which the credentials are
            requested, in the following format:
            ``projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}``.
        audience (str): The audience for the token, such as the API or account that this token
            grants access to.
        delegates (list[str]): The sequence of service accounts in a delegation chain. Each service
            account must be granted the ``roles/iam.serviceAccountTokenCreator``
            role on its next service account in the chain. The last service
            account in the chain must be granted the
            ``roles/iam.serviceAccountTokenCreator`` role on the service account
            that is specified in the ``name`` field of the request.

            The delegates must have the following format:
            ``projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}``
        include_email (bool): Include the service account email in the token. If set to ``true``,
            the token will contain ``email`` and ``email_verified`` claims.
        retry (Optional[google.api_core.retry.Retry]): A retry object used
            to retry requests. If ``None`` is specified, requests will
            not be retried.
        timeout (Optional[float]): The amount of time, in seconds, to wait
            for the request to complete. Note that if ``retry`` is
            specified, the timeout applies to each individual attempt.
        metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
            that is provided to the method.

    Returns:
        A :class:`~google.cloud.iam_credentials_v1.types.GenerateIdTokenResponse` instance.

    Raises:
        google.api_core.exceptions.GoogleAPICallError: If the request
                failed for any reason.
        google.api_core.exceptions.RetryError: If the request failed due
                to a retryable error and retry attempts failed.
        ValueError: If the parameters are invalid.
    """
    # Wrap the transport method to add retry and timeout logic.
    if "generate_id_token" not in self._inner_api_calls:
        self._inner_api_calls[
            "generate_id_token"
        ] = google.api_core.gapic_v1.method.wrap_method(
            self.transport.generate_id_token,
            default_retry=self._method_configs["GenerateIdToken"].retry,
            default_timeout=self._method_configs["GenerateIdToken"].timeout,
            client_info=self._client_info,
        )

    request = common_pb2.GenerateIdTokenRequest(
        name=name,
        audience=audience,
        delegates=delegates,
        include_email=include_email,
    )
    if metadata is None:
        metadata = []
    metadata = list(metadata)
    try:
        routing_header = [("name", name)]
    except AttributeError:
        pass
    else:
        routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
            routing_header
        )
        metadata.append(routing_metadata)

    return self._inner_api_calls["generate_id_token"](
        request, retry=retry, timeout=timeout, metadata=metadata
    )
def sign_blob(
    self,
    name,
    payload,
    delegates=None,
    retry=google.api_core.gapic_v1.method.DEFAULT,
    timeout=google.api_core.gapic_v1.method.DEFAULT,
    metadata=None,
):
    """
    Signs a blob using a service account's system-managed private key.

    Example:
        >>> from google.cloud import iam_credentials_v1
        >>>
        >>> client = iam_credentials_v1.IAMCredentialsClient()
        >>>
        >>> name = client.service_account_path('[PROJECT]', '[SERVICE_ACCOUNT]')
        >>>
        >>> # TODO: Initialize `payload`:
        >>> payload = b''
        >>>
        >>> response = client.sign_blob(name, payload)

    Args:
        name (str): The resource name of the service account for which the credentials are
            requested, in the following format:
            ``projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}``.
        payload (bytes): The bytes to sign.
        delegates (list[str]): The sequence of service accounts in a delegation chain. Each service
            account must be granted the ``roles/iam.serviceAccountTokenCreator``
            role on its next service account in the chain. The last service
            account in the chain must be granted the
            ``roles/iam.serviceAccountTokenCreator`` role on the service account
            that is specified in the ``name`` field of the request.

            The delegates must have the following format:
            ``projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}``
        retry (Optional[google.api_core.retry.Retry]): A retry object used
            to retry requests. If ``None`` is specified, requests will
            not be retried.
        timeout (Optional[float]): The amount of time, in seconds, to wait
            for the request to complete. Note that if ``retry`` is
            specified, the timeout applies to each individual attempt.
        metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
            that is provided to the method.

    Returns:
        A :class:`~google.cloud.iam_credentials_v1.types.SignBlobResponse` instance.

    Raises:
        google.api_core.exceptions.GoogleAPICallError: If the request
                failed for any reason.
        google.api_core.exceptions.RetryError: If the request failed due
                to a retryable error and retry attempts failed.
        ValueError: If the parameters are invalid.
    """
    # Wrap the transport method to add retry and timeout logic.
    if "sign_blob" not in self._inner_api_calls:
        self._inner_api_calls[
            "sign_blob"
        ] = google.api_core.gapic_v1.method.wrap_method(
            self.transport.sign_blob,
            default_retry=self._method_configs["SignBlob"].retry,
            default_timeout=self._method_configs["SignBlob"].timeout,
            client_info=self._client_info,
        )

    request = common_pb2.SignBlobRequest(
        name=name, payload=payload, delegates=delegates
    )
    if metadata is None:
        metadata = []
    metadata = list(metadata)
    try:
        routing_header = [("name", name)]
    except AttributeError:
        pass
    else:
        routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
            routing_header
        )
        metadata.append(routing_metadata)

    return self._inner_api_calls["sign_blob"](
        request, retry=retry, timeout=timeout, metadata=metadata
    )
def generate_identity_binding_access_token(
    self,
    name,
    scope,
    jwt,
    retry=google.api_core.gapic_v1.method.DEFAULT,
    timeout=google.api_core.gapic_v1.method.DEFAULT,
    metadata=None,
):
    """
    Exchange a JWT signed by a third-party identity provider for an
    OAuth 2.0 access token.

    Example:
        >>> from google.cloud import iam_credentials_v1
        >>>
        >>> client = iam_credentials_v1.IAMCredentialsClient()
        >>>
        >>> name = client.service_account_path('[PROJECT]', '[SERVICE_ACCOUNT]')
        >>>
        >>> # TODO: Initialize `scope`:
        >>> scope = []
        >>>
        >>> # TODO: Initialize `jwt`:
        >>> jwt = ''
        >>>
        >>> response = client.generate_identity_binding_access_token(name, scope, jwt)

    Args:
        name (str): The resource name of the service account for which the credentials are
            requested, in the following format:
            ``projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}``.
        scope (list[str]): Code to identify the scopes to be included in the OAuth 2.0 access
            token. See https://developers.google.com/identity/protocols/googlescopes
            for more information. At least one value required.
        jwt (str): Required. Input token. Must be in JWT format according to RFC7523
            (https://tools.ietf.org/html/rfc7523) and must have 'kid' field in the
            header. Supported signing algorithms: RS256 (RS512, ES256, ES512
            coming soon). Mandatory payload fields (along the lines of RFC 7523,
            section 3):

            -  iss: issuer of the token. Must provide a discovery document at
               $iss/.well-known/openid-configuration . The document needs to be
               formatted according to section 4.2 of the OpenID Connect Discovery
               1.0 specification.
            -  iat: Issue time in seconds since epoch. Must be in the past.
            -  exp: Expiration time in seconds since epoch. Must be less than 48
               hours after iat. We recommend creating tokens that live for less
               than 6 hours to improve security, unless business reasons mandate
               longer expiration times. Shorter token lifetimes are generally
               more secure since tokens that have been exfiltrated by attackers
               can be used for a shorter time. You can configure the maximum
               lifetime of the incoming token in the configuration of the mapper.
               The resulting Google token will expire within an hour or at "exp",
               whichever is earlier.
            -  sub: JWT subject, identity asserted in the JWT.
            -  aud: Configured in the mapper policy. By default the service
               account email.

            Claims from the incoming token can be transferred into the output
            token according to the mapper configuration. The outgoing claim size
            is limited. Outgoing claims size must be less than 4kB serialized
            as JSON without whitespace.

            Example header: { "alg": "RS256", "kid":
            "92a4265e14ab04d4d228a48d10d4ca31610936f8" } Example payload: {
            "iss": "https://accounts.google.com", "iat": 1517963104, "exp":
            1517966704, "aud": "https://iamcredentials.googleapis.com/", "sub":
            "113475438248934895348", "my\_claims": { "additional\_claim":
            "value" } }
        retry (Optional[google.api_core.retry.Retry]): A retry object used
            to retry requests. If ``None`` is specified, requests will
            not be retried.
        timeout (Optional[float]): The amount of time, in seconds, to wait
            for the request to complete. Note that if ``retry`` is
            specified, the timeout applies to each individual attempt.
        metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
            that is provided to the method.

    Returns:
        A :class:`~google.cloud.iam_credentials_v1.types.GenerateIdentityBindingAccessTokenResponse` instance.

    Raises:
        google.api_core.exceptions.GoogleAPICallError: If the request
                failed for any reason.
        google.api_core.exceptions.RetryError: If the request failed due
                to a retryable error and retry attempts failed.
        ValueError: If the parameters are invalid.
    """
    # Wrap the transport method to add retry and timeout logic.
    if "generate_identity_binding_access_token" not in self._inner_api_calls:
        self._inner_api_calls[
            "generate_identity_binding_access_token"
        ] = google.api_core.gapic_v1.method.wrap_method(
            self.transport.generate_identity_binding_access_token,
            default_retry=self._method_configs[
                "GenerateIdentityBindingAccessToken"
            ].retry,
            default_timeout=self._method_configs[
                "GenerateIdentityBindingAccessToken"
            ].timeout,
            client_info=self._client_info,
        )

    request = common_pb2.GenerateIdentityBindingAccessTokenRequest(
        name=name, scope=scope, jwt=jwt
    )
    if metadata is None:
        metadata = []
    metadata = list(metadata)
    try:
        routing_header = [("name", name)]
    except AttributeError:
        pass
    else:
        routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
            routing_header
        )
        metadata.append(routing_metadata)

    return self._inner_api_calls["generate_identity_binding_access_token"](
        request, retry=retry, timeout=timeout, metadata=metadata
    )
def entry_path(cls, project, location, entry_group, entry):
    """Return a fully-qualified entry string."""
    return google.api_core.path_template.expand(
        "projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}",
        project=project,
        location=location,
        entry_group=entry_group,
        entry=entry,
    )
Get an entry by target resource name. This method allows clients to use the resource name from the source Google Cloud Platform service to get the Cloud Data Catalog Entry. Example: >>> from google.cloud import datacatalog_v1beta1 >>> >>> client = datacatalog_v1beta1.DataCatalogClient() >>> >>> response = client.lookup_entry() Args: linked_resource (str): The full name of the Google Cloud Platform resource the Data Catalog entry represents. See: https://cloud.google.com/apis/design/resource\_names#full\_resource\_name Full names are case-sensitive. Examples: "//bigquery.googleapis.com/projects/projectId/datasets/datasetId/tables/tableId". "//pubsub.googleapis.com/projects/projectId/topics/topicId" sql_resource (str): The SQL name of the entry. SQL names are case-sensitive. Examples: 1. cloud\_pubsub.project\_id.topic\_id 2. bigquery.project\_id.dataset\_id.table\_id 3. datacatalog.project\_id.location\_id.entry\_group\_id.entry\_id retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.datacatalog_v1beta1.types.Entry` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. def lookup_entry( self, linked_resource=None, sql_resource=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Get an entry by target resource name. This method allows clients to use the resource name from the source Google Cloud Platform service to get the Cloud Data Catalog Entry. Example: >>> from google.cloud import datacatalog_v1beta1 >>> >>> client = datacatalog_v1beta1.DataCatalogClient() >>> >>> response = client.lookup_entry() Args: linked_resource (str): The full name of the Google Cloud Platform resource the Data Catalog entry represents. See: https://cloud.google.com/apis/design/resource\_names#full\_resource\_name Full names are case-sensitive. Examples: "//bigquery.googleapis.com/projects/projectId/datasets/datasetId/tables/tableId". "//pubsub.googleapis.com/projects/projectId/topics/topicId" sql_resource (str): The SQL name of the entry. SQL names are case-sensitive. Examples: 1. cloud\_pubsub.project\_id.topic\_id 2. bigquery.project\_id.dataset\_id.table\_id 3. datacatalog.project\_id.location\_id.entry\_group\_id.entry\_id retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.datacatalog_v1beta1.types.Entry` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. 
ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. if "lookup_entry" not in self._inner_api_calls: self._inner_api_calls[ "lookup_entry" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.lookup_entry, default_retry=self._method_configs["LookupEntry"].retry, default_timeout=self._method_configs["LookupEntry"].timeout, client_info=self._client_info, ) # Sanity check: We have some fields which are mutually exclusive; # raise ValueError if more than one is sent. google.api_core.protobuf_helpers.check_oneof( linked_resource=linked_resource, sql_resource=sql_resource ) request = datacatalog_pb2.LookupEntryRequest( linked_resource=linked_resource, sql_resource=sql_resource ) return self._inner_api_calls["lookup_entry"]( request, retry=retry, timeout=timeout, metadata=metadata )
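A minimal usage sketch for ``lookup_entry`` (the project, dataset, and table IDs below are placeholders): exactly one of ``linked_resource`` or ``sql_resource`` may be passed; supplying both, or neither, fails the oneof check with ``ValueError``.

from google.cloud import datacatalog_v1beta1

client = datacatalog_v1beta1.DataCatalogClient()

# Look up by the source service's full resource name.
entry = client.lookup_entry(
    linked_resource="//bigquery.googleapis.com/projects/my-project/datasets/my_dataset/tables/my_table"
)

# Equivalently, look up by SQL name -- never pass both arguments.
entry = client.lookup_entry(sql_resource="bigquery.my-project.my_dataset.my_table")
print(entry.name)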
Convert a path tuple into a full path string. Of the form: ``projects/{project_id}/databases/{database_id}/... documents/{document_path}`` Args: client (~.firestore_v1beta1.client.Client): The client that holds configuration details and a GAPIC client object. path (Tuple[str, ...]): The components in a document path. Returns: str: The fully-qualified document path. def _get_document_path(client, path): """Convert a path tuple into a full path string. Of the form: ``projects/{project_id}/databases/{database_id}/... documents/{document_path}`` Args: client (~.firestore_v1beta1.client.Client): The client that holds configuration details and a GAPIC client object. path (Tuple[str, ...]): The components in a document path. Returns: str: The fully-qualified document path. """ parts = (client._database_string, "documents") + path return _helpers.DOCUMENT_PATH_DELIMITER.join(parts)
Consume a gRPC stream that should contain a single response. The stream will correspond to a ``BatchGetDocuments`` request made for a single document. Args: response_iterator (~google.cloud.exceptions.GrpcRendezvous): A streaming iterator returned from a ``BatchGetDocuments`` request. Returns: ~google.cloud.proto.firestore.v1beta1.\ firestore_pb2.BatchGetDocumentsResponse: The single "get" response in the batch. Raises: ValueError: If anything other than exactly one response is returned. def _consume_single_get(response_iterator): """Consume a gRPC stream that should contain a single response. The stream will correspond to a ``BatchGetDocuments`` request made for a single document. Args: response_iterator (~google.cloud.exceptions.GrpcRendezvous): A streaming iterator returned from a ``BatchGetDocuments`` request. Returns: ~google.cloud.proto.firestore.v1beta1.\ firestore_pb2.BatchGetDocumentsResponse: The single "get" response in the batch. Raises: ValueError: If anything other than exactly one response is returned. """ # Calling ``list()`` consumes the entire iterator. all_responses = list(response_iterator) if len(all_responses) != 1: raise ValueError( "Unexpected response from `BatchGetDocumentsResponse`", all_responses, "Expected only one result", ) return all_responses[0]
Create and cache the full path for this document. Of the form: ``projects/{project_id}/databases/{database_id}/... documents/{document_path}`` Returns: str: The full document path. Raises: ValueError: If the current document reference has no ``client``. def _document_path(self): """Create and cache the full path for this document. Of the form: ``projects/{project_id}/databases/{database_id}/... documents/{document_path}`` Returns: str: The full document path. Raises: ValueError: If the current document reference has no ``client``. """ if self._document_path_internal is None: if self._client is None: raise ValueError("A document reference requires a `client`.") self._document_path_internal = _get_document_path(self._client, self._path) return self._document_path_internal
Create a sub-collection underneath the current document. Args: collection_id (str): The sub-collection identifier (sometimes referred to as the "kind"). Returns: ~.firestore_v1beta1.collection.CollectionReference: The child collection. def collection(self, collection_id): """Create a sub-collection underneath the current document. Args: collection_id (str): The sub-collection identifier (sometimes referred to as the "kind"). Returns: ~.firestore_v1beta1.collection.CollectionReference: The child collection. """ child_path = self._path + (collection_id,) return self._client.collection(*child_path)
Create the current document in the Firestore database. Args: document_data (dict): Property names and values to use for creating a document. Returns: google.cloud.firestore_v1beta1.types.WriteResult: The write result corresponding to the committed document. A write result contains an ``update_time`` field. Raises: ~google.cloud.exceptions.Conflict: If the document already exists. def create(self, document_data): """Create the current document in the Firestore database. Args: document_data (dict): Property names and values to use for creating a document. Returns: google.cloud.firestore_v1beta1.types.WriteResult: The write result corresponding to the committed document. A write result contains an ``update_time`` field. Raises: ~google.cloud.exceptions.Conflict: If the document already exists. """ batch = self._client.batch() batch.create(self, document_data) write_results = batch.commit() return _first_write_result(write_results)
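A short sketch of ``create`` with the documented ``Conflict`` behavior; the client construction and the ``users/alice`` path are placeholder assumptions.

from google.cloud import exceptions, firestore_v1beta1

client = firestore_v1beta1.Client()
doc_ref = client.collection("users").document("alice")

try:
    result = doc_ref.create({"name": "Alice", "active": True})
    print("Created at", result.update_time)
except exceptions.Conflict:
    # Documented failure mode: the document already exists.
    print("Document already exists; use set() or update() instead.")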
Replace the current document in the Firestore database. A write ``option`` can be specified to indicate preconditions of the "set" operation. If no ``option`` is specified and this document doesn't exist yet, this method will create it. Overwrites all content for the document with the fields in ``document_data``. This method performs almost the same functionality as :meth:`create`. The only difference is that this method doesn't make any requirements on the existence of the document (unless ``option`` is used), whereas :meth:`create` will fail if the document already exists. Args: document_data (dict): Property names and values to use for replacing a document. merge (Optional[bool] or Optional[List[str]]): If True, apply merging instead of overwriting the state of the document; if a list of field paths, merge only the named fields. Returns: google.cloud.firestore_v1beta1.types.WriteResult: The write result corresponding to the committed document. A write result contains an ``update_time`` field.

def set(self, document_data, merge=False):
    """Replace the current document in the Firestore database. A write ``option`` can be specified to indicate preconditions of the "set" operation. If no ``option`` is specified and this document doesn't exist yet, this method will create it. Overwrites all content for the document with the fields in ``document_data``. This method performs almost the same functionality as :meth:`create`. The only difference is that this method doesn't make any requirements on the existence of the document (unless ``option`` is used), whereas :meth:`create` will fail if the document already exists. Args: document_data (dict): Property names and values to use for replacing a document. merge (Optional[bool] or Optional[List[str]]): If True, apply merging instead of overwriting the state of the document; if a list of field paths, merge only the named fields. Returns: google.cloud.firestore_v1beta1.types.WriteResult: The write result corresponding to the committed document. A write result contains an ``update_time`` field. """
    batch = self._client.batch()
    batch.set(self, document_data, merge=merge)
    write_results = batch.commit()
    return _first_write_result(write_results)
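Continuing the sketch above, the difference between a plain ``set`` and ``merge=True`` in brief:

# Overwrite: the stored document becomes exactly this data.
doc_ref.set({"city": "Zurich", "population": 400000})

# Merge: only the provided fields change; unrelated fields survive.
doc_ref.set({"population": 410000}, merge=True)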
Update an existing document in the Firestore database. By default, this method verifies that the document exists on the server before making updates. A write ``option`` can be specified to override these preconditions. Each key in ``field_updates`` can either be a field name or a **field path** (For more information on **field paths**, see :meth:`~.firestore_v1beta1.client.Client.field_path`.) To illustrate this, consider a document with .. code-block:: python >>> snapshot = document.get() >>> snapshot.to_dict() { 'foo': { 'bar': 'baz', }, 'other': True, } stored on the server. If the field name is used in the update: .. code-block:: python >>> field_updates = { ... 'foo': { ... 'quux': 800, ... }, ... } >>> document.update(field_updates) then all of ``foo`` will be overwritten on the server and the new value will be .. code-block:: python >>> snapshot = document.get() >>> snapshot.to_dict() { 'foo': { 'quux': 800, }, 'other': True, } On the other hand, if a ``.``-delimited **field path** is used in the update: .. code-block:: python >>> field_updates = { ... 'foo.quux': 800, ... } >>> document.update(field_updates) then only ``foo.quux`` will be updated on the server and the field ``foo.bar`` will remain intact: .. code-block:: python >>> snapshot = document.get() >>> snapshot.to_dict() { 'foo': { 'bar': 'baz', 'quux': 800, }, 'other': True, } .. warning:: A **field path** can only be used as a top-level key in ``field_updates``. To delete / remove a field from an existing document, use the :attr:`~.firestore_v1beta1.transforms.DELETE_FIELD` sentinel. So with the example above, sending .. code-block:: python >>> field_updates = { ... 'other': firestore.DELETE_FIELD, ... } >>> document.update(field_updates) would update the value on the server to: .. code-block:: python >>> snapshot = document.get() >>> snapshot.to_dict() { 'foo': { 'bar': 'baz', }, } To set a field to the current time on the server when the update is received, use the :attr:`~.firestore_v1beta1.transforms.SERVER_TIMESTAMP` sentinel. Sending .. code-block:: python >>> field_updates = { ... 'foo.now': firestore.SERVER_TIMESTAMP, ... } >>> document.update(field_updates) would update the value on the server to: .. code-block:: python >>> snapshot = document.get() >>> snapshot.to_dict() { 'foo': { 'bar': 'baz', 'now': datetime.datetime(2012, ...), }, 'other': True, } Args: field_updates (dict): Field names or paths to update and values to update with. option (Optional[~.firestore_v1beta1.client.WriteOption]): A write option to make assertions / preconditions on the server state of the document before applying changes. Returns: google.cloud.firestore_v1beta1.types.WriteResult: The write result corresponding to the updated document. A write result contains an ``update_time`` field. Raises: ~google.cloud.exceptions.NotFound: If the document does not exist. def update(self, field_updates, option=None): """Update an existing document in the Firestore database. By default, this method verifies that the document exists on the server before making updates. A write ``option`` can be specified to override these preconditions. Each key in ``field_updates`` can either be a field name or a **field path** (For more information on **field paths**, see :meth:`~.firestore_v1beta1.client.Client.field_path`.) To illustrate this, consider a document with .. code-block:: python >>> snapshot = document.get() >>> snapshot.to_dict() { 'foo': { 'bar': 'baz', }, 'other': True, } stored on the server. If the field name is used in the update: .. 
code-block:: python >>> field_updates = { ... 'foo': { ... 'quux': 800, ... }, ... } >>> document.update(field_updates) then all of ``foo`` will be overwritten on the server and the new value will be .. code-block:: python >>> snapshot = document.get() >>> snapshot.to_dict() { 'foo': { 'quux': 800, }, 'other': True, } On the other hand, if a ``.``-delimited **field path** is used in the update: .. code-block:: python >>> field_updates = { ... 'foo.quux': 800, ... } >>> document.update(field_updates) then only ``foo.quux`` will be updated on the server and the field ``foo.bar`` will remain intact: .. code-block:: python >>> snapshot = document.get() >>> snapshot.to_dict() { 'foo': { 'bar': 'baz', 'quux': 800, }, 'other': True, } .. warning:: A **field path** can only be used as a top-level key in ``field_updates``. To delete / remove a field from an existing document, use the :attr:`~.firestore_v1beta1.transforms.DELETE_FIELD` sentinel. So with the example above, sending .. code-block:: python >>> field_updates = { ... 'other': firestore.DELETE_FIELD, ... } >>> document.update(field_updates) would update the value on the server to: .. code-block:: python >>> snapshot = document.get() >>> snapshot.to_dict() { 'foo': { 'bar': 'baz', }, } To set a field to the current time on the server when the update is received, use the :attr:`~.firestore_v1beta1.transforms.SERVER_TIMESTAMP` sentinel. Sending .. code-block:: python >>> field_updates = { ... 'foo.now': firestore.SERVER_TIMESTAMP, ... } >>> document.update(field_updates) would update the value on the server to: .. code-block:: python >>> snapshot = document.get() >>> snapshot.to_dict() { 'foo': { 'bar': 'baz', 'now': datetime.datetime(2012, ...), }, 'other': True, } Args: field_updates (dict): Field names or paths to update and values to update with. option (Optional[~.firestore_v1beta1.client.WriteOption]): A write option to make assertions / preconditions on the server state of the document before applying changes. Returns: google.cloud.firestore_v1beta1.types.WriteResult: The write result corresponding to the updated document. A write result contains an ``update_time`` field. Raises: ~google.cloud.exceptions.NotFound: If the document does not exist. """ batch = self._client.batch() batch.update(self, field_updates, option=option) write_results = batch.commit() return _first_write_result(write_results)
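To illustrate the ``option`` precondition on ``update``, a hedged sketch of optimistic concurrency using ``client.write_option`` (assuming the ``client`` and ``doc_ref`` from the sketches above; the field path is a placeholder):

snapshot = doc_ref.get()
# Only apply the update if the document is unchanged since the read.
option = client.write_option(last_update_time=snapshot.update_time)
doc_ref.update({"foo.quux": 800}, option=option)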
Delete the current document in the Firestore database. Args: option (Optional[~.firestore_v1beta1.client.WriteOption]): A write option to make assertions / preconditions on the server state of the document before applying changes. Returns: google.protobuf.timestamp_pb2.Timestamp: The time that the delete request was received by the server. If the document did not exist when the delete was sent (i.e. nothing was deleted), this method will still succeed and will still return the time that the request was received by the server. def delete(self, option=None): """Delete the current document in the Firestore database. Args: option (Optional[~.firestore_v1beta1.client.WriteOption]): A write option to make assertions / preconditions on the server state of the document before applying changes. Returns: google.protobuf.timestamp_pb2.Timestamp: The time that the delete request was received by the server. If the document did not exist when the delete was sent (i.e. nothing was deleted), this method will still succeed and will still return the time that the request was received by the server. """ write_pb = _helpers.pb_for_delete(self._document_path, option) commit_response = self._client._firestore_api.commit( self._client._database_string, [write_pb], transaction=None, metadata=self._client._rpc_metadata, ) return commit_response.commit_time
Retrieve a snapshot of the current document. See :meth:`~.firestore_v1beta1.client.Client.field_path` for more information on **field paths**. If a ``transaction`` is used and it already has write operations added, this method cannot be used (i.e. read-after-write is not allowed). Args: field_paths (Optional[Iterable[str, ...]]): An iterable of field paths (``.``-delimited list of field names) to use as a projection of document fields in the returned results. If no value is provided, all fields will be returned. transaction (Optional[~.firestore_v1beta1.transaction.\ Transaction]): An existing transaction that this reference will be retrieved in. Returns: ~.firestore_v1beta1.document.DocumentSnapshot: A snapshot of the current document. If the document does not exist at the time of `snapshot`, the snapshot `reference`, `data`, `update_time`, and `create_time` attributes will all be `None` and `exists` will be `False`. def get(self, field_paths=None, transaction=None): """Retrieve a snapshot of the current document. See :meth:`~.firestore_v1beta1.client.Client.field_path` for more information on **field paths**. If a ``transaction`` is used and it already has write operations added, this method cannot be used (i.e. read-after-write is not allowed). Args: field_paths (Optional[Iterable[str, ...]]): An iterable of field paths (``.``-delimited list of field names) to use as a projection of document fields in the returned results. If no value is provided, all fields will be returned. transaction (Optional[~.firestore_v1beta1.transaction.\ Transaction]): An existing transaction that this reference will be retrieved in. Returns: ~.firestore_v1beta1.document.DocumentSnapshot: A snapshot of the current document. If the document does not exist at the time of `snapshot`, the snapshot `reference`, `data`, `update_time`, and `create_time` attributes will all be `None` and `exists` will be `False`. """ if isinstance(field_paths, six.string_types): raise ValueError("'field_paths' must be a sequence of paths, not a string.") if field_paths is not None: mask = common_pb2.DocumentMask(field_paths=sorted(field_paths)) else: mask = None firestore_api = self._client._firestore_api try: document_pb = firestore_api.get_document( self._document_path, mask=mask, transaction=_helpers.get_transaction_id(transaction), metadata=self._client._rpc_metadata, ) except exceptions.NotFound: data = None exists = False create_time = None update_time = None else: data = _helpers.decode_dict(document_pb.fields, self._client) exists = True create_time = document_pb.create_time update_time = document_pb.update_time return DocumentSnapshot( reference=self, data=data, exists=exists, read_time=None, # No server read_time available create_time=create_time, update_time=update_time, )
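A brief projection sketch for ``get`` (the field paths are placeholders):

snapshot = doc_ref.get(field_paths=["foo.bar", "other"])
if snapshot.exists:
    print(snapshot.to_dict())  # only the projected fields are populated
else:
    print("Document does not exist.")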
List subcollections of the current document. Args: page_size (Optional[int]): The maximum number of collections in each page of results from this request. Non-positive values are ignored. Defaults to a sensible value set by the API. Returns: Sequence[~.firestore_v1beta1.collection.CollectionReference]: iterator of subcollections of the current document. If the document does not exist, the iterator will be empty.

def collections(self, page_size=None):
    """List subcollections of the current document. Args: page_size (Optional[int]): The maximum number of collections in each page of results from this request. Non-positive values are ignored. Defaults to a sensible value set by the API. Returns: Sequence[~.firestore_v1beta1.collection.CollectionReference]: iterator of subcollections of the current document. If the document does not exist, the iterator will be empty. """
    iterator = self._client._firestore_api.list_collection_ids(
        self._document_path,
        page_size=page_size,
        metadata=self._client._rpc_metadata,
    )
    iterator.document = self
    iterator.item_to_value = _item_to_collection_ref
    return iterator
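And a one-liner over ``collections``, which pages transparently:

for subcollection in doc_ref.collections(page_size=50):
    print(subcollection.id)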
Get a value from the snapshot data. If the data is nested, for example: .. code-block:: python >>> snapshot.to_dict() { 'top1': { 'middle2': { 'bottom3': 20, 'bottom4': 22, }, 'middle5': True, }, 'top6': b'\x00\x01 foo', } a **field path** can be used to access the nested data. For example: .. code-block:: python >>> snapshot.get('top1') { 'middle2': { 'bottom3': 20, 'bottom4': 22, }, 'middle5': True, } >>> snapshot.get('top1.middle2') { 'bottom3': 20, 'bottom4': 22, } >>> snapshot.get('top1.middle2.bottom3') 20 See :meth:`~.firestore_v1beta1.client.Client.field_path` for more information on **field paths**. A copy is returned since the data may contain mutable values, but the data stored in the snapshot must remain immutable. Args: field_path (str): A field path (``.``-delimited list of field names). Returns: Any or None: (A copy of) the value stored for the ``field_path`` or None if snapshot document does not exist. Raises: KeyError: If the ``field_path`` does not match nested data in the snapshot. def get(self, field_path): """Get a value from the snapshot data. If the data is nested, for example: .. code-block:: python >>> snapshot.to_dict() { 'top1': { 'middle2': { 'bottom3': 20, 'bottom4': 22, }, 'middle5': True, }, 'top6': b'\x00\x01 foo', } a **field path** can be used to access the nested data. For example: .. code-block:: python >>> snapshot.get('top1') { 'middle2': { 'bottom3': 20, 'bottom4': 22, }, 'middle5': True, } >>> snapshot.get('top1.middle2') { 'bottom3': 20, 'bottom4': 22, } >>> snapshot.get('top1.middle2.bottom3') 20 See :meth:`~.firestore_v1beta1.client.Client.field_path` for more information on **field paths**. A copy is returned since the data may contain mutable values, but the data stored in the snapshot must remain immutable. Args: field_path (str): A field path (``.``-delimited list of field names). Returns: Any or None: (A copy of) the value stored for the ``field_path`` or None if snapshot document does not exist. Raises: KeyError: If the ``field_path`` does not match nested data in the snapshot. """ if not self._exists: return None nested_data = field_path_module.get_nested_value(field_path, self._data) return copy.deepcopy(nested_data)
Helper for :meth:`Session.run_in_transaction`. Detect retryable abort, and impose server-supplied delay. :type exc: :class:`google.api_core.exceptions.Aborted` :param exc: exception for aborted transaction :type deadline: float :param deadline: maximum timestamp to continue retrying the transaction.

def _delay_until_retry(exc, deadline):
    """Helper for :meth:`Session.run_in_transaction`. Detect retryable abort, and impose server-supplied delay. :type exc: :class:`google.api_core.exceptions.Aborted` :param exc: exception for aborted transaction :type deadline: float :param deadline: maximum timestamp to continue retrying the transaction. """
    cause = exc.errors[0]
    now = time.time()
    if now >= deadline:
        # This helper runs inside an ``except`` block, so the bare
        # ``raise`` re-raises the active ``Aborted`` exception.
        raise
    delay = _get_retry_delay(cause)
    if delay is not None:
        if now + delay > deadline:
            # Sleeping would overshoot the deadline; give up now.
            raise
        time.sleep(delay)
Helper for :func:`_delay_until_retry`. :type cause: :class:`grpc.Call` :param cause: exception for aborted transaction :rtype: float or NoneType :returns: seconds to wait before retrying the transaction, or None if the error carries no ``RetryInfo``.

def _get_retry_delay(cause):
    """Helper for :func:`_delay_until_retry`. :type cause: :class:`grpc.Call` :param cause: exception for aborted transaction :rtype: float or NoneType :returns: seconds to wait before retrying the transaction, or None if the error carries no ``RetryInfo``. """
    metadata = dict(cause.trailing_metadata())
    retry_info_pb = metadata.get("google.rpc.retryinfo-bin")
    if retry_info_pb is not None:
        retry_info = RetryInfo()
        retry_info.ParseFromString(retry_info_pb)
        nanos = retry_info.retry_delay.nanos
        return retry_info.retry_delay.seconds + nanos / 1.0e9
    # No RetryInfo in the trailing metadata: no server-supplied delay.
    return None
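A sketch of the calling shape these two helpers serve (not the library's actual ``Session.run_in_transaction``, whose signature differs): ``_delay_until_retry`` must run inside the ``except`` block so its bare ``raise`` can re-raise the active ``Aborted``.

import time

from google.api_core import exceptions

def run_with_retries(work, timeout_secs=30.0):
    # `work` is any zero-argument callable that may raise Aborted.
    deadline = time.time() + timeout_secs
    while True:
        try:
            return work()
        except exceptions.Aborted as exc:
            # Sleeps for the server-supplied delay, or re-raises once
            # the deadline has passed.
            _delay_until_retry(exc, deadline)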
Return a fully-qualified database string. def database_path(cls, project, instance, database): """Return a fully-qualified database string.""" return google.api_core.path_template.expand( "projects/{project}/instances/{instance}/databases/{database}", project=project, instance=instance, database=database, )
Creates a new Cloud Spanner database and starts to prepare it for serving. The returned ``long-running operation`` will have a name of the format ``<database_name>/operations/<operation_id>`` and can be used to track preparation of the database. The ``metadata`` field type is ``CreateDatabaseMetadata``. The ``response`` field type is ``Database``, if successful. Example: >>> from google.cloud import spanner_admin_database_v1 >>> >>> client = spanner_admin_database_v1.DatabaseAdminClient() >>> >>> parent = client.instance_path('[PROJECT]', '[INSTANCE]') >>> >>> # TODO: Initialize `create_statement`: >>> create_statement = '' >>> >>> response = client.create_database(parent, create_statement) >>> >>> def callback(operation_future): ... # Handle result. ... result = operation_future.result() >>> >>> response.add_done_callback(callback) >>> >>> # Handle metadata. >>> metadata = response.metadata() Args: parent (str): Required. The name of the instance that will serve the new database. Values are of the form ``projects/<project>/instances/<instance>``. create_statement (str): Required. A ``CREATE DATABASE`` statement, which specifies the ID of the new database. The database ID must conform to the regular expression ``[a-z][a-z0-9_\-]*[a-z0-9]`` and be between 2 and 30 characters in length. If the database ID is a reserved word or if it contains a hyphen, the database ID must be enclosed in backticks (`````). extra_statements (list[str]): An optional list of DDL statements to run inside the newly created database. Statements can create tables, indexes, etc. These statements execute atomically with the creation of the database: if there is an error in any statement, the database is not created. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.spanner_admin_database_v1.types._OperationFuture` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. def create_database( self, parent, create_statement, extra_statements=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Creates a new Cloud Spanner database and starts to prepare it for serving. The returned ``long-running operation`` will have a name of the format ``<database_name>/operations/<operation_id>`` and can be used to track preparation of the database. The ``metadata`` field type is ``CreateDatabaseMetadata``. The ``response`` field type is ``Database``, if successful. Example: >>> from google.cloud import spanner_admin_database_v1 >>> >>> client = spanner_admin_database_v1.DatabaseAdminClient() >>> >>> parent = client.instance_path('[PROJECT]', '[INSTANCE]') >>> >>> # TODO: Initialize `create_statement`: >>> create_statement = '' >>> >>> response = client.create_database(parent, create_statement) >>> >>> def callback(operation_future): ... # Handle result. ... result = operation_future.result() >>> >>> response.add_done_callback(callback) >>> >>> # Handle metadata. 
>>> metadata = response.metadata() Args: parent (str): Required. The name of the instance that will serve the new database. Values are of the form ``projects/<project>/instances/<instance>``. create_statement (str): Required. A ``CREATE DATABASE`` statement, which specifies the ID of the new database. The database ID must conform to the regular expression ``[a-z][a-z0-9_\-]*[a-z0-9]`` and be between 2 and 30 characters in length. If the database ID is a reserved word or if it contains a hyphen, the database ID must be enclosed in backticks (`````). extra_statements (list[str]): An optional list of DDL statements to run inside the newly created database. Statements can create tables, indexes, etc. These statements execute atomically with the creation of the database: if there is an error in any statement, the database is not created. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.spanner_admin_database_v1.types._OperationFuture` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. if "create_database" not in self._inner_api_calls: self._inner_api_calls[ "create_database" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.create_database, default_retry=self._method_configs["CreateDatabase"].retry, default_timeout=self._method_configs["CreateDatabase"].timeout, client_info=self._client_info, ) request = spanner_database_admin_pb2.CreateDatabaseRequest( parent=parent, create_statement=create_statement, extra_statements=extra_statements, ) if metadata is None: metadata = [] metadata = list(metadata) try: routing_header = [("parent", parent)] except AttributeError: pass else: routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) operation = self._inner_api_calls["create_database"]( request, retry=retry, timeout=timeout, metadata=metadata ) return google.api_core.operation.from_gapic( operation, self.transport._operations_client, spanner_database_admin_pb2.Database, metadata_type=spanner_database_admin_pb2.CreateDatabaseMetadata, )
Updates the schema of a Cloud Spanner database by creating/altering/dropping tables, columns, indexes, etc. The returned ``long-running operation`` will have a name of the format ``<database_name>/operations/<operation_id>`` and can be used to track execution of the schema change(s). The ``metadata`` field type is ``UpdateDatabaseDdlMetadata``. The operation has no response. Example: >>> from google.cloud import spanner_admin_database_v1 >>> >>> client = spanner_admin_database_v1.DatabaseAdminClient() >>> >>> database = client.database_path('[PROJECT]', '[INSTANCE]', '[DATABASE]') >>> >>> # TODO: Initialize `statements`: >>> statements = [] >>> >>> response = client.update_database_ddl(database, statements) >>> >>> def callback(operation_future): ... # Handle result. ... result = operation_future.result() >>> >>> response.add_done_callback(callback) >>> >>> # Handle metadata. >>> metadata = response.metadata() Args: database (str): Required. The database to update. statements (list[str]): DDL statements to be applied to the database. operation_id (str): If empty, the new update request is assigned an automatically-generated operation ID. Otherwise, ``operation_id`` is used to construct the name of the resulting ``Operation``. Specifying an explicit operation ID simplifies determining whether the statements were executed in the event that the ``UpdateDatabaseDdl`` call is replayed, or the return value is otherwise lost: the ``database`` and ``operation_id`` fields can be combined to form the ``name`` of the resulting ``longrunning.Operation``: ``<database>/operations/<operation_id>``. ``operation_id`` should be unique within the database, and must be a valid identifier: ``[a-z][a-z0-9_]*``. Note that automatically-generated operation IDs always begin with an underscore. If the named operation already exists, ``UpdateDatabaseDdl`` returns ``ALREADY_EXISTS``. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.spanner_admin_database_v1.types._OperationFuture` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. def update_database_ddl( self, database, statements, operation_id=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Updates the schema of a Cloud Spanner database by creating/altering/dropping tables, columns, indexes, etc. The returned ``long-running operation`` will have a name of the format ``<database_name>/operations/<operation_id>`` and can be used to track execution of the schema change(s). The ``metadata`` field type is ``UpdateDatabaseDdlMetadata``. The operation has no response. 
Example: >>> from google.cloud import spanner_admin_database_v1 >>> >>> client = spanner_admin_database_v1.DatabaseAdminClient() >>> >>> database = client.database_path('[PROJECT]', '[INSTANCE]', '[DATABASE]') >>> >>> # TODO: Initialize `statements`: >>> statements = [] >>> >>> response = client.update_database_ddl(database, statements) >>> >>> def callback(operation_future): ... # Handle result. ... result = operation_future.result() >>> >>> response.add_done_callback(callback) >>> >>> # Handle metadata. >>> metadata = response.metadata() Args: database (str): Required. The database to update. statements (list[str]): DDL statements to be applied to the database. operation_id (str): If empty, the new update request is assigned an automatically-generated operation ID. Otherwise, ``operation_id`` is used to construct the name of the resulting ``Operation``. Specifying an explicit operation ID simplifies determining whether the statements were executed in the event that the ``UpdateDatabaseDdl`` call is replayed, or the return value is otherwise lost: the ``database`` and ``operation_id`` fields can be combined to form the ``name`` of the resulting ``longrunning.Operation``: ``<database>/operations/<operation_id>``. ``operation_id`` should be unique within the database, and must be a valid identifier: ``[a-z][a-z0-9_]*``. Note that automatically-generated operation IDs always begin with an underscore. If the named operation already exists, ``UpdateDatabaseDdl`` returns ``ALREADY_EXISTS``. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.spanner_admin_database_v1.types._OperationFuture` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. if "update_database_ddl" not in self._inner_api_calls: self._inner_api_calls[ "update_database_ddl" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.update_database_ddl, default_retry=self._method_configs["UpdateDatabaseDdl"].retry, default_timeout=self._method_configs["UpdateDatabaseDdl"].timeout, client_info=self._client_info, ) request = spanner_database_admin_pb2.UpdateDatabaseDdlRequest( database=database, statements=statements, operation_id=operation_id ) if metadata is None: metadata = [] metadata = list(metadata) try: routing_header = [("database", database)] except AttributeError: pass else: routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) operation = self._inner_api_calls["update_database_ddl"]( request, retry=retry, timeout=timeout, metadata=metadata ) return google.api_core.operation.from_gapic( operation, self.transport._operations_client, empty_pb2.Empty, metadata_type=spanner_database_admin_pb2.UpdateDatabaseDdlMetadata, )
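A hedged sketch of idempotent replay via ``operation_id`` (the DDL statement and the ``add_users_table`` ID are hypothetical):

from google.api_core import exceptions

database = client.database_path('[PROJECT]', '[INSTANCE]', '[DATABASE]')
statements = ["CREATE TABLE users (id INT64) PRIMARY KEY (id)"]

try:
    op = client.update_database_ddl(database, statements, operation_id="add_users_table")
    op.result()  # block until the schema change finishes
except exceptions.AlreadyExists:
    # The same operation_id already ran (e.g. a replayed request), so the
    # schema change is applied at most once.
    pass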
Get the scopes corresponding to admin / read-only state. Returns: Tuple[str, ...]: The tuple of scopes. def _get_scopes(self): """Get the scopes corresponding to admin / read-only state. Returns: Tuple[str, ...]: The tuple of scopes. """ if self._read_only: scopes = (READ_ONLY_SCOPE,) else: scopes = (DATA_SCOPE,) if self._admin: scopes += (ADMIN_SCOPE,) return scopes
Getter for the gRPC stub used for the Bigtable data API. For example: .. literalinclude:: snippets.py :start-after: [START bigtable_table_data_client] :end-before: [END bigtable_table_data_client] :rtype: :class:`.bigtable_v2.BigtableClient` :returns: A BigtableClient object.

def table_data_client(self):
    """Getter for the gRPC stub used for the Bigtable data API. For example: .. literalinclude:: snippets.py :start-after: [START bigtable_table_data_client] :end-before: [END bigtable_table_data_client] :rtype: :class:`.bigtable_v2.BigtableClient` :returns: A BigtableClient object. """
    if self._table_data_client is None:
        self._table_data_client = _create_gapic_client(bigtable_v2.BigtableClient)(self)
    return self._table_data_client
Getter for the gRPC stub used for the Table Admin API. For example: .. literalinclude:: snippets.py :start-after: [START bigtable_table_admin_client] :end-before: [END bigtable_table_admin_client] :rtype: :class:`.bigtable_admin_pb2.BigtableTableAdmin` :returns: A BigtableTableAdmin instance. :raises: :class:`ValueError <exceptions.ValueError>` if the current client is not an admin client or if it has not been :meth:`start`-ed. def table_admin_client(self): """Getter for the gRPC stub used for the Table Admin API. For example: .. literalinclude:: snippets.py :start-after: [START bigtable_table_admin_client] :end-before: [END bigtable_table_admin_client] :rtype: :class:`.bigtable_admin_pb2.BigtableTableAdmin` :returns: A BigtableTableAdmin instance. :raises: :class:`ValueError <exceptions.ValueError>` if the current client is not an admin client or if it has not been :meth:`start`-ed. """ if self._table_admin_client is None: if not self._admin: raise ValueError("Client is not an admin client.") self._table_admin_client = _create_gapic_client( bigtable_admin_v2.BigtableTableAdminClient )(self) return self._table_admin_client
Getter for the gRPC stub used for the Instance Admin API. For example: .. literalinclude:: snippets.py :start-after: [START bigtable_instance_admin_client] :end-before: [END bigtable_instance_admin_client] :rtype: :class:`.bigtable_admin_pb2.BigtableInstanceAdmin` :returns: A BigtableInstanceAdmin instance. :raises: :class:`ValueError <exceptions.ValueError>` if the current client is not an admin client or if it has not been :meth:`start`-ed.

def instance_admin_client(self):
    """Getter for the gRPC stub used for the Instance Admin API. For example: .. literalinclude:: snippets.py :start-after: [START bigtable_instance_admin_client] :end-before: [END bigtable_instance_admin_client] :rtype: :class:`.bigtable_admin_pb2.BigtableInstanceAdmin` :returns: A BigtableInstanceAdmin instance. :raises: :class:`ValueError <exceptions.ValueError>` if the current client is not an admin client or if it has not been :meth:`start`-ed. """
    if self._instance_admin_client is None:
        if not self._admin:
            raise ValueError("Client is not an admin client.")
        self._instance_admin_client = _create_gapic_client(
            bigtable_admin_v2.BigtableInstanceAdminClient
        )(self)
    return self._instance_admin_client
Factory to create an instance associated with this client. For example: .. literalinclude:: snippets.py :start-after: [START bigtable_create_prod_instance] :end-before: [END bigtable_create_prod_instance] :type instance_id: str :param instance_id: The ID of the instance. :type display_name: str :param display_name: (Optional) The display name for the instance in the Cloud Console UI. (Must be between 4 and 30 characters.) If this value is not set in the constructor, will fall back to the instance ID. :type instance_type: int :param instance_type: (Optional) The type of the instance. Possible values are represented by the following constants: :data:`google.cloud.bigtable.enums.InstanceType.PRODUCTION` and :data:`google.cloud.bigtable.enums.InstanceType.DEVELOPMENT`. Defaults to :data:`google.cloud.bigtable.enums.InstanceType.UNSPECIFIED`. :type labels: dict :param labels: (Optional) Labels are a flexible and lightweight mechanism for organizing cloud resources into groups that reflect a customer's organizational needs and deployment strategies. They can be used to filter resources and aggregate metrics. Label keys must be between 1 and 63 characters long. No more than 64 labels can be associated with a given resource. Label values must be between 0 and 63 characters long. Keys and values must both be under 128 bytes. :rtype: :class:`~google.cloud.bigtable.instance.Instance` :returns: an instance owned by this client.

def instance(self, instance_id, display_name=None, instance_type=None, labels=None):
    """Factory to create an instance associated with this client. For example: .. literalinclude:: snippets.py :start-after: [START bigtable_create_prod_instance] :end-before: [END bigtable_create_prod_instance] :type instance_id: str :param instance_id: The ID of the instance. :type display_name: str :param display_name: (Optional) The display name for the instance in the Cloud Console UI. (Must be between 4 and 30 characters.) If this value is not set in the constructor, will fall back to the instance ID. :type instance_type: int :param instance_type: (Optional) The type of the instance. Possible values are represented by the following constants: :data:`google.cloud.bigtable.enums.InstanceType.PRODUCTION` and :data:`google.cloud.bigtable.enums.InstanceType.DEVELOPMENT`. Defaults to :data:`google.cloud.bigtable.enums.InstanceType.UNSPECIFIED`. :type labels: dict :param labels: (Optional) Labels are a flexible and lightweight mechanism for organizing cloud resources into groups that reflect a customer's organizational needs and deployment strategies. They can be used to filter resources and aggregate metrics. Label keys must be between 1 and 63 characters long. No more than 64 labels can be associated with a given resource. Label values must be between 0 and 63 characters long. Keys and values must both be under 128 bytes. :rtype: :class:`~google.cloud.bigtable.instance.Instance` :returns: an instance owned by this client. """
    return Instance(
        instance_id,
        self,
        display_name=display_name,
        instance_type=instance_type,
        labels=labels,
    )
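For concreteness, a sketch of the factory with an explicit type and labels (the project and instance IDs are placeholders); note the factory is purely local, and a separate ``create`` call on the returned object is what actually provisions the instance:

from google.cloud import bigtable
from google.cloud.bigtable import enums

client = bigtable.Client(project="my-project", admin=True)
instance = client.instance(
    "my-instance",
    display_name="My production instance",
    instance_type=enums.InstanceType.PRODUCTION,
    labels={"env": "prod"},
)
# Nothing exists server-side yet; call instance.create(...) to provision it.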
List instances owned by the project. For example: .. literalinclude:: snippets.py :start-after: [START bigtable_list_instances] :end-before: [END bigtable_list_instances] :rtype: tuple :returns: (instances, failed_locations), where 'instances' is a list of :class:`google.cloud.bigtable.instance.Instance`, and 'failed_locations' is a list of locations which could not be resolved.

def list_instances(self):
    """List instances owned by the project. For example: .. literalinclude:: snippets.py :start-after: [START bigtable_list_instances] :end-before: [END bigtable_list_instances] :rtype: tuple :returns: (instances, failed_locations), where 'instances' is a list of :class:`google.cloud.bigtable.instance.Instance`, and 'failed_locations' is a list of locations which could not be resolved. """
    resp = self.instance_admin_client.list_instances(self.project_path)
    instances = [Instance.from_pb(instance, self) for instance in resp.instances]
    return instances, resp.failed_locations
List the clusters in the project. For example: .. literalinclude:: snippets.py :start-after: [START bigtable_list_clusters_in_project] :end-before: [END bigtable_list_clusters_in_project] :rtype: tuple :returns: (clusters, failed_locations), where 'clusters' is a list of :class:`google.cloud.bigtable.instance.Cluster`, and 'failed_locations' is a list of strings representing locations which could not be resolved.

def list_clusters(self):
    """List the clusters in the project. For example: .. literalinclude:: snippets.py :start-after: [START bigtable_list_clusters_in_project] :end-before: [END bigtable_list_clusters_in_project] :rtype: tuple :returns: (clusters, failed_locations), where 'clusters' is a list of :class:`google.cloud.bigtable.instance.Cluster`, and 'failed_locations' is a list of strings representing locations which could not be resolved. """
    resp = self.instance_admin_client.list_clusters(
        self.instance_admin_client.instance_path(self.project, "-")
    )
    clusters = []
    instances = {}
    for cluster in resp.clusters:
        match_cluster_name = _CLUSTER_NAME_RE.match(cluster.name)
        instance_id = match_cluster_name.group("instance")
        if instance_id not in instances:
            instances[instance_id] = self.instance(instance_id)
        clusters.append(Cluster.from_pb(cluster, instances[instance_id]))
    return clusters, resp.failed_locations
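Both listing calls return a ``(results, failed_locations)`` pair; a sketch of defensive consumption (assuming the admin ``client`` from the sketch above):

clusters, failed_locations = client.list_clusters()
for cluster in clusters:
    print(cluster.name)
if failed_locations:
    # Some locations were unreachable, so the listing may be incomplete.
    print("Could not resolve:", failed_locations)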
Deletes a metric descriptor. Only user-created [custom metrics](/monitoring/custom-metrics) can be deleted. def DeleteMetricDescriptor(self, request, context): """Deletes a metric descriptor. Only user-created [custom metrics](/monitoring/custom-metrics) can be deleted. """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!")
Lists all clusters owned by a project in either the specified zone or all zones. Example: >>> from google.cloud import container_v1 >>> >>> client = container_v1.ClusterManagerClient() >>> >>> # TODO: Initialize `project_id`: >>> project_id = '' >>> >>> # TODO: Initialize `zone`: >>> zone = '' >>> >>> response = client.list_clusters(project_id, zone) Args: project_id (str): Deprecated. The Google Developers Console `project ID or project number <https://support.google.com/cloud/answer/6158840>`__. This field has been deprecated and replaced by the parent field. zone (str): Deprecated. The name of the Google Compute Engine `zone <https://cloud.google.com/compute/docs/zones#available>`__ in which the cluster resides, or "-" for all zones. This field has been deprecated and replaced by the parent field. parent (str): The parent (project and location) where the clusters will be listed. Specified in the format 'projects/*/locations/*'. Location "-" matches all zones and all regions. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.container_v1.types.ListClustersResponse` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. def list_clusters( self, project_id, zone, parent=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Lists all clusters owned by a project in either the specified zone or all zones. Example: >>> from google.cloud import container_v1 >>> >>> client = container_v1.ClusterManagerClient() >>> >>> # TODO: Initialize `project_id`: >>> project_id = '' >>> >>> # TODO: Initialize `zone`: >>> zone = '' >>> >>> response = client.list_clusters(project_id, zone) Args: project_id (str): Deprecated. The Google Developers Console `project ID or project number <https://support.google.com/cloud/answer/6158840>`__. This field has been deprecated and replaced by the parent field. zone (str): Deprecated. The name of the Google Compute Engine `zone <https://cloud.google.com/compute/docs/zones#available>`__ in which the cluster resides, or "-" for all zones. This field has been deprecated and replaced by the parent field. parent (str): The parent (project and location) where the clusters will be listed. Specified in the format 'projects/*/locations/*'. Location "-" matches all zones and all regions. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.container_v1.types.ListClustersResponse` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. 
google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. if "list_clusters" not in self._inner_api_calls: self._inner_api_calls[ "list_clusters" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.list_clusters, default_retry=self._method_configs["ListClusters"].retry, default_timeout=self._method_configs["ListClusters"].timeout, client_info=self._client_info, ) request = cluster_service_pb2.ListClustersRequest( project_id=project_id, zone=zone, parent=parent ) return self._inner_api_calls["list_clusters"]( request, retry=retry, timeout=timeout, metadata=metadata )
Gets the details of a specific cluster. Example: >>> from google.cloud import container_v1 >>> >>> client = container_v1.ClusterManagerClient() >>> >>> # TODO: Initialize `project_id`: >>> project_id = '' >>> >>> # TODO: Initialize `zone`: >>> zone = '' >>> >>> # TODO: Initialize `cluster_id`: >>> cluster_id = '' >>> >>> response = client.get_cluster(project_id, zone, cluster_id) Args: project_id (str): Deprecated. The Google Developers Console `project ID or project number <https://support.google.com/cloud/answer/6158840>`__. This field has been deprecated and replaced by the name field. zone (str): Deprecated. The name of the Google Compute Engine `zone <https://cloud.google.com/compute/docs/zones#available>`__ in which the cluster resides. This field has been deprecated and replaced by the name field. cluster_id (str): Deprecated. The name of the cluster to retrieve. This field has been deprecated and replaced by the name field. name (str): The name (project, location, cluster) of the cluster to retrieve. Specified in the format 'projects/*/locations/*/clusters/\*'. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.container_v1.types.Cluster` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. def get_cluster( self, project_id, zone, cluster_id, name=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Gets the details of a specific cluster. Example: >>> from google.cloud import container_v1 >>> >>> client = container_v1.ClusterManagerClient() >>> >>> # TODO: Initialize `project_id`: >>> project_id = '' >>> >>> # TODO: Initialize `zone`: >>> zone = '' >>> >>> # TODO: Initialize `cluster_id`: >>> cluster_id = '' >>> >>> response = client.get_cluster(project_id, zone, cluster_id) Args: project_id (str): Deprecated. The Google Developers Console `project ID or project number <https://support.google.com/cloud/answer/6158840>`__. This field has been deprecated and replaced by the name field. zone (str): Deprecated. The name of the Google Compute Engine `zone <https://cloud.google.com/compute/docs/zones#available>`__ in which the cluster resides. This field has been deprecated and replaced by the name field. cluster_id (str): Deprecated. The name of the cluster to retrieve. This field has been deprecated and replaced by the name field. name (str): The name (project, location, cluster) of the cluster to retrieve. Specified in the format 'projects/*/locations/*/clusters/\*'. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. 
Returns: A :class:`~google.cloud.container_v1.types.Cluster` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. if "get_cluster" not in self._inner_api_calls: self._inner_api_calls[ "get_cluster" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.get_cluster, default_retry=self._method_configs["GetCluster"].retry, default_timeout=self._method_configs["GetCluster"].timeout, client_info=self._client_info, ) request = cluster_service_pb2.GetClusterRequest( project_id=project_id, zone=zone, cluster_id=cluster_id, name=name ) return self._inner_api_calls["get_cluster"]( request, retry=retry, timeout=timeout, metadata=metadata )
Sets labels on a cluster. Example: >>> from google.cloud import container_v1 >>> >>> client = container_v1.ClusterManagerClient() >>> >>> # TODO: Initialize `project_id`: >>> project_id = '' >>> >>> # TODO: Initialize `zone`: >>> zone = '' >>> >>> # TODO: Initialize `cluster_id`: >>> cluster_id = '' >>> >>> # TODO: Initialize `resource_labels`: >>> resource_labels = {} >>> >>> # TODO: Initialize `label_fingerprint`: >>> label_fingerprint = '' >>> >>> response = client.set_labels(project_id, zone, cluster_id, resource_labels, label_fingerprint) Args: project_id (str): Deprecated. The Google Developers Console `project ID or project number <https://developers.google.com/console/help/new/#projectnumber>`__. This field has been deprecated and replaced by the name field. zone (str): Deprecated. The name of the Google Compute Engine `zone <https://cloud.google.com/compute/docs/zones#available>`__ in which the cluster resides. This field has been deprecated and replaced by the name field. cluster_id (str): Deprecated. The name of the cluster. This field has been deprecated and replaced by the name field. resource_labels (dict[str -> str]): The labels to set for that cluster. label_fingerprint (str): The fingerprint of the previous set of labels for this resource, used to detect conflicts. The fingerprint is initially generated by Kubernetes Engine and changes after every request to modify or update labels. You must always provide an up-to-date fingerprint hash when updating or changing labels. Make a <code>get()</code> request to the resource to get the latest fingerprint. name (str): The name (project, location, cluster id) of the cluster to set labels. Specified in the format 'projects/*/locations/*/clusters/\*'. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.container_v1.types.Operation` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. def set_labels( self, project_id, zone, cluster_id, resource_labels, label_fingerprint, name=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Sets labels on a cluster. Example: >>> from google.cloud import container_v1 >>> >>> client = container_v1.ClusterManagerClient() >>> >>> # TODO: Initialize `project_id`: >>> project_id = '' >>> >>> # TODO: Initialize `zone`: >>> zone = '' >>> >>> # TODO: Initialize `cluster_id`: >>> cluster_id = '' >>> >>> # TODO: Initialize `resource_labels`: >>> resource_labels = {} >>> >>> # TODO: Initialize `label_fingerprint`: >>> label_fingerprint = '' >>> >>> response = client.set_labels(project_id, zone, cluster_id, resource_labels, label_fingerprint) Args: project_id (str): Deprecated. The Google Developers Console `project ID or project number <https://developers.google.com/console/help/new/#projectnumber>`__. This field has been deprecated and replaced by the name field. zone (str): Deprecated. 
The name of the Google Compute Engine `zone <https://cloud.google.com/compute/docs/zones#available>`__ in which the cluster resides. This field has been deprecated and replaced by the name field. cluster_id (str): Deprecated. The name of the cluster. This field has been deprecated and replaced by the name field. resource_labels (dict[str -> str]): The labels to set for that cluster. label_fingerprint (str): The fingerprint of the previous set of labels for this resource, used to detect conflicts. The fingerprint is initially generated by Kubernetes Engine and changes after every request to modify or update labels. You must always provide an up-to-date fingerprint hash when updating or changing labels. Make a ``get()`` request to the resource to get the latest fingerprint. name (str): The name (project, location, cluster id) of the cluster to set labels on. Specified in the format 'projects/*/locations/*/clusters/\*'. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.container_v1.types.Operation` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. if "set_labels" not in self._inner_api_calls: self._inner_api_calls[ "set_labels" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.set_labels, default_retry=self._method_configs["SetLabels"].retry, default_timeout=self._method_configs["SetLabels"].timeout, client_info=self._client_info, ) request = cluster_service_pb2.SetLabelsRequest( project_id=project_id, zone=zone, cluster_id=cluster_id, resource_labels=resource_labels, label_fingerprint=label_fingerprint, name=name, ) return self._inner_api_calls["set_labels"]( request, retry=retry, timeout=timeout, metadata=metadata )
Return a fully-qualified metric string. def metric_path(cls, project, metric): """Return a fully-qualified metric string.""" return google.api_core.path_template.expand( "projects/{project}/metrics/{metric}", project=project, metric=metric )
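This and the other ``*_path`` helpers below are thin wrappers over ``google.api_core.path_template.expand``. A minimal sketch of the expansion, using hypothetical identifiers:

from google.api_core import path_template

# Hypothetical project and metric names, for illustration only.
path = path_template.expand(
    "projects/{project}/metrics/{metric}",
    project="my-project",
    metric="error_count",
)
print(path)  # projects/my-project/metrics/error_count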
Gets a logs-based metric. Example: >>> from google.cloud import logging_v2 >>> >>> client = logging_v2.MetricsServiceV2Client() >>> >>> metric_name = client.metric_path('[PROJECT]', '[METRIC]') >>> >>> response = client.get_log_metric(metric_name) Args: metric_name (str): The resource name of the desired metric: :: "projects/[PROJECT_ID]/metrics/[METRIC_ID]" retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.logging_v2.types.LogMetric` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. def get_log_metric( self, metric_name, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Gets a logs-based metric. Example: >>> from google.cloud import logging_v2 >>> >>> client = logging_v2.MetricsServiceV2Client() >>> >>> metric_name = client.metric_path('[PROJECT]', '[METRIC]') >>> >>> response = client.get_log_metric(metric_name) Args: metric_name (str): The resource name of the desired metric: :: "projects/[PROJECT_ID]/metrics/[METRIC_ID]" retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.logging_v2.types.LogMetric` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. if "get_log_metric" not in self._inner_api_calls: self._inner_api_calls[ "get_log_metric" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.get_log_metric, default_retry=self._method_configs["GetLogMetric"].retry, default_timeout=self._method_configs["GetLogMetric"].timeout, client_info=self._client_info, ) request = logging_metrics_pb2.GetLogMetricRequest(metric_name=metric_name) if metadata is None: metadata = [] metadata = list(metadata) try: routing_header = [("metric_name", metric_name)] except AttributeError: pass else: routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) return self._inner_api_calls["get_log_metric"]( request, retry=retry, timeout=timeout, metadata=metadata )
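The routing-header block above folds the request's ``metric_name`` into the gRPC metadata so the backend can route the call. A sketch of the metadata entry ``to_grpc_metadata`` produces; the exact URL-encoded form shown in the comment is an assumption based on its urlencoding behavior:

from google.api_core.gapic_v1 import routing_header

entry = routing_header.to_grpc_metadata(
    [("metric_name", "projects/my-project/metrics/my-metric")]
)
# Roughly: ('x-goog-request-params',
#           'metric_name=projects%2Fmy-project%2Fmetrics%2Fmy-metric')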
Return a fully-qualified metric_descriptor string. def metric_descriptor_path(cls, project, metric_descriptor): """Return a fully-qualified metric_descriptor string.""" return google.api_core.path_template.expand( "projects/{project}/metricDescriptors/{metric_descriptor=**}", project=project, metric_descriptor=metric_descriptor, )
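Note the ``{metric_descriptor=**}`` form: the ``=**`` pattern lets the variable span multiple path segments, which matters for descriptor types that contain slashes. A hedged sketch, assuming ``expand`` accepts the named ``=**`` pattern as written in the template above:

from google.api_core import path_template

path = path_template.expand(
    "projects/{project}/metricDescriptors/{metric_descriptor=**}",
    project="my-project",
    metric_descriptor="custom.googleapis.com/my/metric",  # multi-segment value
)
# projects/my-project/metricDescriptors/custom.googleapis.com/my/metric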
Return a fully-qualified monitored_resource_descriptor string. def monitored_resource_descriptor_path(cls, project, monitored_resource_descriptor): """Return a fully-qualified monitored_resource_descriptor string.""" return google.api_core.path_template.expand( "projects/{project}/monitoredResourceDescriptors/{monitored_resource_descriptor}", project=project, monitored_resource_descriptor=monitored_resource_descriptor, )
Construct a KeyRange protobuf. :rtype: :class:`~google.cloud.spanner_v1.proto.keys_pb2.KeyRange` :returns: protobuf corresponding to this instance. def _to_pb(self): """Construct a KeyRange protobuf. :rtype: :class:`~google.cloud.spanner_v1.proto.keys_pb2.KeyRange` :returns: protobuf corresponding to this instance. """ kwargs = {} if self.start_open is not None: kwargs["start_open"] = _make_list_value_pb(self.start_open) if self.start_closed is not None: kwargs["start_closed"] = _make_list_value_pb(self.start_closed) if self.end_open is not None: kwargs["end_open"] = _make_list_value_pb(self.end_open) if self.end_closed is not None: kwargs["end_closed"] = _make_list_value_pb(self.end_closed) return KeyRangePB(**kwargs)
Return keyrange's state as a dict. :rtype: dict :returns: state of this instance. def _to_dict(self): """Return keyrange's state as a dict. :rtype: dict :returns: state of this instance. """ mapping = {} if self.start_open: mapping["start_open"] = self.start_open if self.start_closed: mapping["start_closed"] = self.start_closed if self.end_open: mapping["end_open"] = self.end_open if self.end_closed: mapping["end_closed"] = self.end_closed return mapping
Construct a KeySet protobuf. :rtype: :class:`~google.cloud.spanner_v1.proto.keys_pb2.KeySet` :returns: protobuf corresponding to this instance. def _to_pb(self): """Construct a KeySet protobuf. :rtype: :class:`~google.cloud.spanner_v1.proto.keys_pb2.KeySet` :returns: protobuf corresponding to this instance. """ if self.all_: return KeySetPB(all=True) kwargs = {} if self.keys: kwargs["keys"] = _make_list_value_pbs(self.keys) if self.ranges: kwargs["ranges"] = [krange._to_pb() for krange in self.ranges] return KeySetPB(**kwargs)
Return keyset's state as a dict. The result can be used to serialize the instance and reconstitute it later using :meth:`_from_dict`. :rtype: dict :returns: state of this instance. def _to_dict(self): """Return keyset's state as a dict. The result can be used to serialize the instance and reconstitute it later using :meth:`_from_dict`. :rtype: dict :returns: state of this instance. """ if self.all_: return {"all": True} return { "keys": self.keys, "ranges": [keyrange._to_dict() for keyrange in self.ranges], }
Create an instance from the corresponding state mapping. :type mapping: dict :param mapping: the instance state. def _from_dict(cls, mapping): """Create an instance from the corresponding state mapping. :type mapping: dict :param mapping: the instance state. """ if mapping.get("all"): return cls(all_=True) r_mappings = mapping.get("ranges", ()) ranges = [KeyRange(**r_mapping) for r_mapping in r_mappings] return cls(keys=mapping.get("keys", ()), ranges=ranges)
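Taken together, ``_to_dict`` and ``_from_dict`` give a JSON-friendly round trip for key sets. A minimal sketch, assuming the usual constructors in ``google.cloud.spanner_v1.keyset``:

from google.cloud.spanner_v1.keyset import KeyRange, KeySet

keyset = KeySet(
    keys=[["alice@example.com"]],
    ranges=[KeyRange(start_closed=["a"], end_open=["m"])],
)
state = keyset._to_dict()
# {'keys': [['alice@example.com']],
#  'ranges': [{'start_closed': ['a'], 'end_open': ['m']}]}
restored = KeySet._from_dict(state)  # an equivalent KeySet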
Map errors for Unary-Unary and Stream-Unary gRPC callables. def _wrap_unary_errors(callable_): """Map errors for Unary-Unary and Stream-Unary gRPC callables.""" _patch_callable_name(callable_) @six.wraps(callable_) def error_remapped_callable(*args, **kwargs): try: return callable_(*args, **kwargs) except grpc.RpcError as exc: six.raise_from(exceptions.from_grpc_error(exc), exc) return error_remapped_callable
Wrap errors for Unary-Stream and Stream-Stream gRPC callables. The callables that return iterators require a bit more logic to re-map errors when iterating. This wraps both the initial invocation and the iterator of the return value to re-map errors. def _wrap_stream_errors(callable_): """Wrap errors for Unary-Stream and Stream-Stream gRPC callables. The callables that return iterators require a bit more logic to re-map errors when iterating. This wraps both the initial invocation and the iterator of the return value to re-map errors. """ _patch_callable_name(callable_) @general_helpers.wraps(callable_) def error_remapped_callable(*args, **kwargs): try: result = callable_(*args, **kwargs) return _StreamingResponseIterator(result) except grpc.RpcError as exc: six.raise_from(exceptions.from_grpc_error(exc), exc) return error_remapped_callable
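A minimal sketch of how these wrappers might be applied to a raw gRPC method; the channel, service path, and request object are purely illustrative:

from google.api_core import exceptions

# Hypothetical unary-unary method handle on an existing `channel`.
get_thing = channel.unary_unary("/example.v1.ThingService/GetThing")
safe_get_thing = _wrap_unary_errors(get_thing)

try:
    response = safe_get_thing(request)
except exceptions.NotFound:
    # grpc.StatusCode.NOT_FOUND was remapped by exceptions.from_grpc_error.
    response = None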
Create a secure channel with credentials. Args: target (str): The target service address in the format 'hostname:port'. credentials (google.auth.credentials.Credentials): The credentials. If not specified, then this function will attempt to ascertain the credentials from the environment using :func:`google.auth.default`. scopes (Sequence[str]): An optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. ssl_credentials (grpc.ChannelCredentials): Optional SSL channel credentials. This can be used to specify different certificates. kwargs: Additional keyword args passed to :func:`grpc_gcp.secure_channel` or :func:`grpc.secure_channel`. Returns: grpc.Channel: The created channel. def create_channel( target, credentials=None, scopes=None, ssl_credentials=None, **kwargs ): """Create a secure channel with credentials. Args: target (str): The target service address in the format 'hostname:port'. credentials (google.auth.credentials.Credentials): The credentials. If not specified, then this function will attempt to ascertain the credentials from the environment using :func:`google.auth.default`. scopes (Sequence[str]): An optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. ssl_credentials (grpc.ChannelCredentials): Optional SSL channel credentials. This can be used to specify different certificates. kwargs: Additional keyword args passed to :func:`grpc_gcp.secure_channel` or :func:`grpc.secure_channel`. Returns: grpc.Channel: The created channel. """ if credentials is None: credentials, _ = google.auth.default(scopes=scopes) else: credentials = google.auth.credentials.with_scopes_if_required( credentials, scopes ) request = google.auth.transport.requests.Request() # Create the metadata plugin for inserting the authorization header. metadata_plugin = google.auth.transport.grpc.AuthMetadataPlugin( credentials, request ) # Create a set of grpc.CallCredentials using the metadata plugin. google_auth_credentials = grpc.metadata_call_credentials(metadata_plugin) if ssl_credentials is None: ssl_credentials = grpc.ssl_channel_credentials() # Combine the ssl credentials and the authorization credentials. composite_credentials = grpc.composite_channel_credentials( ssl_credentials, google_auth_credentials ) if HAS_GRPC_GCP: # If the grpc_gcp module is available, use grpc_gcp.secure_channel; # otherwise, fall back to grpc.secure_channel to create the channel. return grpc_gcp.secure_channel(target, composite_credentials, **kwargs) else: return grpc.secure_channel(target, composite_credentials, **kwargs)
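Hypothetical usage, letting the credentials come from the environment via :func:`google.auth.default`:

channel = create_channel(
    "spanner.googleapis.com:443",  # hypothetical target
    scopes=["https://www.googleapis.com/auth/cloud-platform"],
)
# `channel` now carries both the SSL credentials and per-call auth
# credentials, composed via grpc.composite_channel_credentials.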
Get the next response from the stream. Returns: protobuf.Message: A single response from the stream. def next(self): """Get the next response from the stream. Returns: protobuf.Message: A single response from the stream. """ try: return six.next(self._wrapped) except grpc.RpcError as exc: six.raise_from(exceptions.from_grpc_error(exc), exc)
A functools.wraps helper that handles partial objects on Python 2. def wraps(wrapped): """A functools.wraps helper that handles partial objects on Python 2.""" if isinstance(wrapped, functools.partial): return six.wraps(wrapped, assigned=_PARTIAL_VALID_ASSIGNMENTS) else: return six.wraps(wrapped)
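The helper exists because on Python 2 a ``functools.partial`` has no ``__name__``/``__module__``, so the default ``functools.wraps`` assignments raise ``AttributeError``; ``_PARTIAL_VALID_ASSIGNMENTS`` restricts the copied attributes (its exact contents are an assumption here). A sketch:

import functools

def add(a, b):
    """Add two numbers."""
    return a + b

one_plus = functools.partial(add, 1)

@wraps(one_plus)  # safe even though `one_plus` lacks __name__ on Python 2
def traced(*args, **kwargs):
    return one_plus(*args, **kwargs)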
Determine the default project, either explicitly or implicitly as a fall-back. In the implicit case, four environments are supported. In order of precedence, the implicit environments are: * DATASTORE_DATASET environment variable (for ``gcd`` / emulator testing) * GOOGLE_CLOUD_PROJECT environment variable * Google App Engine application ID * Google Compute Engine project ID (from metadata server) :type project: str :param project: Optional. The project to use as default. :rtype: str or ``NoneType`` :returns: Default project if it can be determined. def _determine_default_project(project=None): """Determine the default project, either explicitly or implicitly as a fall-back. In the implicit case, four environments are supported. In order of precedence, the implicit environments are: * DATASTORE_DATASET environment variable (for ``gcd`` / emulator testing) * GOOGLE_CLOUD_PROJECT environment variable * Google App Engine application ID * Google Compute Engine project ID (from metadata server) :type project: str :param project: Optional. The project to use as default. :rtype: str or ``NoneType`` :returns: Default project if it can be determined. """ if project is None: project = _get_gcd_project() if project is None: project = _base_default_project(project=project) return project
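A sketch of the fall-back behaviour, assuming ``_get_gcd_project`` reads the ``DATASTORE_DATASET`` variable and no emulator override is present:

import os

os.environ.pop("DATASTORE_DATASET", None)          # no emulator override
os.environ["GOOGLE_CLOUD_PROJECT"] = "my-project"  # hypothetical project

project = _determine_default_project()               # -> "my-project"
explicit = _determine_default_project("other-proj")  # explicit value wins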
Repeat lookup until all keys found (unless stop requested). Helper function for :meth:`Client.get_multi`. :type datastore_api: :class:`google.cloud.datastore._http.HTTPDatastoreAPI` or :class:`google.cloud.datastore_v1.gapic.DatastoreClient` :param datastore_api: The datastore API object used to connect to datastore. :type project: str :param project: The project to make the request for. :type key_pbs: list of :class:`.entity_pb2.Key` :param key_pbs: The keys to retrieve from the datastore. :type missing: list :param missing: (Optional) If a list is passed, the key-only entity protobufs returned by the backend as "missing" will be copied into it. :type deferred: list :param deferred: (Optional) If a list is passed, the key protobufs returned by the backend as "deferred" will be copied into it. :type eventual: bool :param eventual: If False (the default), request ``STRONG`` read consistency. If True, request ``EVENTUAL`` read consistency. :type transaction_id: str :param transaction_id: If passed, make the request in the scope of the given transaction. Incompatible with ``eventual==True``. :rtype: list of :class:`.entity_pb2.Entity` :returns: The requested entities. :raises: :class:`ValueError` if missing / deferred are not None or an empty list. def _extended_lookup( datastore_api, project, key_pbs, missing=None, deferred=None, eventual=False, transaction_id=None, ): """Repeat lookup until all keys found (unless stop requested). Helper function for :meth:`Client.get_multi`. :type datastore_api: :class:`google.cloud.datastore._http.HTTPDatastoreAPI` or :class:`google.cloud.datastore_v1.gapic.DatastoreClient` :param datastore_api: The datastore API object used to connect to datastore. :type project: str :param project: The project to make the request for. :type key_pbs: list of :class:`.entity_pb2.Key` :param key_pbs: The keys to retrieve from the datastore. :type missing: list :param missing: (Optional) If a list is passed, the key-only entity protobufs returned by the backend as "missing" will be copied into it. :type deferred: list :param deferred: (Optional) If a list is passed, the key protobufs returned by the backend as "deferred" will be copied into it. :type eventual: bool :param eventual: If False (the default), request ``STRONG`` read consistency. If True, request ``EVENTUAL`` read consistency. :type transaction_id: str :param transaction_id: If passed, make the request in the scope of the given transaction. Incompatible with ``eventual==True``. :rtype: list of :class:`.entity_pb2.Entity` :returns: The requested entities. :raises: :class:`ValueError` if missing / deferred are not None or an empty list. """ if missing is not None and missing != []: raise ValueError("missing must be None or an empty list") if deferred is not None and deferred != []: raise ValueError("deferred must be None or an empty list") results = [] loop_num = 0 read_options = helpers.get_read_options(eventual, transaction_id) while loop_num < _MAX_LOOPS: # loop against possible deferred. loop_num += 1 lookup_response = datastore_api.lookup( project, key_pbs, read_options=read_options ) # Accumulate the new results. results.extend(result.entity for result in lookup_response.found) if missing is not None: missing.extend(result.entity for result in lookup_response.missing) if deferred is not None: deferred.extend(lookup_response.deferred) break if len(lookup_response.deferred) == 0: break # We have deferred keys, and the user didn't ask to know about # them, so retry (but only with the deferred ones). 
key_pbs = lookup_response.deferred return results
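The ``missing``/``deferred`` contract is easiest to see through the public wrapper, :meth:`Client.get_multi`, shown below. A hedged sketch assuming a ``client`` and some ``keys``:

missing = []
deferred = []

entities = client.get_multi(keys, missing=missing, deferred=deferred)

# `missing` now holds key-only entities the backend reported as absent,
# and `deferred` holds keys the caller may retry later.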
Getter for a wrapped API object. def _datastore_api(self): """Getter for a wrapped API object.""" if self._datastore_api_internal is None: if self._use_grpc: self._datastore_api_internal = make_datastore_api(self) else: self._datastore_api_internal = HTTPDatastoreAPI(self) return self._datastore_api_internal
Retrieve an entity from a single key (if it exists). .. note:: This is just a thin wrapper over :meth:`get_multi`. The backend API does not make a distinction between a single key or multiple keys in a lookup request. :type key: :class:`google.cloud.datastore.key.Key` :param key: The key to be retrieved from the datastore. :type missing: list :param missing: (Optional) If a list is passed, the key-only entities returned by the backend as "missing" will be copied into it. :type deferred: list :param deferred: (Optional) If a list is passed, the keys returned by the backend as "deferred" will be copied into it. :type transaction: :class:`~google.cloud.datastore.transaction.Transaction` :param transaction: (Optional) Transaction to use for read consistency. If not passed, uses current transaction, if set. :type eventual: bool :param eventual: (Optional) Defaults to strongly consistent (False). Setting True will use eventual consistency, which cannot be used inside a transaction; doing so raises a ValueError. :rtype: :class:`google.cloud.datastore.entity.Entity` or ``NoneType`` :returns: The requested entity if it exists. :raises: :class:`ValueError` if eventual is True and in a transaction. def get(self, key, missing=None, deferred=None, transaction=None, eventual=False): """Retrieve an entity from a single key (if it exists). .. note:: This is just a thin wrapper over :meth:`get_multi`. The backend API does not make a distinction between a single key or multiple keys in a lookup request. :type key: :class:`google.cloud.datastore.key.Key` :param key: The key to be retrieved from the datastore. :type missing: list :param missing: (Optional) If a list is passed, the key-only entities returned by the backend as "missing" will be copied into it. :type deferred: list :param deferred: (Optional) If a list is passed, the keys returned by the backend as "deferred" will be copied into it. :type transaction: :class:`~google.cloud.datastore.transaction.Transaction` :param transaction: (Optional) Transaction to use for read consistency. If not passed, uses current transaction, if set. :type eventual: bool :param eventual: (Optional) Defaults to strongly consistent (False). Setting True will use eventual consistency, which cannot be used inside a transaction; doing so raises a ValueError. :rtype: :class:`google.cloud.datastore.entity.Entity` or ``NoneType`` :returns: The requested entity if it exists. :raises: :class:`ValueError` if eventual is True and in a transaction. """ entities = self.get_multi( keys=[key], missing=missing, deferred=deferred, transaction=transaction, eventual=eventual, ) if entities: return entities[0]
Retrieve entities, along with their attributes. :type keys: list of :class:`google.cloud.datastore.key.Key` :param keys: The keys to be retrieved from the datastore. :type missing: list :param missing: (Optional) If a list is passed, the key-only entities returned by the backend as "missing" will be copied into it. If the list is not empty, an error will occur. :type deferred: list :param deferred: (Optional) If a list is passed, the keys returned by the backend as "deferred" will be copied into it. If the list is not empty, an error will occur. :type transaction: :class:`~google.cloud.datastore.transaction.Transaction` :param transaction: (Optional) Transaction to use for read consistency. If not passed, uses current transaction, if set. :type eventual: bool :param eventual: (Optional) Defaults to strongly consistent (False). Setting True will use eventual consistency, which cannot be used inside a transaction; doing so raises a ValueError. :rtype: list of :class:`google.cloud.datastore.entity.Entity` :returns: The requested entities. :raises: :class:`ValueError` if one or more of ``keys`` has a project which does not match our project. :raises: :class:`ValueError` if eventual is True and in a transaction. def get_multi( self, keys, missing=None, deferred=None, transaction=None, eventual=False ): """Retrieve entities, along with their attributes. :type keys: list of :class:`google.cloud.datastore.key.Key` :param keys: The keys to be retrieved from the datastore. :type missing: list :param missing: (Optional) If a list is passed, the key-only entities returned by the backend as "missing" will be copied into it. If the list is not empty, an error will occur. :type deferred: list :param deferred: (Optional) If a list is passed, the keys returned by the backend as "deferred" will be copied into it. If the list is not empty, an error will occur. :type transaction: :class:`~google.cloud.datastore.transaction.Transaction` :param transaction: (Optional) Transaction to use for read consistency. If not passed, uses current transaction, if set. :type eventual: bool :param eventual: (Optional) Defaults to strongly consistent (False). Setting True will use eventual consistency, which cannot be used inside a transaction; doing so raises a ValueError. :rtype: list of :class:`google.cloud.datastore.entity.Entity` :returns: The requested entities. :raises: :class:`ValueError` if one or more of ``keys`` has a project which does not match our project. :raises: :class:`ValueError` if eventual is True and in a transaction. """ if not keys: return [] ids = set(key.project for key in keys) for current_id in ids: if current_id != self.project: raise ValueError("Keys do not match project") if transaction is None: transaction = self.current_transaction entity_pbs = _extended_lookup( datastore_api=self._datastore_api, project=self.project, key_pbs=[key.to_protobuf() for key in keys], eventual=eventual, missing=missing, deferred=deferred, transaction_id=transaction and transaction.id, ) if missing is not None: missing[:] = [ helpers.entity_from_protobuf(missed_pb) for missed_pb in missing ] if deferred is not None: deferred[:] = [ helpers.key_from_protobuf(deferred_pb) for deferred_pb in deferred ] return [helpers.entity_from_protobuf(entity_pb) for entity_pb in entity_pbs]
Save entities in the Cloud Datastore. :type entities: list of :class:`google.cloud.datastore.entity.Entity` :param entities: The entities to be saved to the datastore. :raises: :class:`ValueError` if ``entities`` is a single entity. def put_multi(self, entities): """Save entities in the Cloud Datastore. :type entities: list of :class:`google.cloud.datastore.entity.Entity` :param entities: The entities to be saved to the datastore. :raises: :class:`ValueError` if ``entities`` is a single entity. """ if isinstance(entities, Entity): raise ValueError("Pass a sequence of entities") if not entities: return current = self.current_batch in_batch = current is not None if not in_batch: current = self.batch() current.begin() for entity in entities: current.put(entity) if not in_batch: current.commit()
Delete keys from the Cloud Datastore. :type keys: list of :class:`google.cloud.datastore.key.Key` :param keys: The keys to be deleted from the Datastore. def delete_multi(self, keys): """Delete keys from the Cloud Datastore. :type keys: list of :class:`google.cloud.datastore.key.Key` :param keys: The keys to be deleted from the Datastore. """ if not keys: return # We allow partial keys to attempt a delete, the backend will fail. current = self.current_batch in_batch = current is not None if not in_batch: current = self.batch() current.begin() for key in keys: current.delete(key) if not in_batch: current.commit()
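Hypothetical usage of the two bulk helpers, assuming a ``client`` and the public ``datastore`` package:

from google.cloud import datastore

task = datastore.Entity(key=client.key("Task"))  # hypothetical kind
task["done"] = False

client.put_multi([task])         # batched in a single commit
client.delete_multi([task.key])  # likewise for deletes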
Allocate a list of IDs from a partial key. :type incomplete_key: :class:`google.cloud.datastore.key.Key` :param incomplete_key: Partial key to use as base for allocated IDs. :type num_ids: int :param num_ids: The number of IDs to allocate. :rtype: list of :class:`google.cloud.datastore.key.Key` :returns: The (complete) keys allocated with ``incomplete_key`` as root. :raises: :class:`ValueError` if ``incomplete_key`` is not a partial key. def allocate_ids(self, incomplete_key, num_ids): """Allocate a list of IDs from a partial key. :type incomplete_key: :class:`google.cloud.datastore.key.Key` :param incomplete_key: Partial key to use as base for allocated IDs. :type num_ids: int :param num_ids: The number of IDs to allocate. :rtype: list of :class:`google.cloud.datastore.key.Key` :returns: The (complete) keys allocated with ``incomplete_key`` as root. :raises: :class:`ValueError` if ``incomplete_key`` is not a partial key. """ if not incomplete_key.is_partial: raise ValueError(("Key is not partial.", incomplete_key)) incomplete_key_pb = incomplete_key.to_protobuf() incomplete_key_pbs = [incomplete_key_pb] * num_ids response_pb = self._datastore_api.allocate_ids( incomplete_key.project, incomplete_key_pbs ) allocated_ids = [ allocated_key_pb.path[-1].id for allocated_key_pb in response_pb.keys ] return [ incomplete_key.completed_key(allocated_id) for allocated_id in allocated_ids ]
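A short sketch of ID allocation with a hypothetical ``Task`` kind:

partial_key = client.key("Task")  # no ID or name, so partial
complete_keys = client.allocate_ids(partial_key, 3)
# Three complete keys sharing partial_key's path, each carrying a
# backend-assigned numeric ID.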
Proxy to :class:`google.cloud.datastore.key.Key`. Passes our ``project``. def key(self, *path_args, **kwargs): """Proxy to :class:`google.cloud.datastore.key.Key`. Passes our ``project``. """ if "project" in kwargs: raise TypeError("Cannot pass project") kwargs["project"] = self.project if "namespace" not in kwargs: kwargs["namespace"] = self.namespace return Key(*path_args, **kwargs)
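The proxy simply forwards ``project`` (and ``namespace``, unless overridden), so callers never repeat them:

key = client.key("Parent", "alice", "Task", 42)
# Equivalent to Key('Parent', 'alice', 'Task', 42,
#                   project=client.project, namespace=client.namespace)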
Proxy to :class:`google.cloud.datastore.query.Query`. Passes our ``project``. Using query to search a datastore: .. testsetup:: query import os import uuid from google.cloud import datastore unique = os.getenv('CIRCLE_BUILD_NUM', str(uuid.uuid4())[0:8]) client = datastore.Client(namespace='ns{}'.format(unique)) query = client.query(kind='_Doctest') def do_something(entity): pass .. doctest:: query >>> query = client.query(kind='MyKind') >>> query.add_filter('property', '=', 'val') Using the query iterator .. doctest:: query >>> query_iter = query.fetch() >>> for entity in query_iter: ... do_something(entity) or manually page through results .. testsetup:: query-page import os import uuid from google.cloud import datastore from tests.system.test_system import Config # system tests unique = os.getenv('CIRCLE_BUILD_NUM', str(uuid.uuid4())[0:8]) client = datastore.Client(namespace='ns{}'.format(unique)) key = client.key('_Doctest') entity1 = datastore.Entity(key=key) entity1['foo'] = 1337 entity2 = datastore.Entity(key=key) entity2['foo'] = 42 Config.TO_DELETE.extend([entity1, entity2]) client.put_multi([entity1, entity2]) query = client.query(kind='_Doctest') cursor = None .. doctest:: query-page >>> query_iter = query.fetch(start_cursor=cursor) >>> pages = query_iter.pages >>> >>> first_page = next(pages) >>> first_page_entities = list(first_page) >>> query_iter.next_page_token is None True :type kwargs: dict :param kwargs: Parameters for initializing an instance of :class:`~google.cloud.datastore.query.Query`. :rtype: :class:`~google.cloud.datastore.query.Query` :returns: A query object. def query(self, **kwargs): """Proxy to :class:`google.cloud.datastore.query.Query`. Passes our ``project``. Using query to search a datastore: .. testsetup:: query import os import uuid from google.cloud import datastore unique = os.getenv('CIRCLE_BUILD_NUM', str(uuid.uuid4())[0:8]) client = datastore.Client(namespace='ns{}'.format(unique)) query = client.query(kind='_Doctest') def do_something(entity): pass .. doctest:: query >>> query = client.query(kind='MyKind') >>> query.add_filter('property', '=', 'val') Using the query iterator .. doctest:: query >>> query_iter = query.fetch() >>> for entity in query_iter: ... do_something(entity) or manually page through results .. testsetup:: query-page import os import uuid from google.cloud import datastore from tests.system.test_system import Config # system tests unique = os.getenv('CIRCLE_BUILD_NUM', str(uuid.uuid4())[0:8]) client = datastore.Client(namespace='ns{}'.format(unique)) key = client.key('_Doctest') entity1 = datastore.Entity(key=key) entity1['foo'] = 1337 entity2 = datastore.Entity(key=key) entity2['foo'] = 42 Config.TO_DELETE.extend([entity1, entity2]) client.put_multi([entity1, entity2]) query = client.query(kind='_Doctest') cursor = None .. doctest:: query-page >>> query_iter = query.fetch(start_cursor=cursor) >>> pages = query_iter.pages >>> >>> first_page = next(pages) >>> first_page_entities = list(first_page) >>> query_iter.next_page_token is None True :type kwargs: dict :param kwargs: Parameters for initializing an instance of :class:`~google.cloud.datastore.query.Query`. :rtype: :class:`~google.cloud.datastore.query.Query` :returns: A query object. """ if "client" in kwargs: raise TypeError("Cannot pass client") if "project" in kwargs: raise TypeError("Cannot pass project") kwargs["project"] = self.project if "namespace" not in kwargs: kwargs["namespace"] = self.namespace return Query(self, **kwargs)
Add a row to the batch. If the current batch meets one of the size limits, the batch is sent synchronously. For example: .. literalinclude:: snippets.py :start-after: [START bigtable_batcher_mutate] :end-before: [END bigtable_batcher_mutate] :type row: :class:`~google.cloud.bigtable.row.DirectRow` :param row: The row to add to the batch. :raises: One of the following: * :exc:`~.table._BigtableRetryableError` if any row returned a transient error. * :exc:`RuntimeError` if the number of responses doesn't match the number of rows that were retried * :exc:`.batcher.MaxMutationsError` if any row exceeds max mutations count. def mutate(self, row): """ Add a row to the batch. If the current batch meets one of the size limits, the batch is sent synchronously. For example: .. literalinclude:: snippets.py :start-after: [START bigtable_batcher_mutate] :end-before: [END bigtable_batcher_mutate] :type row: :class:`~google.cloud.bigtable.row.DirectRow` :param row: The row to add to the batch. :raises: One of the following: * :exc:`~.table._BigtableRetryableError` if any row returned a transient error. * :exc:`RuntimeError` if the number of responses doesn't match the number of rows that were retried * :exc:`.batcher.MaxMutationsError` if any row exceeds max mutations count. """ mutation_count = len(row._get_mutations()) if mutation_count > MAX_MUTATIONS: raise MaxMutationsError( "Row {} has {} mutations, exceeding the maximum of {} per row.".format( row.row_key, mutation_count, MAX_MUTATIONS ) ) if (self.total_mutation_count + mutation_count) >= MAX_MUTATIONS: self.flush() self.rows.append(row) self.total_mutation_count += mutation_count self.total_size += row.get_mutations_size() if self.total_size >= self.max_row_bytes or len(self.rows) >= self.flush_count: self.flush()
Sends the current batch to Cloud Bigtable. For example: .. literalinclude:: snippets.py :start-after: [START bigtable_batcher_flush] :end-before: [END bigtable_batcher_flush] def flush(self): """ Sends the current batch to Cloud Bigtable. For example: .. literalinclude:: snippets.py :start-after: [START bigtable_batcher_flush] :end-before: [END bigtable_batcher_flush] """ if len(self.rows) != 0: self.table.mutate_rows(self.rows) self.total_mutation_count = 0 self.total_size = 0 self.rows = []
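A hedged end-to-end sketch of the batcher, assuming a ``table`` handle obtained elsewhere and the ``MutationsBatcher`` constructor's usual ``flush_count``/``max_row_bytes`` knobs:

from google.cloud.bigtable.batcher import MutationsBatcher

batcher = MutationsBatcher(table, flush_count=100)

row = table.direct_row(b"row-key-1")
row.set_cell("cf1", b"qualifier", b"value")

batcher.mutate(row)  # buffered; flushes automatically at the limits above
batcher.flush()      # force-send whatever remains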
Verify that a topic path is in the correct format. .. _resource manager docs: https://cloud.google.com/resource-manager/\ reference/rest/v1beta1/projects#\ Project.FIELDS.project_id .. _topic spec: https://cloud.google.com/storage/docs/json_api/v1/\ notifications/insert#topic Expected to be of the form: //pubsub.googleapis.com/projects/{project}/topics/{topic} where the ``project`` value must be "6 to 30 lowercase letters, digits, or hyphens. It must start with a letter. Trailing hyphens are prohibited." (see `resource manager docs`_) and ``topic`` must have length at least two, must start with a letter and may only contain alphanumeric characters or ``-``, ``_``, ``.``, ``~``, ``+`` or ``%`` (i.e. characters used for URL encoding, see `topic spec`_). Args: topic_path (str): The topic path to be verified. Returns: Tuple[str, str]: The ``topic`` name and the ``project``, in that order, parsed from the ``topic_path``. Raises: ValueError: If the topic path is invalid. def _parse_topic_path(topic_path): """Verify that a topic path is in the correct format. .. _resource manager docs: https://cloud.google.com/resource-manager/\ reference/rest/v1beta1/projects#\ Project.FIELDS.project_id .. _topic spec: https://cloud.google.com/storage/docs/json_api/v1/\ notifications/insert#topic Expected to be of the form: //pubsub.googleapis.com/projects/{project}/topics/{topic} where the ``project`` value must be "6 to 30 lowercase letters, digits, or hyphens. It must start with a letter. Trailing hyphens are prohibited." (see `resource manager docs`_) and ``topic`` must have length at least two, must start with a letter and may only contain alphanumeric characters or ``-``, ``_``, ``.``, ``~``, ``+`` or ``%`` (i.e. characters used for URL encoding, see `topic spec`_). Args: topic_path (str): The topic path to be verified. Returns: Tuple[str, str]: The ``topic`` name and the ``project``, in that order, parsed from the ``topic_path``. Raises: ValueError: If the topic path is invalid. """ match = _TOPIC_REF_RE.match(topic_path) if match is None: raise ValueError(_BAD_TOPIC.format(topic_path)) return match.group("name"), match.group("project")
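Note that the return order matches the code above: topic name first, then project.

name, project = _parse_topic_path(
    "//pubsub.googleapis.com/projects/my-project/topics/my-topic"
)
# name == 'my-topic', project == 'my-project'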
Construct an instance from the JSON repr returned by the server. See: https://cloud.google.com/storage/docs/json_api/v1/notifications :type resource: dict :param resource: JSON repr of the notification :type bucket: :class:`google.cloud.storage.bucket.Bucket` :param bucket: Bucket to which the notification is bound. :rtype: :class:`BucketNotification` :returns: the new notification instance def from_api_repr(cls, resource, bucket): """Construct an instance from the JSON repr returned by the server. See: https://cloud.google.com/storage/docs/json_api/v1/notifications :type resource: dict :param resource: JSON repr of the notification :type bucket: :class:`google.cloud.storage.bucket.Bucket` :param bucket: Bucket to which the notification is bound. :rtype: :class:`BucketNotification` :returns: the new notification instance """ topic_path = resource.get("topic") if topic_path is None: raise ValueError("Resource has no topic") name, project = _parse_topic_path(topic_path) instance = cls(bucket, name, topic_project=project) instance._properties = resource return instance
Helper for :meth:`reload`. :type response: dict :param response: resource mapping from server def _set_properties(self, response): """Helper for :meth:`reload`. :type response: dict :param response: resource mapping from server """ self._properties.clear() self._properties.update(response)
API wrapper: create the notification. See: https://cloud.google.com/storage/docs/json_api/v1/notifications/insert If :attr:`user_project` is set on the bucket, bills the API request to that project. :type client: :class:`~google.cloud.storage.client.Client` :param client: (Optional) the client to use. If not passed, falls back to the ``client`` stored on the notification's bucket. def create(self, client=None): """API wrapper: create the notification. See: https://cloud.google.com/storage/docs/json_api/v1/notifications/insert If :attr:`user_project` is set on the bucket, bills the API request to that project. :type client: :class:`~google.cloud.storage.client.Client` :param client: (Optional) the client to use. If not passed, falls back to the ``client`` stored on the notification's bucket. """ if self.notification_id is not None: raise ValueError( "Notification already exists w/ id: {}".format(self.notification_id) ) client = self._require_client(client) query_params = {} if self.bucket.user_project is not None: query_params["userProject"] = self.bucket.user_project path = "/b/{}/notificationConfigs".format(self.bucket.name) properties = self._properties.copy() properties["topic"] = _TOPIC_REF_FMT.format(self.topic_project, self.topic_name) self._properties = client._connection.api_request( method="POST", path=path, query_params=query_params, data=properties )
Test whether this notification exists. See: https://cloud.google.com/storage/docs/json_api/v1/notifications/get If :attr:`user_project` is set on the bucket, bills the API request to that project. :type client: :class:`~google.cloud.storage.client.Client` or ``NoneType`` :param client: Optional. The client to use. If not passed, falls back to the ``client`` stored on the current bucket. :rtype: bool :returns: True if the notification exists, else False. :raises ValueError: if the notification has no ID. def exists(self, client=None): """Test whether this notification exists. See: https://cloud.google.com/storage/docs/json_api/v1/notifications/get If :attr:`user_project` is set on the bucket, bills the API request to that project. :type client: :class:`~google.cloud.storage.client.Client` or ``NoneType`` :param client: Optional. The client to use. If not passed, falls back to the ``client`` stored on the current bucket. :rtype: bool :returns: True if the notification exists, else False. :raises ValueError: if the notification has no ID. """ if self.notification_id is None: raise ValueError("Notification not initialized by server") client = self._require_client(client) query_params = {} if self.bucket.user_project is not None: query_params["userProject"] = self.bucket.user_project try: client._connection.api_request( method="GET", path=self.path, query_params=query_params ) except NotFound: return False else: return True
Update this notification from the server configuration. See: https://cloud.google.com/storage/docs/json_api/v1/notifications/get If :attr:`user_project` is set on the bucket, bills the API request to that project. :type client: :class:`~google.cloud.storage.client.Client` or ``NoneType`` :param client: Optional. The client to use. If not passed, falls back to the ``client`` stored on the current bucket. :raises ValueError: if the notification has no ID. def reload(self, client=None): """Update this notification from the server configuration. See: https://cloud.google.com/storage/docs/json_api/v1/notifications/get If :attr:`user_project` is set on the bucket, bills the API request to that project. :type client: :class:`~google.cloud.storage.client.Client` or ``NoneType`` :param client: Optional. The client to use. If not passed, falls back to the ``client`` stored on the current bucket. :raises ValueError: if the notification has no ID. """ if self.notification_id is None: raise ValueError("Notification not initialized by server") client = self._require_client(client) query_params = {} if self.bucket.user_project is not None: query_params["userProject"] = self.bucket.user_project response = client._connection.api_request( method="GET", path=self.path, query_params=query_params ) self._set_properties(response)
Delete this notification. See: https://cloud.google.com/storage/docs/json_api/v1/notifications/delete If :attr:`user_project` is set on the bucket, bills the API request to that project. :type client: :class:`~google.cloud.storage.client.Client` or ``NoneType`` :param client: Optional. The client to use. If not passed, falls back to the ``client`` stored on the current bucket. :raises :class:`google.api_core.exceptions.NotFound`: if the notification does not exist. :raises ValueError: if the notification has no ID. def delete(self, client=None): """Delete this notification. See: https://cloud.google.com/storage/docs/json_api/v1/notifications/delete If :attr:`user_project` is set on the bucket, bills the API request to that project. :type client: :class:`~google.cloud.storage.client.Client` or ``NoneType`` :param client: Optional. The client to use. If not passed, falls back to the ``client`` stored on the current bucket. :raises :class:`google.api_core.exceptions.NotFound`: if the notification does not exist. :raises ValueError: if the notification has no ID. """ if self.notification_id is None: raise ValueError("Notification not initialized by server") client = self._require_client(client) query_params = {} if self.bucket.user_project is not None: query_params["userProject"] = self.bucket.user_project client._connection.api_request( method="DELETE", path=self.path, query_params=query_params )
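A hedged lifecycle sketch tying the methods above together, assuming a ``bucket`` and an existing Pub/Sub topic:

notification = bucket.notification(topic_name="my-topic")  # hypothetical topic
notification.create()         # POST .../notificationConfigs
assert notification.exists()  # GET succeeded, so True
notification.reload()         # refresh server-side properties
notification.delete()         # DELETE; raises NotFound if already gone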
Creates a Redis instance based on the specified tier and memory size. By default, the instance is accessible from the project's `default network <https://cloud.google.com/compute/docs/networks-and-firewalls#networks>`__. The creation is executed asynchronously and callers may check the returned operation to track its progress. Once the operation is completed the Redis instance will be fully functional. Completed longrunning.Operation will contain the new instance object in the response field. The returned operation is automatically deleted after a few hours, so there is no need to call DeleteOperation. Example: >>> from google.cloud import redis_v1 >>> from google.cloud.redis_v1 import enums >>> >>> client = redis_v1.CloudRedisClient() >>> >>> parent = client.location_path('[PROJECT]', '[LOCATION]') >>> instance_id = 'test_instance' >>> tier = enums.Instance.Tier.BASIC >>> memory_size_gb = 1 >>> instance = {'tier': tier, 'memory_size_gb': memory_size_gb} >>> >>> response = client.create_instance(parent, instance_id, instance) >>> >>> def callback(operation_future): ... # Handle result. ... result = operation_future.result() >>> >>> response.add_done_callback(callback) >>> >>> # Handle metadata. >>> metadata = response.metadata() Args: parent (str): Required. The resource name of the instance location using the form: ``projects/{project_id}/locations/{location_id}`` where ``location_id`` refers to a GCP region instance_id (str): Required. The logical name of the Redis instance in the customer project with the following restrictions: - Must contain only lowercase letters, numbers, and hyphens. - Must start with a letter. - Must be between 1-40 characters. - Must end with a number or a letter. - Must be unique within the customer project / location instance (Union[dict, ~google.cloud.redis_v1.types.Instance]): Required. A Redis [Instance] resource If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.redis_v1.types.Instance` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.redis_v1.types._OperationFuture` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. def create_instance( self, parent, instance_id, instance, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Creates a Redis instance based on the specified tier and memory size. By default, the instance is accessible from the project's `default network <https://cloud.google.com/compute/docs/networks-and-firewalls#networks>`__. The creation is executed asynchronously and callers may check the returned operation to track its progress. Once the operation is completed the Redis instance will be fully functional. Completed longrunning.Operation will contain the new instance object in the response field. The returned operation is automatically deleted after a few hours, so there is no need to call DeleteOperation. 
Example: >>> from google.cloud import redis_v1 >>> from google.cloud.redis_v1 import enums >>> >>> client = redis_v1.CloudRedisClient() >>> >>> parent = client.location_path('[PROJECT]', '[LOCATION]') >>> instance_id = 'test_instance' >>> tier = enums.Instance.Tier.BASIC >>> memory_size_gb = 1 >>> instance = {'tier': tier, 'memory_size_gb': memory_size_gb} >>> >>> response = client.create_instance(parent, instance_id, instance) >>> >>> def callback(operation_future): ... # Handle result. ... result = operation_future.result() >>> >>> response.add_done_callback(callback) >>> >>> # Handle metadata. >>> metadata = response.metadata() Args: parent (str): Required. The resource name of the instance location using the form: ``projects/{project_id}/locations/{location_id}`` where ``location_id`` refers to a GCP region instance_id (str): Required. The logical name of the Redis instance in the customer project with the following restrictions: - Must contain only lowercase letters, numbers, and hyphens. - Must start with a letter. - Must be between 1-40 characters. - Must end with a number or a letter. - Must be unique within the customer project / location instance (Union[dict, ~google.cloud.redis_v1.types.Instance]): Required. A Redis [Instance] resource If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.redis_v1.types.Instance` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.redis_v1.types._OperationFuture` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. if "create_instance" not in self._inner_api_calls: self._inner_api_calls[ "create_instance" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.create_instance, default_retry=self._method_configs["CreateInstance"].retry, default_timeout=self._method_configs["CreateInstance"].timeout, client_info=self._client_info, ) request = cloud_redis_pb2.CreateInstanceRequest( parent=parent, instance_id=instance_id, instance=instance ) operation = self._inner_api_calls["create_instance"]( request, retry=retry, timeout=timeout, metadata=metadata ) return google.api_core.operation.from_gapic( operation, self.transport._operations_client, cloud_redis_pb2.Instance, metadata_type=cloud_redis_pb2.OperationMetadata, )
Import a Redis RDB snapshot file from GCS into a Redis instance. Redis may stop serving during this operation. Instance state will be IMPORTING for entire operation. When complete, the instance will contain only data from the imported file. The returned operation is automatically deleted after a few hours, so there is no need to call DeleteOperation. Example: >>> from google.cloud import redis_v1 >>> >>> client = redis_v1.CloudRedisClient() >>> >>> name = client.instance_path('[PROJECT]', '[LOCATION]', '[INSTANCE]') >>> >>> # TODO: Initialize `input_config`: >>> input_config = {} >>> >>> response = client.import_instance(name, input_config) >>> >>> def callback(operation_future): ... # Handle result. ... result = operation_future.result() >>> >>> response.add_done_callback(callback) >>> >>> # Handle metadata. >>> metadata = response.metadata() Args: name (str): Required. Redis instance resource name using the form: ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` where ``location_id`` refers to a GCP region input_config (Union[dict, ~google.cloud.redis_v1.types.InputConfig]): Required. Specify data to be imported. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.redis_v1.types.InputConfig` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.redis_v1.types._OperationFuture` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. def import_instance( self, name, input_config, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Import a Redis RDB snapshot file from GCS into a Redis instance. Redis may stop serving during this operation. Instance state will be IMPORTING for entire operation. When complete, the instance will contain only data from the imported file. The returned operation is automatically deleted after a few hours, so there is no need to call DeleteOperation. Example: >>> from google.cloud import redis_v1 >>> >>> client = redis_v1.CloudRedisClient() >>> >>> name = client.instance_path('[PROJECT]', '[LOCATION]', '[INSTANCE]') >>> >>> # TODO: Initialize `input_config`: >>> input_config = {} >>> >>> response = client.import_instance(name, input_config) >>> >>> def callback(operation_future): ... # Handle result. ... result = operation_future.result() >>> >>> response.add_done_callback(callback) >>> >>> # Handle metadata. >>> metadata = response.metadata() Args: name (str): Required. Redis instance resource name using the form: ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` where ``location_id`` refers to a GCP region input_config (Union[dict, ~google.cloud.redis_v1.types.InputConfig]): Required. Specify data to be imported. 
If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.redis_v1.types.InputConfig` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.redis_v1.types._OperationFuture` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. if "import_instance" not in self._inner_api_calls: self._inner_api_calls[ "import_instance" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.import_instance, default_retry=self._method_configs["ImportInstance"].retry, default_timeout=self._method_configs["ImportInstance"].timeout, client_info=self._client_info, ) request = cloud_redis_pb2.ImportInstanceRequest( name=name, input_config=input_config ) operation = self._inner_api_calls["import_instance"]( request, retry=retry, timeout=timeout, metadata=metadata ) return google.api_core.operation.from_gapic( operation, self.transport._operations_client, cloud_redis_pb2.Instance, metadata_type=cloud_redis_pb2.OperationMetadata, )
Return a fully-qualified notification_channel string. def notification_channel_path(cls, project, notification_channel): """Return a fully-qualified notification_channel string.""" return google.api_core.path_template.expand( "projects/{project}/notificationChannels/{notification_channel}", project=project, notification_channel=notification_channel, )
Return a fully-qualified notification_channel_descriptor string. def notification_channel_descriptor_path(cls, project, channel_descriptor): """Return a fully-qualified notification_channel_descriptor string.""" return google.api_core.path_template.expand( "projects/{project}/notificationChannelDescriptors/{channel_descriptor}", project=project, channel_descriptor=channel_descriptor, )
Return a batch to use as a context manager. :type client: :class:`~google.cloud.logging.client.Client` or ``NoneType`` :param client: the client to use. If not passed, falls back to the ``client`` stored on the current logger. :rtype: :class:`Batch` :returns: A batch to use as a context manager. def batch(self, client=None): """Return a batch to use as a context manager. :type client: :class:`~google.cloud.logging.client.Client` or ``NoneType`` :param client: the client to use. If not passed, falls back to the ``client`` stored on the current logger. :rtype: :class:`Batch` :returns: A batch to use as a context manager. """ client = self._require_client(client) return Batch(self, client)
Helper for :meth:`log_empty`, :meth:`log_text`, etc. def _do_log(self, client, _entry_class, payload=None, **kw): """Helper for :meth:`log_empty`, :meth:`log_text`, etc. """ client = self._require_client(client) # Apply defaults kw["log_name"] = kw.pop("log_name", self.full_name) kw["labels"] = kw.pop("labels", self.labels) kw["resource"] = kw.pop("resource", _GLOBAL_RESOURCE) if payload is not None: entry = _entry_class(payload=payload, **kw) else: entry = _entry_class(**kw) api_repr = entry.to_api_repr() client.logging_api.write_entries([api_repr])
API call: log a text message via a POST request See https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries/write :type text: str :param text: the log message. :type client: :class:`~google.cloud.logging.client.Client` or ``NoneType`` :param client: the client to use. If not passed, falls back to the ``client`` stored on the current logger. :type kw: dict :param kw: (optional) additional keyword arguments for the entry. See :class:`~google.cloud.logging.entries.LogEntry`. def log_text(self, text, client=None, **kw): """API call: log a text message via a POST request See https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries/write :type text: str :param text: the log message. :type client: :class:`~google.cloud.logging.client.Client` or ``NoneType`` :param client: the client to use. If not passed, falls back to the ``client`` stored on the current logger. :type kw: dict :param kw: (optional) additional keyword arguments for the entry. See :class:`~google.cloud.logging.entries.LogEntry`. """ self._do_log(client, TextEntry, text, **kw)
API call: log a structured message via a POST request See https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries/write :type info: dict :param info: the log entry information :type client: :class:`~google.cloud.logging.client.Client` or ``NoneType`` :param client: the client to use. If not passed, falls back to the ``client`` stored on the current logger. :type kw: dict :param kw: (optional) additional keyword arguments for the entry. See :class:`~google.cloud.logging.entries.LogEntry`. def log_struct(self, info, client=None, **kw): """API call: log a structured message via a POST request See https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries/write :type info: dict :param info: the log entry information :type client: :class:`~google.cloud.logging.client.Client` or ``NoneType`` :param client: the client to use. If not passed, falls back to the ``client`` stored on the current logger. :type kw: dict :param kw: (optional) additional keyword arguments for the entry. See :class:`~google.cloud.logging.entries.LogEntry`. """ self._do_log(client, StructEntry, info, **kw)
API call: log a protobuf message via a POST request See https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries/write :type message: :class:`~google.protobuf.message.Message` :param message: The protobuf message to be logged. :type client: :class:`~google.cloud.logging.client.Client` or ``NoneType`` :param client: the client to use. If not passed, falls back to the ``client`` stored on the current logger. :type kw: dict :param kw: (optional) additional keyword arguments for the entry. See :class:`~google.cloud.logging.entries.LogEntry`. def log_proto(self, message, client=None, **kw): """API call: log a protobuf message via a POST request See https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries/write :type message: :class:`~google.protobuf.message.Message` :param message: The protobuf message to be logged. :type client: :class:`~google.cloud.logging.client.Client` or ``NoneType`` :param client: the client to use. If not passed, falls back to the ``client`` stored on the current logger. :type kw: dict :param kw: (optional) additional keyword arguments for the entry. See :class:`~google.cloud.logging.entries.LogEntry`. """ self._do_log(client, ProtobufEntry, message, **kw)
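Hypothetical usage of the logging helpers above, including the batch context manager returned by :meth:`batch`:

from google.cloud import logging

client = logging.Client()
logger = client.logger("my-log")  # hypothetical log name

logger.log_text("A simple text entry")
logger.log_struct({"event": "signup", "user": "alice"})

with logger.batch() as batch:  # the Batch commits on clean exit
    batch.log_text("first")
    batch.log_struct({"second": True})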