repo
stringlengths
7
55
path
stringlengths
4
223
func_name
stringlengths
1
134
original_string
stringlengths
75
104k
language
stringclasses
1 value
code
stringlengths
75
104k
code_tokens
listlengths
19
28.4k
docstring
stringlengths
1
46.9k
docstring_tokens
listlengths
1
1.97k
sha
stringlengths
40
40
url
stringlengths
87
315
partition
stringclasses
1 value
googleapis/google-cloud-python
error_reporting/google/cloud/error_reporting/client.py
Client.report_errors_api
def report_errors_api(self): """Helper for logging-related API calls. See https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.logs :rtype: :class:`_gapic._ErrorReportingGapicApi` or :class:`._logging._ErrorReportingLoggingAPI` :returns: A class that implements the report errors API. """ if self._report_errors_api is None: if self._use_grpc: self._report_errors_api = make_report_error_api(self) else: self._report_errors_api = _ErrorReportingLoggingAPI( self.project, self._credentials, self._http ) return self._report_errors_api
python
def report_errors_api(self): """Helper for logging-related API calls. See https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.logs :rtype: :class:`_gapic._ErrorReportingGapicApi` or :class:`._logging._ErrorReportingLoggingAPI` :returns: A class that implements the report errors API. """ if self._report_errors_api is None: if self._use_grpc: self._report_errors_api = make_report_error_api(self) else: self._report_errors_api = _ErrorReportingLoggingAPI( self.project, self._credentials, self._http ) return self._report_errors_api
[ "def", "report_errors_api", "(", "self", ")", ":", "if", "self", ".", "_report_errors_api", "is", "None", ":", "if", "self", ".", "_use_grpc", ":", "self", ".", "_report_errors_api", "=", "make_report_error_api", "(", "self", ")", "else", ":", "self", ".", "_report_errors_api", "=", "_ErrorReportingLoggingAPI", "(", "self", ".", "project", ",", "self", ".", "_credentials", ",", "self", ".", "_http", ")", "return", "self", ".", "_report_errors_api" ]
Helper for logging-related API calls. See https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.logs :rtype: :class:`_gapic._ErrorReportingGapicApi` or :class:`._logging._ErrorReportingLoggingAPI` :returns: A class that implements the report errors API.
[ "Helper", "for", "logging", "-", "related", "API", "calls", "." ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/error_reporting/google/cloud/error_reporting/client.py#L162-L182
train
googleapis/google-cloud-python
error_reporting/google/cloud/error_reporting/client.py
Client._build_error_report
def _build_error_report( self, message, report_location=None, http_context=None, user=None ): """Builds the Error Reporting object to report. This builds the object according to https://cloud.google.com/error-reporting/docs/formatting-error-messages :type message: str :param message: The stack trace that was reported or logged by the service. :type report_location: dict :param report_location: The location in the source code where the decision was made to report the error, usually the place where it was logged. For a logged exception this would be the source line where the exception is logged, usually close to the place where it was caught. This should be a Python dict that contains the keys 'filePath', 'lineNumber', and 'functionName' :type http_context: :class`google.cloud.error_reporting.HTTPContext` :param http_context: The HTTP request which was processed when the error was triggered. :type user: str :param user: The user who caused or was affected by the crash. This can be a user ID, an email address, or an arbitrary token that uniquely identifies the user. When sending an error report, leave this field empty if the user was not logged in. In this case the Error Reporting system will use other data, such as remote IP address, to distinguish affected users. :rtype: dict :returns: A dict payload ready to be serialized to JSON and sent to the API. """ payload = { "serviceContext": {"service": self.service}, "message": "{0}".format(message), } if self.version: payload["serviceContext"]["version"] = self.version if report_location or http_context or user: payload["context"] = {} if report_location: payload["context"]["reportLocation"] = report_location if http_context: http_context_dict = http_context.__dict__ # strip out None values payload["context"]["httpRequest"] = { key: value for key, value in six.iteritems(http_context_dict) if value is not None } if user: payload["context"]["user"] = user return payload
python
def _build_error_report( self, message, report_location=None, http_context=None, user=None ): """Builds the Error Reporting object to report. This builds the object according to https://cloud.google.com/error-reporting/docs/formatting-error-messages :type message: str :param message: The stack trace that was reported or logged by the service. :type report_location: dict :param report_location: The location in the source code where the decision was made to report the error, usually the place where it was logged. For a logged exception this would be the source line where the exception is logged, usually close to the place where it was caught. This should be a Python dict that contains the keys 'filePath', 'lineNumber', and 'functionName' :type http_context: :class`google.cloud.error_reporting.HTTPContext` :param http_context: The HTTP request which was processed when the error was triggered. :type user: str :param user: The user who caused or was affected by the crash. This can be a user ID, an email address, or an arbitrary token that uniquely identifies the user. When sending an error report, leave this field empty if the user was not logged in. In this case the Error Reporting system will use other data, such as remote IP address, to distinguish affected users. :rtype: dict :returns: A dict payload ready to be serialized to JSON and sent to the API. """ payload = { "serviceContext": {"service": self.service}, "message": "{0}".format(message), } if self.version: payload["serviceContext"]["version"] = self.version if report_location or http_context or user: payload["context"] = {} if report_location: payload["context"]["reportLocation"] = report_location if http_context: http_context_dict = http_context.__dict__ # strip out None values payload["context"]["httpRequest"] = { key: value for key, value in six.iteritems(http_context_dict) if value is not None } if user: payload["context"]["user"] = user return payload
[ "def", "_build_error_report", "(", "self", ",", "message", ",", "report_location", "=", "None", ",", "http_context", "=", "None", ",", "user", "=", "None", ")", ":", "payload", "=", "{", "\"serviceContext\"", ":", "{", "\"service\"", ":", "self", ".", "service", "}", ",", "\"message\"", ":", "\"{0}\"", ".", "format", "(", "message", ")", ",", "}", "if", "self", ".", "version", ":", "payload", "[", "\"serviceContext\"", "]", "[", "\"version\"", "]", "=", "self", ".", "version", "if", "report_location", "or", "http_context", "or", "user", ":", "payload", "[", "\"context\"", "]", "=", "{", "}", "if", "report_location", ":", "payload", "[", "\"context\"", "]", "[", "\"reportLocation\"", "]", "=", "report_location", "if", "http_context", ":", "http_context_dict", "=", "http_context", ".", "__dict__", "# strip out None values", "payload", "[", "\"context\"", "]", "[", "\"httpRequest\"", "]", "=", "{", "key", ":", "value", "for", "key", ",", "value", "in", "six", ".", "iteritems", "(", "http_context_dict", ")", "if", "value", "is", "not", "None", "}", "if", "user", ":", "payload", "[", "\"context\"", "]", "[", "\"user\"", "]", "=", "user", "return", "payload" ]
Builds the Error Reporting object to report. This builds the object according to https://cloud.google.com/error-reporting/docs/formatting-error-messages :type message: str :param message: The stack trace that was reported or logged by the service. :type report_location: dict :param report_location: The location in the source code where the decision was made to report the error, usually the place where it was logged. For a logged exception this would be the source line where the exception is logged, usually close to the place where it was caught. This should be a Python dict that contains the keys 'filePath', 'lineNumber', and 'functionName' :type http_context: :class`google.cloud.error_reporting.HTTPContext` :param http_context: The HTTP request which was processed when the error was triggered. :type user: str :param user: The user who caused or was affected by the crash. This can be a user ID, an email address, or an arbitrary token that uniquely identifies the user. When sending an error report, leave this field empty if the user was not logged in. In this case the Error Reporting system will use other data, such as remote IP address, to distinguish affected users. :rtype: dict :returns: A dict payload ready to be serialized to JSON and sent to the API.
[ "Builds", "the", "Error", "Reporting", "object", "to", "report", "." ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/error_reporting/google/cloud/error_reporting/client.py#L184-L247
train
googleapis/google-cloud-python
error_reporting/google/cloud/error_reporting/client.py
Client._send_error_report
def _send_error_report( self, message, report_location=None, http_context=None, user=None ): """Makes the call to the Error Reporting API. This is the lower-level interface to build and send the payload, generally users will use either report() or report_exception() to automatically gather the parameters for this method. :type message: str :param message: The stack trace that was reported or logged by the service. :type report_location: dict :param report_location: The location in the source code where the decision was made to report the error, usually the place where it was logged. For a logged exception this would be the source line where the exception is logged, usually close to the place where it was caught. This should be a Python dict that contains the keys 'filePath', 'lineNumber', and 'functionName' :type http_context: :class`google.cloud.error_reporting.HTTPContext` :param http_context: The HTTP request which was processed when the error was triggered. :type user: str :param user: The user who caused or was affected by the crash. This can be a user ID, an email address, or an arbitrary token that uniquely identifies the user. When sending an error report, leave this field empty if the user was not logged in. In this case the Error Reporting system will use other data, such as remote IP address, to distinguish affected users. """ error_report = self._build_error_report( message, report_location, http_context, user ) self.report_errors_api.report_error_event(error_report)
python
def _send_error_report( self, message, report_location=None, http_context=None, user=None ): """Makes the call to the Error Reporting API. This is the lower-level interface to build and send the payload, generally users will use either report() or report_exception() to automatically gather the parameters for this method. :type message: str :param message: The stack trace that was reported or logged by the service. :type report_location: dict :param report_location: The location in the source code where the decision was made to report the error, usually the place where it was logged. For a logged exception this would be the source line where the exception is logged, usually close to the place where it was caught. This should be a Python dict that contains the keys 'filePath', 'lineNumber', and 'functionName' :type http_context: :class`google.cloud.error_reporting.HTTPContext` :param http_context: The HTTP request which was processed when the error was triggered. :type user: str :param user: The user who caused or was affected by the crash. This can be a user ID, an email address, or an arbitrary token that uniquely identifies the user. When sending an error report, leave this field empty if the user was not logged in. In this case the Error Reporting system will use other data, such as remote IP address, to distinguish affected users. """ error_report = self._build_error_report( message, report_location, http_context, user ) self.report_errors_api.report_error_event(error_report)
[ "def", "_send_error_report", "(", "self", ",", "message", ",", "report_location", "=", "None", ",", "http_context", "=", "None", ",", "user", "=", "None", ")", ":", "error_report", "=", "self", ".", "_build_error_report", "(", "message", ",", "report_location", ",", "http_context", ",", "user", ")", "self", ".", "report_errors_api", ".", "report_error_event", "(", "error_report", ")" ]
Makes the call to the Error Reporting API. This is the lower-level interface to build and send the payload, generally users will use either report() or report_exception() to automatically gather the parameters for this method. :type message: str :param message: The stack trace that was reported or logged by the service. :type report_location: dict :param report_location: The location in the source code where the decision was made to report the error, usually the place where it was logged. For a logged exception this would be the source line where the exception is logged, usually close to the place where it was caught. This should be a Python dict that contains the keys 'filePath', 'lineNumber', and 'functionName' :type http_context: :class`google.cloud.error_reporting.HTTPContext` :param http_context: The HTTP request which was processed when the error was triggered. :type user: str :param user: The user who caused or was affected by the crash. This can be a user ID, an email address, or an arbitrary token that uniquely identifies the user. When sending an error report, leave this field empty if the user was not logged in. In this case the Error Reporting system will use other data, such as remote IP address, to distinguish affected users.
[ "Makes", "the", "call", "to", "the", "Error", "Reporting", "API", "." ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/error_reporting/google/cloud/error_reporting/client.py#L249-L288
train
googleapis/google-cloud-python
error_reporting/google/cloud/error_reporting/client.py
Client.report
def report(self, message, http_context=None, user=None): """ Reports a message to Stackdriver Error Reporting https://cloud.google.com/error-reporting/docs/formatting-error-messages :type message: str :param message: A user-supplied message to report :type http_context: :class`google.cloud.error_reporting.HTTPContext` :param http_context: The HTTP request which was processed when the error was triggered. :type user: str :param user: The user who caused or was affected by the crash. This can be a user ID, an email address, or an arbitrary token that uniquely identifies the user. When sending an error report, leave this field empty if the user was not logged in. In this case the Error Reporting system will use other data, such as remote IP address, to distinguish affected users. Example: .. code-block:: python >>> client.report("Something went wrong!") """ stack = traceback.extract_stack() last_call = stack[-2] file_path = last_call[0] line_number = last_call[1] function_name = last_call[2] report_location = { "filePath": file_path, "lineNumber": line_number, "functionName": function_name, } self._send_error_report( message, http_context=http_context, user=user, report_location=report_location, )
python
def report(self, message, http_context=None, user=None): """ Reports a message to Stackdriver Error Reporting https://cloud.google.com/error-reporting/docs/formatting-error-messages :type message: str :param message: A user-supplied message to report :type http_context: :class`google.cloud.error_reporting.HTTPContext` :param http_context: The HTTP request which was processed when the error was triggered. :type user: str :param user: The user who caused or was affected by the crash. This can be a user ID, an email address, or an arbitrary token that uniquely identifies the user. When sending an error report, leave this field empty if the user was not logged in. In this case the Error Reporting system will use other data, such as remote IP address, to distinguish affected users. Example: .. code-block:: python >>> client.report("Something went wrong!") """ stack = traceback.extract_stack() last_call = stack[-2] file_path = last_call[0] line_number = last_call[1] function_name = last_call[2] report_location = { "filePath": file_path, "lineNumber": line_number, "functionName": function_name, } self._send_error_report( message, http_context=http_context, user=user, report_location=report_location, )
[ "def", "report", "(", "self", ",", "message", ",", "http_context", "=", "None", ",", "user", "=", "None", ")", ":", "stack", "=", "traceback", ".", "extract_stack", "(", ")", "last_call", "=", "stack", "[", "-", "2", "]", "file_path", "=", "last_call", "[", "0", "]", "line_number", "=", "last_call", "[", "1", "]", "function_name", "=", "last_call", "[", "2", "]", "report_location", "=", "{", "\"filePath\"", ":", "file_path", ",", "\"lineNumber\"", ":", "line_number", ",", "\"functionName\"", ":", "function_name", ",", "}", "self", ".", "_send_error_report", "(", "message", ",", "http_context", "=", "http_context", ",", "user", "=", "user", ",", "report_location", "=", "report_location", ",", ")" ]
Reports a message to Stackdriver Error Reporting https://cloud.google.com/error-reporting/docs/formatting-error-messages :type message: str :param message: A user-supplied message to report :type http_context: :class`google.cloud.error_reporting.HTTPContext` :param http_context: The HTTP request which was processed when the error was triggered. :type user: str :param user: The user who caused or was affected by the crash. This can be a user ID, an email address, or an arbitrary token that uniquely identifies the user. When sending an error report, leave this field empty if the user was not logged in. In this case the Error Reporting system will use other data, such as remote IP address, to distinguish affected users. Example: .. code-block:: python >>> client.report("Something went wrong!")
[ "Reports", "a", "message", "to", "Stackdriver", "Error", "Reporting" ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/error_reporting/google/cloud/error_reporting/client.py#L290-L333
train
googleapis/google-cloud-python
error_reporting/google/cloud/error_reporting/client.py
Client.report_exception
def report_exception(self, http_context=None, user=None): """ Reports the details of the latest exceptions to Stackdriver Error Reporting. :type http_context: :class`google.cloud.error_reporting.HTTPContext` :param http_context: The HTTP request which was processed when the error was triggered. :type user: str :param user: The user who caused or was affected by the crash. This can be a user ID, an email address, or an arbitrary token that uniquely identifies the user. When sending an error report, leave this field empty if the user was not logged in. In this case the Error Reporting system will use other data, such as remote IP address, to distinguish affected users. Example:: >>> try: >>> raise NameError >>> except Exception: >>> client.report_exception() """ self._send_error_report( traceback.format_exc(), http_context=http_context, user=user )
python
def report_exception(self, http_context=None, user=None): """ Reports the details of the latest exceptions to Stackdriver Error Reporting. :type http_context: :class`google.cloud.error_reporting.HTTPContext` :param http_context: The HTTP request which was processed when the error was triggered. :type user: str :param user: The user who caused or was affected by the crash. This can be a user ID, an email address, or an arbitrary token that uniquely identifies the user. When sending an error report, leave this field empty if the user was not logged in. In this case the Error Reporting system will use other data, such as remote IP address, to distinguish affected users. Example:: >>> try: >>> raise NameError >>> except Exception: >>> client.report_exception() """ self._send_error_report( traceback.format_exc(), http_context=http_context, user=user )
[ "def", "report_exception", "(", "self", ",", "http_context", "=", "None", ",", "user", "=", "None", ")", ":", "self", ".", "_send_error_report", "(", "traceback", ".", "format_exc", "(", ")", ",", "http_context", "=", "http_context", ",", "user", "=", "user", ")" ]
Reports the details of the latest exceptions to Stackdriver Error Reporting. :type http_context: :class`google.cloud.error_reporting.HTTPContext` :param http_context: The HTTP request which was processed when the error was triggered. :type user: str :param user: The user who caused or was affected by the crash. This can be a user ID, an email address, or an arbitrary token that uniquely identifies the user. When sending an error report, leave this field empty if the user was not logged in. In this case the Error Reporting system will use other data, such as remote IP address, to distinguish affected users. Example:: >>> try: >>> raise NameError >>> except Exception: >>> client.report_exception()
[ "Reports", "the", "details", "of", "the", "latest", "exceptions", "to", "Stackdriver", "Error", "Reporting", "." ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/error_reporting/google/cloud/error_reporting/client.py#L335-L361
train
googleapis/google-cloud-python
datastore/google/cloud/datastore/transaction.py
Transaction.current
def current(self): """Return the topmost transaction. .. note:: If the topmost element on the stack is not a transaction, returns None. :rtype: :class:`google.cloud.datastore.transaction.Transaction` or None :returns: The current transaction (if any are active). """ top = super(Transaction, self).current() if isinstance(top, Transaction): return top
python
def current(self): """Return the topmost transaction. .. note:: If the topmost element on the stack is not a transaction, returns None. :rtype: :class:`google.cloud.datastore.transaction.Transaction` or None :returns: The current transaction (if any are active). """ top = super(Transaction, self).current() if isinstance(top, Transaction): return top
[ "def", "current", "(", "self", ")", ":", "top", "=", "super", "(", "Transaction", ",", "self", ")", ".", "current", "(", ")", "if", "isinstance", "(", "top", ",", "Transaction", ")", ":", "return", "top" ]
Return the topmost transaction. .. note:: If the topmost element on the stack is not a transaction, returns None. :rtype: :class:`google.cloud.datastore.transaction.Transaction` or None :returns: The current transaction (if any are active).
[ "Return", "the", "topmost", "transaction", "." ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/datastore/google/cloud/datastore/transaction.py#L181-L194
train
googleapis/google-cloud-python
datastore/google/cloud/datastore/transaction.py
Transaction.begin
def begin(self): """Begins a transaction. This method is called automatically when entering a with statement, however it can be called explicitly if you don't want to use a context manager. :raises: :class:`~exceptions.ValueError` if the transaction has already begun. """ super(Transaction, self).begin() try: response_pb = self._client._datastore_api.begin_transaction(self.project) self._id = response_pb.transaction except: # noqa: E722 do not use bare except, specify exception instead self._status = self._ABORTED raise
python
def begin(self): """Begins a transaction. This method is called automatically when entering a with statement, however it can be called explicitly if you don't want to use a context manager. :raises: :class:`~exceptions.ValueError` if the transaction has already begun. """ super(Transaction, self).begin() try: response_pb = self._client._datastore_api.begin_transaction(self.project) self._id = response_pb.transaction except: # noqa: E722 do not use bare except, specify exception instead self._status = self._ABORTED raise
[ "def", "begin", "(", "self", ")", ":", "super", "(", "Transaction", ",", "self", ")", ".", "begin", "(", ")", "try", ":", "response_pb", "=", "self", ".", "_client", ".", "_datastore_api", ".", "begin_transaction", "(", "self", ".", "project", ")", "self", ".", "_id", "=", "response_pb", ".", "transaction", "except", ":", "# noqa: E722 do not use bare except, specify exception instead", "self", ".", "_status", "=", "self", ".", "_ABORTED", "raise" ]
Begins a transaction. This method is called automatically when entering a with statement, however it can be called explicitly if you don't want to use a context manager. :raises: :class:`~exceptions.ValueError` if the transaction has already begun.
[ "Begins", "a", "transaction", "." ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/datastore/google/cloud/datastore/transaction.py#L196-L212
train
googleapis/google-cloud-python
datastore/google/cloud/datastore/transaction.py
Transaction.rollback
def rollback(self): """Rolls back the current transaction. This method has necessary side-effects: - Sets the current transaction's ID to None. """ try: # No need to use the response it contains nothing. self._client._datastore_api.rollback(self.project, self._id) finally: super(Transaction, self).rollback() # Clear our own ID in case this gets accidentally reused. self._id = None
python
def rollback(self): """Rolls back the current transaction. This method has necessary side-effects: - Sets the current transaction's ID to None. """ try: # No need to use the response it contains nothing. self._client._datastore_api.rollback(self.project, self._id) finally: super(Transaction, self).rollback() # Clear our own ID in case this gets accidentally reused. self._id = None
[ "def", "rollback", "(", "self", ")", ":", "try", ":", "# No need to use the response it contains nothing.", "self", ".", "_client", ".", "_datastore_api", ".", "rollback", "(", "self", ".", "project", ",", "self", ".", "_id", ")", "finally", ":", "super", "(", "Transaction", ",", "self", ")", ".", "rollback", "(", ")", "# Clear our own ID in case this gets accidentally reused.", "self", ".", "_id", "=", "None" ]
Rolls back the current transaction. This method has necessary side-effects: - Sets the current transaction's ID to None.
[ "Rolls", "back", "the", "current", "transaction", "." ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/datastore/google/cloud/datastore/transaction.py#L214-L227
train
googleapis/google-cloud-python
datastore/google/cloud/datastore/transaction.py
Transaction.put
def put(self, entity): """Adds an entity to be committed. Ensures the transaction is not marked readonly. Please see documentation at :meth:`~google.cloud.datastore.batch.Batch.put` :type entity: :class:`~google.cloud.datastore.entity.Entity` :param entity: the entity to be saved. :raises: :class:`RuntimeError` if the transaction is marked ReadOnly """ if self._options.HasField("read_only"): raise RuntimeError("Transaction is read only") else: super(Transaction, self).put(entity)
python
def put(self, entity): """Adds an entity to be committed. Ensures the transaction is not marked readonly. Please see documentation at :meth:`~google.cloud.datastore.batch.Batch.put` :type entity: :class:`~google.cloud.datastore.entity.Entity` :param entity: the entity to be saved. :raises: :class:`RuntimeError` if the transaction is marked ReadOnly """ if self._options.HasField("read_only"): raise RuntimeError("Transaction is read only") else: super(Transaction, self).put(entity)
[ "def", "put", "(", "self", ",", "entity", ")", ":", "if", "self", ".", "_options", ".", "HasField", "(", "\"read_only\"", ")", ":", "raise", "RuntimeError", "(", "\"Transaction is read only\"", ")", "else", ":", "super", "(", "Transaction", ",", "self", ")", ".", "put", "(", "entity", ")" ]
Adds an entity to be committed. Ensures the transaction is not marked readonly. Please see documentation at :meth:`~google.cloud.datastore.batch.Batch.put` :type entity: :class:`~google.cloud.datastore.entity.Entity` :param entity: the entity to be saved. :raises: :class:`RuntimeError` if the transaction is marked ReadOnly
[ "Adds", "an", "entity", "to", "be", "committed", "." ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/datastore/google/cloud/datastore/transaction.py#L246-L262
train
googleapis/google-cloud-python
securitycenter/google/cloud/securitycenter_v1beta1/gapic/security_center_client.py
SecurityCenterClient.list_findings
def list_findings( self, parent, filter_=None, order_by=None, read_time=None, field_mask=None, page_size=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Lists an organization or source's findings. To list across all sources provide a ``-`` as the source id. Example: /v1beta1/organizations/123/sources/-/findings Example: >>> from google.cloud import securitycenter_v1beta1 >>> >>> client = securitycenter_v1beta1.SecurityCenterClient() >>> >>> parent = client.source_path('[ORGANIZATION]', '[SOURCE]') >>> >>> # Iterate over all results >>> for element in client.list_findings(parent): ... # process element ... pass >>> >>> >>> # Alternatively: >>> >>> # Iterate over results one page at a time >>> for page in client.list_findings(parent).pages: ... for element in page: ... # process element ... pass Args: parent (str): Name of the source the findings belong to. Its format is "organizations/[organization\_id]/sources/[source\_id]". To list across all sources provide a source\_id of ``-``. For example: organizations/123/sources/- filter_ (str): Expression that defines the filter to apply across findings. The expression is a list of one or more restrictions combined via logical operators ``AND`` and ``OR``. Parentheses are not supported, and ``OR`` has higher precedence than ``AND``. Restrictions have the form ``<field> <operator> <value>`` and may have a ``-`` character in front of them to indicate negation. Examples include: - name - source\_properties.a\_property - security\_marks.marks.marka The supported operators are: - ``=`` for all value types. - ``>``, ``<``, ``>=``, ``<=`` for integer values. - ``:``, meaning substring matching, for strings. The supported value types are: - string literals in quotes. - integer literals without quotes. - boolean literals ``true`` and ``false`` without quotes. For example, ``source_properties.size = 100`` is a valid filter string. 
order_by (str): Expression that defines what fields and order to use for sorting. The string value should follow SQL syntax: comma separated list of fields. For example: "name,resource\_properties.a\_property". The default sorting order is ascending. To specify descending order for a field, a suffix " desc" should be appended to the field name. For example: "name desc,source\_properties.a\_property". Redundant space characters in the syntax are insignificant. "name desc,source\_properties.a\_property" and " name desc , source\_properties.a\_property " are equivalent. read_time (Union[dict, ~google.cloud.securitycenter_v1beta1.types.Timestamp]): Time used as a reference point when filtering findings. The filter is limited to findings existing at the supplied time and their values are those at that specific time. Absence of this field will default to the API's version of NOW. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.securitycenter_v1beta1.types.Timestamp` field_mask (Union[dict, ~google.cloud.securitycenter_v1beta1.types.FieldMask]): Optional. A field mask to specify the Finding fields to be listed in the response. An empty field mask will list all fields. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.securitycenter_v1beta1.types.FieldMask` page_size (int): The maximum number of resources contained in the underlying API response. If page streaming is performed per- resource, this parameter does not affect the return value. If page streaming is performed per-page, this determines the maximum number of resources in a page. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. 
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.gax.PageIterator` instance. By default, this is an iterable of :class:`~google.cloud.securitycenter_v1beta1.types.Finding` instances. This object can also be configured to iterate over the pages of the response through the `options` parameter. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. if "list_findings" not in self._inner_api_calls: self._inner_api_calls[ "list_findings" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.list_findings, default_retry=self._method_configs["ListFindings"].retry, default_timeout=self._method_configs["ListFindings"].timeout, client_info=self._client_info, ) request = securitycenter_service_pb2.ListFindingsRequest( parent=parent, filter=filter_, order_by=order_by, read_time=read_time, field_mask=field_mask, page_size=page_size, ) if metadata is None: metadata = [] metadata = list(metadata) try: routing_header = [("parent", parent)] except AttributeError: pass else: routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) iterator = google.api_core.page_iterator.GRPCIterator( client=None, method=functools.partial( self._inner_api_calls["list_findings"], retry=retry, timeout=timeout, metadata=metadata, ), request=request, items_field="findings", request_token_field="page_token", response_token_field="next_page_token", ) return iterator
python
def list_findings( self, parent, filter_=None, order_by=None, read_time=None, field_mask=None, page_size=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Lists an organization or source's findings. To list across all sources provide a ``-`` as the source id. Example: /v1beta1/organizations/123/sources/-/findings Example: >>> from google.cloud import securitycenter_v1beta1 >>> >>> client = securitycenter_v1beta1.SecurityCenterClient() >>> >>> parent = client.source_path('[ORGANIZATION]', '[SOURCE]') >>> >>> # Iterate over all results >>> for element in client.list_findings(parent): ... # process element ... pass >>> >>> >>> # Alternatively: >>> >>> # Iterate over results one page at a time >>> for page in client.list_findings(parent).pages: ... for element in page: ... # process element ... pass Args: parent (str): Name of the source the findings belong to. Its format is "organizations/[organization\_id]/sources/[source\_id]". To list across all sources provide a source\_id of ``-``. For example: organizations/123/sources/- filter_ (str): Expression that defines the filter to apply across findings. The expression is a list of one or more restrictions combined via logical operators ``AND`` and ``OR``. Parentheses are not supported, and ``OR`` has higher precedence than ``AND``. Restrictions have the form ``<field> <operator> <value>`` and may have a ``-`` character in front of them to indicate negation. Examples include: - name - source\_properties.a\_property - security\_marks.marks.marka The supported operators are: - ``=`` for all value types. - ``>``, ``<``, ``>=``, ``<=`` for integer values. - ``:``, meaning substring matching, for strings. The supported value types are: - string literals in quotes. - integer literals without quotes. - boolean literals ``true`` and ``false`` without quotes. For example, ``source_properties.size = 100`` is a valid filter string. 
order_by (str): Expression that defines what fields and order to use for sorting. The string value should follow SQL syntax: comma separated list of fields. For example: "name,resource\_properties.a\_property". The default sorting order is ascending. To specify descending order for a field, a suffix " desc" should be appended to the field name. For example: "name desc,source\_properties.a\_property". Redundant space characters in the syntax are insignificant. "name desc,source\_properties.a\_property" and " name desc , source\_properties.a\_property " are equivalent. read_time (Union[dict, ~google.cloud.securitycenter_v1beta1.types.Timestamp]): Time used as a reference point when filtering findings. The filter is limited to findings existing at the supplied time and their values are those at that specific time. Absence of this field will default to the API's version of NOW. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.securitycenter_v1beta1.types.Timestamp` field_mask (Union[dict, ~google.cloud.securitycenter_v1beta1.types.FieldMask]): Optional. A field mask to specify the Finding fields to be listed in the response. An empty field mask will list all fields. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.securitycenter_v1beta1.types.FieldMask` page_size (int): The maximum number of resources contained in the underlying API response. If page streaming is performed per- resource, this parameter does not affect the return value. If page streaming is performed per-page, this determines the maximum number of resources in a page. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. 
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.gax.PageIterator` instance. By default, this is an iterable of :class:`~google.cloud.securitycenter_v1beta1.types.Finding` instances. This object can also be configured to iterate over the pages of the response through the `options` parameter. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. if "list_findings" not in self._inner_api_calls: self._inner_api_calls[ "list_findings" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.list_findings, default_retry=self._method_configs["ListFindings"].retry, default_timeout=self._method_configs["ListFindings"].timeout, client_info=self._client_info, ) request = securitycenter_service_pb2.ListFindingsRequest( parent=parent, filter=filter_, order_by=order_by, read_time=read_time, field_mask=field_mask, page_size=page_size, ) if metadata is None: metadata = [] metadata = list(metadata) try: routing_header = [("parent", parent)] except AttributeError: pass else: routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) iterator = google.api_core.page_iterator.GRPCIterator( client=None, method=functools.partial( self._inner_api_calls["list_findings"], retry=retry, timeout=timeout, metadata=metadata, ), request=request, items_field="findings", request_token_field="page_token", response_token_field="next_page_token", ) return iterator
[ "def", "list_findings", "(", "self", ",", "parent", ",", "filter_", "=", "None", ",", "order_by", "=", "None", ",", "read_time", "=", "None", ",", "field_mask", "=", "None", ",", "page_size", "=", "None", ",", "retry", "=", "google", ".", "api_core", ".", "gapic_v1", ".", "method", ".", "DEFAULT", ",", "timeout", "=", "google", ".", "api_core", ".", "gapic_v1", ".", "method", ".", "DEFAULT", ",", "metadata", "=", "None", ",", ")", ":", "# Wrap the transport method to add retry and timeout logic.", "if", "\"list_findings\"", "not", "in", "self", ".", "_inner_api_calls", ":", "self", ".", "_inner_api_calls", "[", "\"list_findings\"", "]", "=", "google", ".", "api_core", ".", "gapic_v1", ".", "method", ".", "wrap_method", "(", "self", ".", "transport", ".", "list_findings", ",", "default_retry", "=", "self", ".", "_method_configs", "[", "\"ListFindings\"", "]", ".", "retry", ",", "default_timeout", "=", "self", ".", "_method_configs", "[", "\"ListFindings\"", "]", ".", "timeout", ",", "client_info", "=", "self", ".", "_client_info", ",", ")", "request", "=", "securitycenter_service_pb2", ".", "ListFindingsRequest", "(", "parent", "=", "parent", ",", "filter", "=", "filter_", ",", "order_by", "=", "order_by", ",", "read_time", "=", "read_time", ",", "field_mask", "=", "field_mask", ",", "page_size", "=", "page_size", ",", ")", "if", "metadata", "is", "None", ":", "metadata", "=", "[", "]", "metadata", "=", "list", "(", "metadata", ")", "try", ":", "routing_header", "=", "[", "(", "\"parent\"", ",", "parent", ")", "]", "except", "AttributeError", ":", "pass", "else", ":", "routing_metadata", "=", "google", ".", "api_core", ".", "gapic_v1", ".", "routing_header", ".", "to_grpc_metadata", "(", "routing_header", ")", "metadata", ".", "append", "(", "routing_metadata", ")", "iterator", "=", "google", ".", "api_core", ".", "page_iterator", ".", "GRPCIterator", "(", "client", "=", "None", ",", "method", "=", "functools", ".", "partial", "(", "self", ".", "_inner_api_calls", 
"[", "\"list_findings\"", "]", ",", "retry", "=", "retry", ",", "timeout", "=", "timeout", ",", "metadata", "=", "metadata", ",", ")", ",", "request", "=", "request", ",", "items_field", "=", "\"findings\"", ",", "request_token_field", "=", "\"page_token\"", ",", "response_token_field", "=", "\"next_page_token\"", ",", ")", "return", "iterator" ]
Lists an organization or source's findings. To list across all sources provide a ``-`` as the source id. Example: /v1beta1/organizations/123/sources/-/findings Example: >>> from google.cloud import securitycenter_v1beta1 >>> >>> client = securitycenter_v1beta1.SecurityCenterClient() >>> >>> parent = client.source_path('[ORGANIZATION]', '[SOURCE]') >>> >>> # Iterate over all results >>> for element in client.list_findings(parent): ... # process element ... pass >>> >>> >>> # Alternatively: >>> >>> # Iterate over results one page at a time >>> for page in client.list_findings(parent).pages: ... for element in page: ... # process element ... pass Args: parent (str): Name of the source the findings belong to. Its format is "organizations/[organization\_id]/sources/[source\_id]". To list across all sources provide a source\_id of ``-``. For example: organizations/123/sources/- filter_ (str): Expression that defines the filter to apply across findings. The expression is a list of one or more restrictions combined via logical operators ``AND`` and ``OR``. Parentheses are not supported, and ``OR`` has higher precedence than ``AND``. Restrictions have the form ``<field> <operator> <value>`` and may have a ``-`` character in front of them to indicate negation. Examples include: - name - source\_properties.a\_property - security\_marks.marks.marka The supported operators are: - ``=`` for all value types. - ``>``, ``<``, ``>=``, ``<=`` for integer values. - ``:``, meaning substring matching, for strings. The supported value types are: - string literals in quotes. - integer literals without quotes. - boolean literals ``true`` and ``false`` without quotes. For example, ``source_properties.size = 100`` is a valid filter string. order_by (str): Expression that defines what fields and order to use for sorting. The string value should follow SQL syntax: comma separated list of fields. For example: "name,resource\_properties.a\_property". The default sorting order is ascending. 
To specify descending order for a field, a suffix " desc" should be appended to the field name. For example: "name desc,source\_properties.a\_property". Redundant space characters in the syntax are insignificant. "name desc,source\_properties.a\_property" and " name desc , source\_properties.a\_property " are equivalent. read_time (Union[dict, ~google.cloud.securitycenter_v1beta1.types.Timestamp]): Time used as a reference point when filtering findings. The filter is limited to findings existing at the supplied time and their values are those at that specific time. Absence of this field will default to the API's version of NOW. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.securitycenter_v1beta1.types.Timestamp` field_mask (Union[dict, ~google.cloud.securitycenter_v1beta1.types.FieldMask]): Optional. A field mask to specify the Finding fields to be listed in the response. An empty field mask will list all fields. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.securitycenter_v1beta1.types.FieldMask` page_size (int): The maximum number of resources contained in the underlying API response. If page streaming is performed per- resource, this parameter does not affect the return value. If page streaming is performed per-page, this determines the maximum number of resources in a page. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.gax.PageIterator` instance. By default, this is an iterable of :class:`~google.cloud.securitycenter_v1beta1.types.Finding` instances. 
This object can also be configured to iterate over the pages of the response through the `options` parameter. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid.
[ "Lists", "an", "organization", "or", "source", "s", "findings", "." ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/securitycenter/google/cloud/securitycenter_v1beta1/gapic/security_center_client.py#L1157-L1320
train
googleapis/google-cloud-python
logging/google/cloud/logging/metric.py
Metric.from_api_repr
def from_api_repr(cls, resource, client): """Factory: construct a metric given its API representation :type resource: dict :param resource: metric resource representation returned from the API :type client: :class:`google.cloud.logging.client.Client` :param client: Client which holds credentials and project configuration for the metric. :rtype: :class:`google.cloud.logging.metric.Metric` :returns: Metric parsed from ``resource``. """ metric_name = resource["name"] filter_ = resource["filter"] description = resource.get("description", "") return cls(metric_name, filter_, client=client, description=description)
python
def from_api_repr(cls, resource, client): """Factory: construct a metric given its API representation :type resource: dict :param resource: metric resource representation returned from the API :type client: :class:`google.cloud.logging.client.Client` :param client: Client which holds credentials and project configuration for the metric. :rtype: :class:`google.cloud.logging.metric.Metric` :returns: Metric parsed from ``resource``. """ metric_name = resource["name"] filter_ = resource["filter"] description = resource.get("description", "") return cls(metric_name, filter_, client=client, description=description)
[ "def", "from_api_repr", "(", "cls", ",", "resource", ",", "client", ")", ":", "metric_name", "=", "resource", "[", "\"name\"", "]", "filter_", "=", "resource", "[", "\"filter\"", "]", "description", "=", "resource", ".", "get", "(", "\"description\"", ",", "\"\"", ")", "return", "cls", "(", "metric_name", ",", "filter_", ",", "client", "=", "client", ",", "description", "=", "description", ")" ]
Factory: construct a metric given its API representation :type resource: dict :param resource: metric resource representation returned from the API :type client: :class:`google.cloud.logging.client.Client` :param client: Client which holds credentials and project configuration for the metric. :rtype: :class:`google.cloud.logging.metric.Metric` :returns: Metric parsed from ``resource``.
[ "Factory", ":", "construct", "a", "metric", "given", "its", "API", "representation" ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/logging/google/cloud/logging/metric.py#L69-L85
train
googleapis/google-cloud-python
logging/google/cloud/logging/metric.py
Metric.create
def create(self, client=None): """API call: create the metric via a PUT request See https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics/create :type client: :class:`~google.cloud.logging.client.Client` or ``NoneType`` :param client: the client to use. If not passed, falls back to the ``client`` stored on the current metric. """ client = self._require_client(client) client.metrics_api.metric_create( self.project, self.name, self.filter_, self.description )
python
def create(self, client=None): """API call: create the metric via a PUT request See https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics/create :type client: :class:`~google.cloud.logging.client.Client` or ``NoneType`` :param client: the client to use. If not passed, falls back to the ``client`` stored on the current metric. """ client = self._require_client(client) client.metrics_api.metric_create( self.project, self.name, self.filter_, self.description )
[ "def", "create", "(", "self", ",", "client", "=", "None", ")", ":", "client", "=", "self", ".", "_require_client", "(", "client", ")", "client", ".", "metrics_api", ".", "metric_create", "(", "self", ".", "project", ",", "self", ".", "name", ",", "self", ".", "filter_", ",", "self", ".", "description", ")" ]
API call: create the metric via a PUT request See https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics/create :type client: :class:`~google.cloud.logging.client.Client` or ``NoneType`` :param client: the client to use. If not passed, falls back to the ``client`` stored on the current metric.
[ "API", "call", ":", "create", "the", "metric", "via", "a", "PUT", "request" ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/logging/google/cloud/logging/metric.py#L102-L116
train
googleapis/google-cloud-python
logging/google/cloud/logging/metric.py
Metric.exists
def exists(self, client=None): """API call: test for the existence of the metric via a GET request See https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics/get :type client: :class:`~google.cloud.logging.client.Client` or ``NoneType`` :param client: the client to use. If not passed, falls back to the ``client`` stored on the current metric. :rtype: bool :returns: Boolean indicating existence of the metric. """ client = self._require_client(client) try: client.metrics_api.metric_get(self.project, self.name) except NotFound: return False else: return True
python
def exists(self, client=None): """API call: test for the existence of the metric via a GET request See https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics/get :type client: :class:`~google.cloud.logging.client.Client` or ``NoneType`` :param client: the client to use. If not passed, falls back to the ``client`` stored on the current metric. :rtype: bool :returns: Boolean indicating existence of the metric. """ client = self._require_client(client) try: client.metrics_api.metric_get(self.project, self.name) except NotFound: return False else: return True
[ "def", "exists", "(", "self", ",", "client", "=", "None", ")", ":", "client", "=", "self", ".", "_require_client", "(", "client", ")", "try", ":", "client", ".", "metrics_api", ".", "metric_get", "(", "self", ".", "project", ",", "self", ".", "name", ")", "except", "NotFound", ":", "return", "False", "else", ":", "return", "True" ]
API call: test for the existence of the metric via a GET request See https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics/get :type client: :class:`~google.cloud.logging.client.Client` or ``NoneType`` :param client: the client to use. If not passed, falls back to the ``client`` stored on the current metric. :rtype: bool :returns: Boolean indicating existence of the metric.
[ "API", "call", ":", "test", "for", "the", "existence", "of", "the", "metric", "via", "a", "GET", "request" ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/logging/google/cloud/logging/metric.py#L118-L139
train
googleapis/google-cloud-python
logging/google/cloud/logging/metric.py
Metric.reload
def reload(self, client=None): """API call: sync local metric configuration via a GET request See https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics/get :type client: :class:`~google.cloud.logging.client.Client` or ``NoneType`` :param client: the client to use. If not passed, falls back to the ``client`` stored on the current metric. """ client = self._require_client(client) data = client.metrics_api.metric_get(self.project, self.name) self.description = data.get("description", "") self.filter_ = data["filter"]
python
def reload(self, client=None): """API call: sync local metric configuration via a GET request See https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics/get :type client: :class:`~google.cloud.logging.client.Client` or ``NoneType`` :param client: the client to use. If not passed, falls back to the ``client`` stored on the current metric. """ client = self._require_client(client) data = client.metrics_api.metric_get(self.project, self.name) self.description = data.get("description", "") self.filter_ = data["filter"]
[ "def", "reload", "(", "self", ",", "client", "=", "None", ")", ":", "client", "=", "self", ".", "_require_client", "(", "client", ")", "data", "=", "client", ".", "metrics_api", ".", "metric_get", "(", "self", ".", "project", ",", "self", ".", "name", ")", "self", ".", "description", "=", "data", ".", "get", "(", "\"description\"", ",", "\"\"", ")", "self", ".", "filter_", "=", "data", "[", "\"filter\"", "]" ]
API call: sync local metric configuration via a GET request See https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics/get :type client: :class:`~google.cloud.logging.client.Client` or ``NoneType`` :param client: the client to use. If not passed, falls back to the ``client`` stored on the current metric.
[ "API", "call", ":", "sync", "local", "metric", "configuration", "via", "a", "GET", "request" ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/logging/google/cloud/logging/metric.py#L141-L155
train
googleapis/google-cloud-python
logging/google/cloud/logging/metric.py
Metric.update
def update(self, client=None): """API call: update metric configuration via a PUT request See https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics/update :type client: :class:`~google.cloud.logging.client.Client` or ``NoneType`` :param client: the client to use. If not passed, falls back to the ``client`` stored on the current metric. """ client = self._require_client(client) client.metrics_api.metric_update( self.project, self.name, self.filter_, self.description )
python
def update(self, client=None): """API call: update metric configuration via a PUT request See https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics/update :type client: :class:`~google.cloud.logging.client.Client` or ``NoneType`` :param client: the client to use. If not passed, falls back to the ``client`` stored on the current metric. """ client = self._require_client(client) client.metrics_api.metric_update( self.project, self.name, self.filter_, self.description )
[ "def", "update", "(", "self", ",", "client", "=", "None", ")", ":", "client", "=", "self", ".", "_require_client", "(", "client", ")", "client", ".", "metrics_api", ".", "metric_update", "(", "self", ".", "project", ",", "self", ".", "name", ",", "self", ".", "filter_", ",", "self", ".", "description", ")" ]
API call: update metric configuration via a PUT request See https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics/update :type client: :class:`~google.cloud.logging.client.Client` or ``NoneType`` :param client: the client to use. If not passed, falls back to the ``client`` stored on the current metric.
[ "API", "call", ":", "update", "metric", "configuration", "via", "a", "PUT", "request" ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/logging/google/cloud/logging/metric.py#L157-L171
train
googleapis/google-cloud-python
logging/google/cloud/logging/metric.py
Metric.delete
def delete(self, client=None): """API call: delete a metric via a DELETE request See https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics/delete :type client: :class:`~google.cloud.logging.client.Client` or ``NoneType`` :param client: the client to use. If not passed, falls back to the ``client`` stored on the current metric. """ client = self._require_client(client) client.metrics_api.metric_delete(self.project, self.name)
python
def delete(self, client=None): """API call: delete a metric via a DELETE request See https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics/delete :type client: :class:`~google.cloud.logging.client.Client` or ``NoneType`` :param client: the client to use. If not passed, falls back to the ``client`` stored on the current metric. """ client = self._require_client(client) client.metrics_api.metric_delete(self.project, self.name)
[ "def", "delete", "(", "self", ",", "client", "=", "None", ")", ":", "client", "=", "self", ".", "_require_client", "(", "client", ")", "client", ".", "metrics_api", ".", "metric_delete", "(", "self", ".", "project", ",", "self", ".", "name", ")" ]
API call: delete a metric via a DELETE request See https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics/delete :type client: :class:`~google.cloud.logging.client.Client` or ``NoneType`` :param client: the client to use. If not passed, falls back to the ``client`` stored on the current metric.
[ "API", "call", ":", "delete", "a", "metric", "via", "a", "DELETE", "request" ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/logging/google/cloud/logging/metric.py#L173-L185
train
googleapis/google-cloud-python
trace/google/cloud/trace/_gapic.py
_dict_mapping_to_pb
def _dict_mapping_to_pb(mapping, proto_type): """ Convert a dict to protobuf. Args: mapping (dict): A dict that needs to be converted to protobuf. proto_type (str): The type of the Protobuf. Returns: An instance of the specified protobuf. """ converted_pb = getattr(trace_pb2, proto_type)() ParseDict(mapping, converted_pb) return converted_pb
python
def _dict_mapping_to_pb(mapping, proto_type): """ Convert a dict to protobuf. Args: mapping (dict): A dict that needs to be converted to protobuf. proto_type (str): The type of the Protobuf. Returns: An instance of the specified protobuf. """ converted_pb = getattr(trace_pb2, proto_type)() ParseDict(mapping, converted_pb) return converted_pb
[ "def", "_dict_mapping_to_pb", "(", "mapping", ",", "proto_type", ")", ":", "converted_pb", "=", "getattr", "(", "trace_pb2", ",", "proto_type", ")", "(", ")", "ParseDict", "(", "mapping", ",", "converted_pb", ")", "return", "converted_pb" ]
Convert a dict to protobuf. Args: mapping (dict): A dict that needs to be converted to protobuf. proto_type (str): The type of the Protobuf. Returns: An instance of the specified protobuf.
[ "Convert", "a", "dict", "to", "protobuf", "." ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/trace/google/cloud/trace/_gapic.py#L245-L258
train
googleapis/google-cloud-python
trace/google/cloud/trace/_gapic.py
_span_attrs_to_pb
def _span_attrs_to_pb(span_attr, proto_type): """ Convert a span attribute dict to protobuf, including Links, Attributes, TimeEvents. Args: span_attr (dict): A dict that needs to be converted to protobuf. proto_type (str): The type of the Protobuf. Returns: An instance of the specified protobuf. """ attr_pb = getattr(trace_pb2.Span, proto_type)() ParseDict(span_attr, attr_pb) return attr_pb
python
def _span_attrs_to_pb(span_attr, proto_type): """ Convert a span attribute dict to protobuf, including Links, Attributes, TimeEvents. Args: span_attr (dict): A dict that needs to be converted to protobuf. proto_type (str): The type of the Protobuf. Returns: An instance of the specified protobuf. """ attr_pb = getattr(trace_pb2.Span, proto_type)() ParseDict(span_attr, attr_pb) return attr_pb
[ "def", "_span_attrs_to_pb", "(", "span_attr", ",", "proto_type", ")", ":", "attr_pb", "=", "getattr", "(", "trace_pb2", ".", "Span", ",", "proto_type", ")", "(", ")", "ParseDict", "(", "span_attr", ",", "attr_pb", ")", "return", "attr_pb" ]
Convert a span attribute dict to protobuf, including Links, Attributes, TimeEvents. Args: span_attr (dict): A dict that needs to be converted to protobuf. proto_type (str): The type of the Protobuf. Returns: An instance of the specified protobuf.
[ "Convert", "a", "span", "attribute", "dict", "to", "protobuf", "including", "Links", "Attributes", "TimeEvents", "." ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/trace/google/cloud/trace/_gapic.py#L261-L275
train
googleapis/google-cloud-python
trace/google/cloud/trace/_gapic.py
_value_to_pb
def _value_to_pb(value, proto_type): """ Convert a value to protobuf. e.g. BoolValue, Int32Value. Args: value (dict): A dict that needs to be converted to protobuf. proto_type (str): The type of the Protobuf. Returns: An instance of the specified protobuf. """ data_type_pb = getattr(google_dot_protobuf_dot_wrappers__pb2, proto_type)() ParseDict(value, data_type_pb) return data_type_pb
python
def _value_to_pb(value, proto_type): """ Convert a value to protobuf. e.g. BoolValue, Int32Value. Args: value (dict): A dict that needs to be converted to protobuf. proto_type (str): The type of the Protobuf. Returns: An instance of the specified protobuf. """ data_type_pb = getattr(google_dot_protobuf_dot_wrappers__pb2, proto_type)() ParseDict(value, data_type_pb) return data_type_pb
[ "def", "_value_to_pb", "(", "value", ",", "proto_type", ")", ":", "data_type_pb", "=", "getattr", "(", "google_dot_protobuf_dot_wrappers__pb2", ",", "proto_type", ")", "(", ")", "ParseDict", "(", "value", ",", "data_type_pb", ")", "return", "data_type_pb" ]
Convert a value to protobuf. e.g. BoolValue, Int32Value. Args: value (dict): A dict that needs to be converted to protobuf. proto_type (str): The type of the Protobuf. Returns: An instance of the specified protobuf.
[ "Convert", "a", "value", "to", "protobuf", ".", "e", ".", "g", ".", "BoolValue", "Int32Value", "." ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/trace/google/cloud/trace/_gapic.py#L293-L306
train
googleapis/google-cloud-python
trace/google/cloud/trace/_gapic.py
_TraceAPI.batch_write_spans
def batch_write_spans( self, name, spans, retry=method.DEFAULT, timeout=method.DEFAULT ): """ Sends new spans to Stackdriver Trace or updates existing traces. If the name of a trace that you send matches that of an existing trace, new spans are added to the existing trace. Attempt to update existing spans results undefined behavior. If the name does not match, a new trace is created with given set of spans. Args: name (str): Required. Name of the project where the spans belong. The format is ``projects/PROJECT_ID``. spans (list[Union[dict, ~google.cloud.trace_v2.types.Span]]): A collection of spans. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.trace_v2.types.Span` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ spans_pb_list = [] for span_mapping in spans["spans"]: span_pb = _dict_mapping_to_pb(span_mapping, "Span") spans_pb_list.append(span_pb) self._gapic_api.batch_write_spans( name=name, spans=spans_pb_list, retry=retry, timeout=timeout )
python
def batch_write_spans( self, name, spans, retry=method.DEFAULT, timeout=method.DEFAULT ): """ Sends new spans to Stackdriver Trace or updates existing traces. If the name of a trace that you send matches that of an existing trace, new spans are added to the existing trace. Attempt to update existing spans results undefined behavior. If the name does not match, a new trace is created with given set of spans. Args: name (str): Required. Name of the project where the spans belong. The format is ``projects/PROJECT_ID``. spans (list[Union[dict, ~google.cloud.trace_v2.types.Span]]): A collection of spans. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.trace_v2.types.Span` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ spans_pb_list = [] for span_mapping in spans["spans"]: span_pb = _dict_mapping_to_pb(span_mapping, "Span") spans_pb_list.append(span_pb) self._gapic_api.batch_write_spans( name=name, spans=spans_pb_list, retry=retry, timeout=timeout )
[ "def", "batch_write_spans", "(", "self", ",", "name", ",", "spans", ",", "retry", "=", "method", ".", "DEFAULT", ",", "timeout", "=", "method", ".", "DEFAULT", ")", ":", "spans_pb_list", "=", "[", "]", "for", "span_mapping", "in", "spans", "[", "\"spans\"", "]", ":", "span_pb", "=", "_dict_mapping_to_pb", "(", "span_mapping", ",", "\"Span\"", ")", "spans_pb_list", ".", "append", "(", "span_pb", ")", "self", ".", "_gapic_api", ".", "batch_write_spans", "(", "name", "=", "name", ",", "spans", "=", "spans_pb_list", ",", "retry", "=", "retry", ",", "timeout", "=", "timeout", ")" ]
Sends new spans to Stackdriver Trace or updates existing traces. If the name of a trace that you send matches that of an existing trace, new spans are added to the existing trace. Attempt to update existing spans results undefined behavior. If the name does not match, a new trace is created with given set of spans. Args: name (str): Required. Name of the project where the spans belong. The format is ``projects/PROJECT_ID``. spans (list[Union[dict, ~google.cloud.trace_v2.types.Span]]): A collection of spans. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.trace_v2.types.Span` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid.
[ "Sends", "new", "spans", "to", "Stackdriver", "Trace", "or", "updates", "existing", "traces", ".", "If", "the", "name", "of", "a", "trace", "that", "you", "send", "matches", "that", "of", "an", "existing", "trace", "new", "spans", "are", "added", "to", "the", "existing", "trace", ".", "Attempt", "to", "update", "existing", "spans", "results", "undefined", "behavior", ".", "If", "the", "name", "does", "not", "match", "a", "new", "trace", "is", "created", "with", "given", "set", "of", "spans", "." ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/trace/google/cloud/trace/_gapic.py#L51-L90
train
googleapis/google-cloud-python
trace/google/cloud/trace/_gapic.py
_TraceAPI.create_span
def create_span( self, name, span_id, display_name, start_time, end_time, parent_span_id=None, attributes=None, stack_trace=None, time_events=None, links=None, status=None, same_process_as_parent_span=None, child_span_count=None, retry=method.DEFAULT, timeout=method.DEFAULT, ): """ Creates a new Span. Example: >>> from google.cloud import trace_v2 >>> >>> client = trace_v2.TraceServiceClient() >>> >>> name = client.span_path('[PROJECT]', '[TRACE]', '[SPAN]') >>> span_id = '' >>> display_name = {} >>> start_time = {} >>> end_time = {} >>> >>> response = client.create_span(name, span_id, display_name, start_time, end_time) Args: name (str): The resource name of the span in the following format: :: projects/[PROJECT_ID]/traces/[TRACE_ID]/spans/[SPAN_ID] [TRACE_ID] is a unique identifier for a trace within a project. [SPAN_ID] is a unique identifier for a span within a trace, assigned when the span is created. span_id (str): The [SPAN_ID] portion of the span's resource name. The ID is a 16-character hexadecimal encoding of an 8-byte array. display_name (dict): A description of the span's operation (up to 128 bytes). Stackdriver Trace displays the description in the {% dynamic print site_values.console_name %}. For example, the display name can be a qualified method name or a file name and a line number where the operation is called. A best practice is to use the same display name within an application and at the same call point. This makes it easier to correlate spans in different traces. Contains two fields, value is the truncated name, truncatedByteCount is the number of bytes removed from the original string. If 0, then the string was not shortened. start_time (:class:`~datetime.datetime`): The start time of the span. On the client side, this is the time kept by the local machine where the span execution starts. On the server side, this is the time when the server's application handler starts running. end_time (:class:`~datetime.datetime`): The end time of the span. 
On the client side, this is the time kept by the local machine where the span execution ends. On the server side, this is the time when the server application handler stops running. parent_span_id (str): The [SPAN_ID] of this span's parent span. If this is a root span, then this field must be empty. attributes (dict): A set of attributes on the span. There is a limit of 32 attributes per span. stack_trace (dict): Stack trace captured at the start of the span. Contains two fields, stackFrames is a list of stack frames in this call stack, a maximum of 128 frames are allowed per StackFrame; stackTraceHashId is used to conserve network bandwidth for duplicate stack traces within a single trace. time_events (dict): The included time events. There can be up to 32 annotations and 128 message events per span. links (dict): A maximum of 128 links are allowed per Span. status (dict): An optional final status for this span. same_process_as_parent_span (bool): A highly recommended but not required flag that identifies when a trace crosses a process boundary. True when the parent_span belongs to the same process as the current span. child_span_count (int): An optional number of child spans that were generated while this span was active. If set, allows implementation to detect missing child spans. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. Returns: A :class:`~google.cloud.trace_v2.types.Span` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. 
""" # Convert the dict type parameters to protobuf display_name = _dict_mapping_to_pb(display_name, "TruncatableString") start_time = _datetime_to_pb_timestamp(start_time) end_time = _datetime_to_pb_timestamp(end_time) if attributes is not None: attributes = _span_attrs_to_pb(attributes, "Attributes") if stack_trace is not None: stack_trace = _dict_mapping_to_pb(stack_trace, "StackTrace") if time_events is not None: time_events = _span_attrs_to_pb(time_events, "TimeEvents") if links is not None: links = _span_attrs_to_pb(links, "Links") if status is not None: status = _status_mapping_to_pb(status) if same_process_as_parent_span is not None: same_process_as_parent_span = _value_to_pb( same_process_as_parent_span, "BoolValue" ) if child_span_count is not None: child_span_count = _value_to_pb(child_span_count, "Int32Value") return self._gapic_api.create_span( name=name, span_id=span_id, display_name=display_name, start_time=start_time, end_time=end_time, parent_span_id=parent_span_id, attributes=attributes, stack_trace=stack_trace, time_events=time_events, links=links, status=status, same_process_as_parent_span=same_process_as_parent_span, child_span_count=child_span_count, )
python
def create_span( self, name, span_id, display_name, start_time, end_time, parent_span_id=None, attributes=None, stack_trace=None, time_events=None, links=None, status=None, same_process_as_parent_span=None, child_span_count=None, retry=method.DEFAULT, timeout=method.DEFAULT, ): """ Creates a new Span. Example: >>> from google.cloud import trace_v2 >>> >>> client = trace_v2.TraceServiceClient() >>> >>> name = client.span_path('[PROJECT]', '[TRACE]', '[SPAN]') >>> span_id = '' >>> display_name = {} >>> start_time = {} >>> end_time = {} >>> >>> response = client.create_span(name, span_id, display_name, start_time, end_time) Args: name (str): The resource name of the span in the following format: :: projects/[PROJECT_ID]/traces/[TRACE_ID]/spans/[SPAN_ID] [TRACE_ID] is a unique identifier for a trace within a project. [SPAN_ID] is a unique identifier for a span within a trace, assigned when the span is created. span_id (str): The [SPAN_ID] portion of the span's resource name. The ID is a 16-character hexadecimal encoding of an 8-byte array. display_name (dict): A description of the span's operation (up to 128 bytes). Stackdriver Trace displays the description in the {% dynamic print site_values.console_name %}. For example, the display name can be a qualified method name or a file name and a line number where the operation is called. A best practice is to use the same display name within an application and at the same call point. This makes it easier to correlate spans in different traces. Contains two fields, value is the truncated name, truncatedByteCount is the number of bytes removed from the original string. If 0, then the string was not shortened. start_time (:class:`~datetime.datetime`): The start time of the span. On the client side, this is the time kept by the local machine where the span execution starts. On the server side, this is the time when the server's application handler starts running. end_time (:class:`~datetime.datetime`): The end time of the span. 
On the client side, this is the time kept by the local machine where the span execution ends. On the server side, this is the time when the server application handler stops running. parent_span_id (str): The [SPAN_ID] of this span's parent span. If this is a root span, then this field must be empty. attributes (dict): A set of attributes on the span. There is a limit of 32 attributes per span. stack_trace (dict): Stack trace captured at the start of the span. Contains two fields, stackFrames is a list of stack frames in this call stack, a maximum of 128 frames are allowed per StackFrame; stackTraceHashId is used to conserve network bandwidth for duplicate stack traces within a single trace. time_events (dict): The included time events. There can be up to 32 annotations and 128 message events per span. links (dict): A maximum of 128 links are allowed per Span. status (dict): An optional final status for this span. same_process_as_parent_span (bool): A highly recommended but not required flag that identifies when a trace crosses a process boundary. True when the parent_span belongs to the same process as the current span. child_span_count (int): An optional number of child spans that were generated while this span was active. If set, allows implementation to detect missing child spans. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. Returns: A :class:`~google.cloud.trace_v2.types.Span` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. 
""" # Convert the dict type parameters to protobuf display_name = _dict_mapping_to_pb(display_name, "TruncatableString") start_time = _datetime_to_pb_timestamp(start_time) end_time = _datetime_to_pb_timestamp(end_time) if attributes is not None: attributes = _span_attrs_to_pb(attributes, "Attributes") if stack_trace is not None: stack_trace = _dict_mapping_to_pb(stack_trace, "StackTrace") if time_events is not None: time_events = _span_attrs_to_pb(time_events, "TimeEvents") if links is not None: links = _span_attrs_to_pb(links, "Links") if status is not None: status = _status_mapping_to_pb(status) if same_process_as_parent_span is not None: same_process_as_parent_span = _value_to_pb( same_process_as_parent_span, "BoolValue" ) if child_span_count is not None: child_span_count = _value_to_pb(child_span_count, "Int32Value") return self._gapic_api.create_span( name=name, span_id=span_id, display_name=display_name, start_time=start_time, end_time=end_time, parent_span_id=parent_span_id, attributes=attributes, stack_trace=stack_trace, time_events=time_events, links=links, status=status, same_process_as_parent_span=same_process_as_parent_span, child_span_count=child_span_count, )
[ "def", "create_span", "(", "self", ",", "name", ",", "span_id", ",", "display_name", ",", "start_time", ",", "end_time", ",", "parent_span_id", "=", "None", ",", "attributes", "=", "None", ",", "stack_trace", "=", "None", ",", "time_events", "=", "None", ",", "links", "=", "None", ",", "status", "=", "None", ",", "same_process_as_parent_span", "=", "None", ",", "child_span_count", "=", "None", ",", "retry", "=", "method", ".", "DEFAULT", ",", "timeout", "=", "method", ".", "DEFAULT", ",", ")", ":", "# Convert the dict type parameters to protobuf", "display_name", "=", "_dict_mapping_to_pb", "(", "display_name", ",", "\"TruncatableString\"", ")", "start_time", "=", "_datetime_to_pb_timestamp", "(", "start_time", ")", "end_time", "=", "_datetime_to_pb_timestamp", "(", "end_time", ")", "if", "attributes", "is", "not", "None", ":", "attributes", "=", "_span_attrs_to_pb", "(", "attributes", ",", "\"Attributes\"", ")", "if", "stack_trace", "is", "not", "None", ":", "stack_trace", "=", "_dict_mapping_to_pb", "(", "stack_trace", ",", "\"StackTrace\"", ")", "if", "time_events", "is", "not", "None", ":", "time_events", "=", "_span_attrs_to_pb", "(", "time_events", ",", "\"TimeEvents\"", ")", "if", "links", "is", "not", "None", ":", "links", "=", "_span_attrs_to_pb", "(", "links", ",", "\"Links\"", ")", "if", "status", "is", "not", "None", ":", "status", "=", "_status_mapping_to_pb", "(", "status", ")", "if", "same_process_as_parent_span", "is", "not", "None", ":", "same_process_as_parent_span", "=", "_value_to_pb", "(", "same_process_as_parent_span", ",", "\"BoolValue\"", ")", "if", "child_span_count", "is", "not", "None", ":", "child_span_count", "=", "_value_to_pb", "(", "child_span_count", ",", "\"Int32Value\"", ")", "return", "self", ".", "_gapic_api", ".", "create_span", "(", "name", "=", "name", ",", "span_id", "=", "span_id", ",", "display_name", "=", "display_name", ",", "start_time", "=", "start_time", ",", "end_time", "=", "end_time", ",", "parent_span_id", "=", 
"parent_span_id", ",", "attributes", "=", "attributes", ",", "stack_trace", "=", "stack_trace", ",", "time_events", "=", "time_events", ",", "links", "=", "links", ",", "status", "=", "status", ",", "same_process_as_parent_span", "=", "same_process_as_parent_span", ",", "child_span_count", "=", "child_span_count", ",", ")" ]
Creates a new Span. Example: >>> from google.cloud import trace_v2 >>> >>> client = trace_v2.TraceServiceClient() >>> >>> name = client.span_path('[PROJECT]', '[TRACE]', '[SPAN]') >>> span_id = '' >>> display_name = {} >>> start_time = {} >>> end_time = {} >>> >>> response = client.create_span(name, span_id, display_name, start_time, end_time) Args: name (str): The resource name of the span in the following format: :: projects/[PROJECT_ID]/traces/[TRACE_ID]/spans/[SPAN_ID] [TRACE_ID] is a unique identifier for a trace within a project. [SPAN_ID] is a unique identifier for a span within a trace, assigned when the span is created. span_id (str): The [SPAN_ID] portion of the span's resource name. The ID is a 16-character hexadecimal encoding of an 8-byte array. display_name (dict): A description of the span's operation (up to 128 bytes). Stackdriver Trace displays the description in the {% dynamic print site_values.console_name %}. For example, the display name can be a qualified method name or a file name and a line number where the operation is called. A best practice is to use the same display name within an application and at the same call point. This makes it easier to correlate spans in different traces. Contains two fields, value is the truncated name, truncatedByteCount is the number of bytes removed from the original string. If 0, then the string was not shortened. start_time (:class:`~datetime.datetime`): The start time of the span. On the client side, this is the time kept by the local machine where the span execution starts. On the server side, this is the time when the server's application handler starts running. end_time (:class:`~datetime.datetime`): The end time of the span. On the client side, this is the time kept by the local machine where the span execution ends. On the server side, this is the time when the server application handler stops running. parent_span_id (str): The [SPAN_ID] of this span's parent span. 
If this is a root span, then this field must be empty. attributes (dict): A set of attributes on the span. There is a limit of 32 attributes per span. stack_trace (dict): Stack trace captured at the start of the span. Contains two fields, stackFrames is a list of stack frames in this call stack, a maximum of 128 frames are allowed per StackFrame; stackTraceHashId is used to conserve network bandwidth for duplicate stack traces within a single trace. time_events (dict): The included time events. There can be up to 32 annotations and 128 message events per span. links (dict): A maximum of 128 links are allowed per Span. status (dict): An optional final status for this span. same_process_as_parent_span (bool): A highly recommended but not required flag that identifies when a trace crosses a process boundary. True when the parent_span belongs to the same process as the current span. child_span_count (int): An optional number of child spans that were generated while this span was active. If set, allows implementation to detect missing child spans. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. Returns: A :class:`~google.cloud.trace_v2.types.Span` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid.
[ "Creates", "a", "new", "Span", "." ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/trace/google/cloud/trace/_gapic.py#L92-L242
train
googleapis/google-cloud-python
bigtable/google/cloud/bigtable/cluster.py
Cluster.from_pb
def from_pb(cls, cluster_pb, instance): """Creates an cluster instance from a protobuf. For example: .. literalinclude:: snippets.py :start-after: [START bigtable_cluster_from_pb] :end-before: [END bigtable_cluster_from_pb] :type cluster_pb: :class:`instance_pb2.Cluster` :param cluster_pb: An instance protobuf object. :type instance: :class:`google.cloud.bigtable.instance.Instance` :param instance: The instance that owns the cluster. :rtype: :class:`Cluster` :returns: The Cluster parsed from the protobuf response. :raises: :class:`ValueError <exceptions.ValueError>` if the cluster name does not match ``projects/{project}/instances/{instance_id}/clusters/{cluster_id}`` or if the parsed instance ID does not match the istance ID on the client. or if the parsed project ID does not match the project ID on the client. """ match_cluster_name = _CLUSTER_NAME_RE.match(cluster_pb.name) if match_cluster_name is None: raise ValueError( "Cluster protobuf name was not in the " "expected format.", cluster_pb.name, ) if match_cluster_name.group("instance") != instance.instance_id: raise ValueError( "Instance ID on cluster does not match the " "instance ID on the client" ) if match_cluster_name.group("project") != instance._client.project: raise ValueError( "Project ID on cluster does not match the " "project ID on the client" ) cluster_id = match_cluster_name.group("cluster_id") result = cls(cluster_id, instance) result._update_from_pb(cluster_pb) return result
python
def from_pb(cls, cluster_pb, instance): """Creates an cluster instance from a protobuf. For example: .. literalinclude:: snippets.py :start-after: [START bigtable_cluster_from_pb] :end-before: [END bigtable_cluster_from_pb] :type cluster_pb: :class:`instance_pb2.Cluster` :param cluster_pb: An instance protobuf object. :type instance: :class:`google.cloud.bigtable.instance.Instance` :param instance: The instance that owns the cluster. :rtype: :class:`Cluster` :returns: The Cluster parsed from the protobuf response. :raises: :class:`ValueError <exceptions.ValueError>` if the cluster name does not match ``projects/{project}/instances/{instance_id}/clusters/{cluster_id}`` or if the parsed instance ID does not match the istance ID on the client. or if the parsed project ID does not match the project ID on the client. """ match_cluster_name = _CLUSTER_NAME_RE.match(cluster_pb.name) if match_cluster_name is None: raise ValueError( "Cluster protobuf name was not in the " "expected format.", cluster_pb.name, ) if match_cluster_name.group("instance") != instance.instance_id: raise ValueError( "Instance ID on cluster does not match the " "instance ID on the client" ) if match_cluster_name.group("project") != instance._client.project: raise ValueError( "Project ID on cluster does not match the " "project ID on the client" ) cluster_id = match_cluster_name.group("cluster_id") result = cls(cluster_id, instance) result._update_from_pb(cluster_pb) return result
[ "def", "from_pb", "(", "cls", ",", "cluster_pb", ",", "instance", ")", ":", "match_cluster_name", "=", "_CLUSTER_NAME_RE", ".", "match", "(", "cluster_pb", ".", "name", ")", "if", "match_cluster_name", "is", "None", ":", "raise", "ValueError", "(", "\"Cluster protobuf name was not in the \"", "\"expected format.\"", ",", "cluster_pb", ".", "name", ",", ")", "if", "match_cluster_name", ".", "group", "(", "\"instance\"", ")", "!=", "instance", ".", "instance_id", ":", "raise", "ValueError", "(", "\"Instance ID on cluster does not match the \"", "\"instance ID on the client\"", ")", "if", "match_cluster_name", ".", "group", "(", "\"project\"", ")", "!=", "instance", ".", "_client", ".", "project", ":", "raise", "ValueError", "(", "\"Project ID on cluster does not match the \"", "\"project ID on the client\"", ")", "cluster_id", "=", "match_cluster_name", ".", "group", "(", "\"cluster_id\"", ")", "result", "=", "cls", "(", "cluster_id", ",", "instance", ")", "result", ".", "_update_from_pb", "(", "cluster_pb", ")", "return", "result" ]
Creates an cluster instance from a protobuf. For example: .. literalinclude:: snippets.py :start-after: [START bigtable_cluster_from_pb] :end-before: [END bigtable_cluster_from_pb] :type cluster_pb: :class:`instance_pb2.Cluster` :param cluster_pb: An instance protobuf object. :type instance: :class:`google.cloud.bigtable.instance.Instance` :param instance: The instance that owns the cluster. :rtype: :class:`Cluster` :returns: The Cluster parsed from the protobuf response. :raises: :class:`ValueError <exceptions.ValueError>` if the cluster name does not match ``projects/{project}/instances/{instance_id}/clusters/{cluster_id}`` or if the parsed instance ID does not match the istance ID on the client. or if the parsed project ID does not match the project ID on the client.
[ "Creates", "an", "cluster", "instance", "from", "a", "protobuf", "." ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/bigtable/google/cloud/bigtable/cluster.py#L94-L137
train
googleapis/google-cloud-python
bigtable/google/cloud/bigtable/cluster.py
Cluster._update_from_pb
def _update_from_pb(self, cluster_pb): """Refresh self from the server-provided protobuf. Helper for :meth:`from_pb` and :meth:`reload`. """ self.location_id = cluster_pb.location.split("/")[-1] self.serve_nodes = cluster_pb.serve_nodes self.default_storage_type = cluster_pb.default_storage_type self._state = cluster_pb.state
python
def _update_from_pb(self, cluster_pb): """Refresh self from the server-provided protobuf. Helper for :meth:`from_pb` and :meth:`reload`. """ self.location_id = cluster_pb.location.split("/")[-1] self.serve_nodes = cluster_pb.serve_nodes self.default_storage_type = cluster_pb.default_storage_type self._state = cluster_pb.state
[ "def", "_update_from_pb", "(", "self", ",", "cluster_pb", ")", ":", "self", ".", "location_id", "=", "cluster_pb", ".", "location", ".", "split", "(", "\"/\"", ")", "[", "-", "1", "]", "self", ".", "serve_nodes", "=", "cluster_pb", ".", "serve_nodes", "self", ".", "default_storage_type", "=", "cluster_pb", ".", "default_storage_type", "self", ".", "_state", "=", "cluster_pb", ".", "state" ]
Refresh self from the server-provided protobuf. Helper for :meth:`from_pb` and :meth:`reload`.
[ "Refresh", "self", "from", "the", "server", "-", "provided", "protobuf", ".", "Helper", "for", ":", "meth", ":", "from_pb", "and", ":", "meth", ":", "reload", "." ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/bigtable/google/cloud/bigtable/cluster.py#L139-L147
train
googleapis/google-cloud-python
bigtable/google/cloud/bigtable/cluster.py
Cluster.name
def name(self): """Cluster name used in requests. .. note:: This property will not change if ``_instance`` and ``cluster_id`` do not, but the return value is not cached. For example: .. literalinclude:: snippets.py :start-after: [START bigtable_cluster_name] :end-before: [END bigtable_cluster_name] The cluster name is of the form ``"projects/{project}/instances/{instance}/clusters/{cluster_id}"`` :rtype: str :returns: The cluster name. """ return self._instance._client.instance_admin_client.cluster_path( self._instance._client.project, self._instance.instance_id, self.cluster_id )
python
def name(self): """Cluster name used in requests. .. note:: This property will not change if ``_instance`` and ``cluster_id`` do not, but the return value is not cached. For example: .. literalinclude:: snippets.py :start-after: [START bigtable_cluster_name] :end-before: [END bigtable_cluster_name] The cluster name is of the form ``"projects/{project}/instances/{instance}/clusters/{cluster_id}"`` :rtype: str :returns: The cluster name. """ return self._instance._client.instance_admin_client.cluster_path( self._instance._client.project, self._instance.instance_id, self.cluster_id )
[ "def", "name", "(", "self", ")", ":", "return", "self", ".", "_instance", ".", "_client", ".", "instance_admin_client", ".", "cluster_path", "(", "self", ".", "_instance", ".", "_client", ".", "project", ",", "self", ".", "_instance", ".", "instance_id", ",", "self", ".", "cluster_id", ")" ]
Cluster name used in requests. .. note:: This property will not change if ``_instance`` and ``cluster_id`` do not, but the return value is not cached. For example: .. literalinclude:: snippets.py :start-after: [START bigtable_cluster_name] :end-before: [END bigtable_cluster_name] The cluster name is of the form ``"projects/{project}/instances/{instance}/clusters/{cluster_id}"`` :rtype: str :returns: The cluster name.
[ "Cluster", "name", "used", "in", "requests", "." ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/bigtable/google/cloud/bigtable/cluster.py#L150-L172
train
googleapis/google-cloud-python
bigtable/google/cloud/bigtable/cluster.py
Cluster.reload
def reload(self): """Reload the metadata for this cluster. For example: .. literalinclude:: snippets.py :start-after: [START bigtable_reload_cluster] :end-before: [END bigtable_reload_cluster] """ cluster_pb = self._instance._client.instance_admin_client.get_cluster(self.name) # NOTE: _update_from_pb does not check that the project and # cluster ID on the response match the request. self._update_from_pb(cluster_pb)
python
def reload(self): """Reload the metadata for this cluster. For example: .. literalinclude:: snippets.py :start-after: [START bigtable_reload_cluster] :end-before: [END bigtable_reload_cluster] """ cluster_pb = self._instance._client.instance_admin_client.get_cluster(self.name) # NOTE: _update_from_pb does not check that the project and # cluster ID on the response match the request. self._update_from_pb(cluster_pb)
[ "def", "reload", "(", "self", ")", ":", "cluster_pb", "=", "self", ".", "_instance", ".", "_client", ".", "instance_admin_client", ".", "get_cluster", "(", "self", ".", "name", ")", "# NOTE: _update_from_pb does not check that the project and", "# cluster ID on the response match the request.", "self", ".", "_update_from_pb", "(", "cluster_pb", ")" ]
Reload the metadata for this cluster. For example: .. literalinclude:: snippets.py :start-after: [START bigtable_reload_cluster] :end-before: [END bigtable_reload_cluster]
[ "Reload", "the", "metadata", "for", "this", "cluster", "." ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/bigtable/google/cloud/bigtable/cluster.py#L201-L214
train
googleapis/google-cloud-python
bigtable/google/cloud/bigtable/cluster.py
Cluster.exists
def exists(self): """Check whether the cluster already exists. For example: .. literalinclude:: snippets.py :start-after: [START bigtable_check_cluster_exists] :end-before: [END bigtable_check_cluster_exists] :rtype: bool :returns: True if the table exists, else False. """ client = self._instance._client try: client.instance_admin_client.get_cluster(name=self.name) return True # NOTE: There could be other exceptions that are returned to the user. except NotFound: return False
python
def exists(self): """Check whether the cluster already exists. For example: .. literalinclude:: snippets.py :start-after: [START bigtable_check_cluster_exists] :end-before: [END bigtable_check_cluster_exists] :rtype: bool :returns: True if the table exists, else False. """ client = self._instance._client try: client.instance_admin_client.get_cluster(name=self.name) return True # NOTE: There could be other exceptions that are returned to the user. except NotFound: return False
[ "def", "exists", "(", "self", ")", ":", "client", "=", "self", ".", "_instance", ".", "_client", "try", ":", "client", ".", "instance_admin_client", ".", "get_cluster", "(", "name", "=", "self", ".", "name", ")", "return", "True", "# NOTE: There could be other exceptions that are returned to the user.", "except", "NotFound", ":", "return", "False" ]
Check whether the cluster already exists. For example: .. literalinclude:: snippets.py :start-after: [START bigtable_check_cluster_exists] :end-before: [END bigtable_check_cluster_exists] :rtype: bool :returns: True if the table exists, else False.
[ "Check", "whether", "the", "cluster", "already", "exists", "." ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/bigtable/google/cloud/bigtable/cluster.py#L216-L234
train
googleapis/google-cloud-python
bigtable/google/cloud/bigtable/cluster.py
Cluster.create
def create(self): """Create this cluster. For example: .. literalinclude:: snippets.py :start-after: [START bigtable_create_cluster] :end-before: [END bigtable_create_cluster] .. note:: Uses the ``project``, ``instance`` and ``cluster_id`` on the current :class:`Cluster` in addition to the ``serve_nodes``. To change them before creating, reset the values via .. code:: python cluster.serve_nodes = 8 cluster.cluster_id = 'i-changed-my-mind' before calling :meth:`create`. :rtype: :class:`~google.api_core.operation.Operation` :returns: The long-running operation corresponding to the create operation. """ client = self._instance._client cluster_pb = self._to_pb() return client.instance_admin_client.create_cluster( self._instance.name, self.cluster_id, cluster_pb )
python
def create(self): """Create this cluster. For example: .. literalinclude:: snippets.py :start-after: [START bigtable_create_cluster] :end-before: [END bigtable_create_cluster] .. note:: Uses the ``project``, ``instance`` and ``cluster_id`` on the current :class:`Cluster` in addition to the ``serve_nodes``. To change them before creating, reset the values via .. code:: python cluster.serve_nodes = 8 cluster.cluster_id = 'i-changed-my-mind' before calling :meth:`create`. :rtype: :class:`~google.api_core.operation.Operation` :returns: The long-running operation corresponding to the create operation. """ client = self._instance._client cluster_pb = self._to_pb() return client.instance_admin_client.create_cluster( self._instance.name, self.cluster_id, cluster_pb )
[ "def", "create", "(", "self", ")", ":", "client", "=", "self", ".", "_instance", ".", "_client", "cluster_pb", "=", "self", ".", "_to_pb", "(", ")", "return", "client", ".", "instance_admin_client", ".", "create_cluster", "(", "self", ".", "_instance", ".", "name", ",", "self", ".", "cluster_id", ",", "cluster_pb", ")" ]
Create this cluster. For example: .. literalinclude:: snippets.py :start-after: [START bigtable_create_cluster] :end-before: [END bigtable_create_cluster] .. note:: Uses the ``project``, ``instance`` and ``cluster_id`` on the current :class:`Cluster` in addition to the ``serve_nodes``. To change them before creating, reset the values via .. code:: python cluster.serve_nodes = 8 cluster.cluster_id = 'i-changed-my-mind' before calling :meth:`create`. :rtype: :class:`~google.api_core.operation.Operation` :returns: The long-running operation corresponding to the create operation.
[ "Create", "this", "cluster", "." ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/bigtable/google/cloud/bigtable/cluster.py#L236-L267
train
googleapis/google-cloud-python
bigtable/google/cloud/bigtable/cluster.py
Cluster.update
def update(self): """Update this cluster. For example: .. literalinclude:: snippets.py :start-after: [START bigtable_update_cluster] :end-before: [END bigtable_update_cluster] .. note:: Updates the ``serve_nodes``. If you'd like to change them before updating, reset the values via .. code:: python cluster.serve_nodes = 8 before calling :meth:`update`. :type location: :str:``CreationOnly`` :param location: The location where this cluster's nodes and storage reside. For best performance, clients should be located as close as possible to this cluster. Currently only zones are supported, so values should be of the form ``projects/<project>/locations/<zone>``. :type serve_nodes: :int :param serve_nodes: The number of nodes allocated to this cluster. More nodes enable higher throughput and more consistent performance. :rtype: :class:`Operation` :returns: The long-running operation corresponding to the update operation. """ client = self._instance._client # We are passing `None` for second argument location. # Location is set only at the time of creation of a cluster # and can not be changed after cluster has been created. return client.instance_admin_client.update_cluster( self.name, self.serve_nodes, None )
python
def update(self): """Update this cluster. For example: .. literalinclude:: snippets.py :start-after: [START bigtable_update_cluster] :end-before: [END bigtable_update_cluster] .. note:: Updates the ``serve_nodes``. If you'd like to change them before updating, reset the values via .. code:: python cluster.serve_nodes = 8 before calling :meth:`update`. :type location: :str:``CreationOnly`` :param location: The location where this cluster's nodes and storage reside. For best performance, clients should be located as close as possible to this cluster. Currently only zones are supported, so values should be of the form ``projects/<project>/locations/<zone>``. :type serve_nodes: :int :param serve_nodes: The number of nodes allocated to this cluster. More nodes enable higher throughput and more consistent performance. :rtype: :class:`Operation` :returns: The long-running operation corresponding to the update operation. """ client = self._instance._client # We are passing `None` for second argument location. # Location is set only at the time of creation of a cluster # and can not be changed after cluster has been created. return client.instance_admin_client.update_cluster( self.name, self.serve_nodes, None )
[ "def", "update", "(", "self", ")", ":", "client", "=", "self", ".", "_instance", ".", "_client", "# We are passing `None` for second argument location.", "# Location is set only at the time of creation of a cluster", "# and can not be changed after cluster has been created.", "return", "client", ".", "instance_admin_client", ".", "update_cluster", "(", "self", ".", "name", ",", "self", ".", "serve_nodes", ",", "None", ")" ]
Update this cluster. For example: .. literalinclude:: snippets.py :start-after: [START bigtable_update_cluster] :end-before: [END bigtable_update_cluster] .. note:: Updates the ``serve_nodes``. If you'd like to change them before updating, reset the values via .. code:: python cluster.serve_nodes = 8 before calling :meth:`update`. :type location: :str:``CreationOnly`` :param location: The location where this cluster's nodes and storage reside. For best performance, clients should be located as close as possible to this cluster. Currently only zones are supported, so values should be of the form ``projects/<project>/locations/<zone>``. :type serve_nodes: :int :param serve_nodes: The number of nodes allocated to this cluster. More nodes enable higher throughput and more consistent performance. :rtype: :class:`Operation` :returns: The long-running operation corresponding to the update operation.
[ "Update", "this", "cluster", "." ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/bigtable/google/cloud/bigtable/cluster.py#L269-L311
train
googleapis/google-cloud-python
bigtable/google/cloud/bigtable/cluster.py
Cluster.delete
def delete(self): """Delete this cluster. For example: .. literalinclude:: snippets.py :start-after: [START bigtable_delete_cluster] :end-before: [END bigtable_delete_cluster] Marks a cluster and all of its tables for permanent deletion in 7 days. Immediately upon completion of the request: * Billing will cease for all of the cluster's reserved resources. * The cluster's ``delete_time`` field will be set 7 days in the future. Soon afterward: * All tables within the cluster will become unavailable. At the cluster's ``delete_time``: * The cluster and **all of its tables** will immediately and irrevocably disappear from the API, and their data will be permanently deleted. """ client = self._instance._client client.instance_admin_client.delete_cluster(self.name)
python
def delete(self): """Delete this cluster. For example: .. literalinclude:: snippets.py :start-after: [START bigtable_delete_cluster] :end-before: [END bigtable_delete_cluster] Marks a cluster and all of its tables for permanent deletion in 7 days. Immediately upon completion of the request: * Billing will cease for all of the cluster's reserved resources. * The cluster's ``delete_time`` field will be set 7 days in the future. Soon afterward: * All tables within the cluster will become unavailable. At the cluster's ``delete_time``: * The cluster and **all of its tables** will immediately and irrevocably disappear from the API, and their data will be permanently deleted. """ client = self._instance._client client.instance_admin_client.delete_cluster(self.name)
[ "def", "delete", "(", "self", ")", ":", "client", "=", "self", ".", "_instance", ".", "_client", "client", ".", "instance_admin_client", ".", "delete_cluster", "(", "self", ".", "name", ")" ]
Delete this cluster. For example: .. literalinclude:: snippets.py :start-after: [START bigtable_delete_cluster] :end-before: [END bigtable_delete_cluster] Marks a cluster and all of its tables for permanent deletion in 7 days. Immediately upon completion of the request: * Billing will cease for all of the cluster's reserved resources. * The cluster's ``delete_time`` field will be set 7 days in the future. Soon afterward: * All tables within the cluster will become unavailable. At the cluster's ``delete_time``: * The cluster and **all of its tables** will immediately and irrevocably disappear from the API, and their data will be permanently deleted.
[ "Delete", "this", "cluster", "." ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/bigtable/google/cloud/bigtable/cluster.py#L313-L340
train
googleapis/google-cloud-python
bigtable/google/cloud/bigtable/cluster.py
Cluster._to_pb
def _to_pb(self): """ Create cluster proto buff message for API calls """ client = self._instance._client location = client.instance_admin_client.location_path( client.project, self.location_id ) cluster_pb = instance_pb2.Cluster( location=location, serve_nodes=self.serve_nodes, default_storage_type=self.default_storage_type, ) return cluster_pb
python
def _to_pb(self): """ Create cluster proto buff message for API calls """ client = self._instance._client location = client.instance_admin_client.location_path( client.project, self.location_id ) cluster_pb = instance_pb2.Cluster( location=location, serve_nodes=self.serve_nodes, default_storage_type=self.default_storage_type, ) return cluster_pb
[ "def", "_to_pb", "(", "self", ")", ":", "client", "=", "self", ".", "_instance", ".", "_client", "location", "=", "client", ".", "instance_admin_client", ".", "location_path", "(", "client", ".", "project", ",", "self", ".", "location_id", ")", "cluster_pb", "=", "instance_pb2", ".", "Cluster", "(", "location", "=", "location", ",", "serve_nodes", "=", "self", ".", "serve_nodes", ",", "default_storage_type", "=", "self", ".", "default_storage_type", ",", ")", "return", "cluster_pb" ]
Create cluster proto buff message for API calls
[ "Create", "cluster", "proto", "buff", "message", "for", "API", "calls" ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/bigtable/google/cloud/bigtable/cluster.py#L342-L353
train
googleapis/google-cloud-python
bigtable/google/cloud/bigtable/row_data.py
Cell.from_pb
def from_pb(cls, cell_pb): """Create a new cell from a Cell protobuf. :type cell_pb: :class:`._generated.data_pb2.Cell` :param cell_pb: The protobuf to convert. :rtype: :class:`Cell` :returns: The cell corresponding to the protobuf. """ if cell_pb.labels: return cls(cell_pb.value, cell_pb.timestamp_micros, labels=cell_pb.labels) else: return cls(cell_pb.value, cell_pb.timestamp_micros)
python
def from_pb(cls, cell_pb): """Create a new cell from a Cell protobuf. :type cell_pb: :class:`._generated.data_pb2.Cell` :param cell_pb: The protobuf to convert. :rtype: :class:`Cell` :returns: The cell corresponding to the protobuf. """ if cell_pb.labels: return cls(cell_pb.value, cell_pb.timestamp_micros, labels=cell_pb.labels) else: return cls(cell_pb.value, cell_pb.timestamp_micros)
[ "def", "from_pb", "(", "cls", ",", "cell_pb", ")", ":", "if", "cell_pb", ".", "labels", ":", "return", "cls", "(", "cell_pb", ".", "value", ",", "cell_pb", ".", "timestamp_micros", ",", "labels", "=", "cell_pb", ".", "labels", ")", "else", ":", "return", "cls", "(", "cell_pb", ".", "value", ",", "cell_pb", ".", "timestamp_micros", ")" ]
Create a new cell from a Cell protobuf. :type cell_pb: :class:`._generated.data_pb2.Cell` :param cell_pb: The protobuf to convert. :rtype: :class:`Cell` :returns: The cell corresponding to the protobuf.
[ "Create", "a", "new", "cell", "from", "a", "Cell", "protobuf", "." ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/bigtable/google/cloud/bigtable/row_data.py#L59-L71
train
googleapis/google-cloud-python
bigtable/google/cloud/bigtable/row_data.py
PartialRowData.to_dict
def to_dict(self): """Convert the cells to a dictionary. This is intended to be used with HappyBase, so the column family and column qualiers are combined (with ``:``). :rtype: dict :returns: Dictionary containing all the data in the cells of this row. """ result = {} for column_family_id, columns in six.iteritems(self._cells): for column_qual, cells in six.iteritems(columns): key = _to_bytes(column_family_id) + b":" + _to_bytes(column_qual) result[key] = cells return result
python
def to_dict(self): """Convert the cells to a dictionary. This is intended to be used with HappyBase, so the column family and column qualiers are combined (with ``:``). :rtype: dict :returns: Dictionary containing all the data in the cells of this row. """ result = {} for column_family_id, columns in six.iteritems(self._cells): for column_qual, cells in six.iteritems(columns): key = _to_bytes(column_family_id) + b":" + _to_bytes(column_qual) result[key] = cells return result
[ "def", "to_dict", "(", "self", ")", ":", "result", "=", "{", "}", "for", "column_family_id", ",", "columns", "in", "six", ".", "iteritems", "(", "self", ".", "_cells", ")", ":", "for", "column_qual", ",", "cells", "in", "six", ".", "iteritems", "(", "columns", ")", ":", "key", "=", "_to_bytes", "(", "column_family_id", ")", "+", "b\":\"", "+", "_to_bytes", "(", "column_qual", ")", "result", "[", "key", "]", "=", "cells", "return", "result" ]
Convert the cells to a dictionary. This is intended to be used with HappyBase, so the column family and column qualiers are combined (with ``:``). :rtype: dict :returns: Dictionary containing all the data in the cells of this row.
[ "Convert", "the", "cells", "to", "a", "dictionary", "." ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/bigtable/google/cloud/bigtable/row_data.py#L157-L171
train
googleapis/google-cloud-python
bigtable/google/cloud/bigtable/row_data.py
PartialRowData.find_cells
def find_cells(self, column_family_id, column): """Get a time series of cells stored on this instance. For example: .. literalinclude:: snippets_table.py :start-after: [START bigtable_row_find_cells] :end-before: [END bigtable_row_find_cells] Args: column_family_id (str): The ID of the column family. Must be of the form ``[_a-zA-Z0-9][-_.a-zA-Z0-9]*``. column (bytes): The column within the column family where the cells are located. Returns: List[~google.cloud.bigtable.row_data.Cell]: The cells stored in the specified column. Raises: KeyError: If ``column_family_id`` is not among the cells stored in this row. KeyError: If ``column`` is not among the cells stored in this row for the given ``column_family_id``. """ try: column_family = self._cells[column_family_id] except KeyError: raise KeyError(_MISSING_COLUMN_FAMILY.format(column_family_id)) try: cells = column_family[column] except KeyError: raise KeyError(_MISSING_COLUMN.format(column, column_family_id)) return cells
python
def find_cells(self, column_family_id, column): """Get a time series of cells stored on this instance. For example: .. literalinclude:: snippets_table.py :start-after: [START bigtable_row_find_cells] :end-before: [END bigtable_row_find_cells] Args: column_family_id (str): The ID of the column family. Must be of the form ``[_a-zA-Z0-9][-_.a-zA-Z0-9]*``. column (bytes): The column within the column family where the cells are located. Returns: List[~google.cloud.bigtable.row_data.Cell]: The cells stored in the specified column. Raises: KeyError: If ``column_family_id`` is not among the cells stored in this row. KeyError: If ``column`` is not among the cells stored in this row for the given ``column_family_id``. """ try: column_family = self._cells[column_family_id] except KeyError: raise KeyError(_MISSING_COLUMN_FAMILY.format(column_family_id)) try: cells = column_family[column] except KeyError: raise KeyError(_MISSING_COLUMN.format(column, column_family_id)) return cells
[ "def", "find_cells", "(", "self", ",", "column_family_id", ",", "column", ")", ":", "try", ":", "column_family", "=", "self", ".", "_cells", "[", "column_family_id", "]", "except", "KeyError", ":", "raise", "KeyError", "(", "_MISSING_COLUMN_FAMILY", ".", "format", "(", "column_family_id", ")", ")", "try", ":", "cells", "=", "column_family", "[", "column", "]", "except", "KeyError", ":", "raise", "KeyError", "(", "_MISSING_COLUMN", ".", "format", "(", "column", ",", "column_family_id", ")", ")", "return", "cells" ]
Get a time series of cells stored on this instance. For example: .. literalinclude:: snippets_table.py :start-after: [START bigtable_row_find_cells] :end-before: [END bigtable_row_find_cells] Args: column_family_id (str): The ID of the column family. Must be of the form ``[_a-zA-Z0-9][-_.a-zA-Z0-9]*``. column (bytes): The column within the column family where the cells are located. Returns: List[~google.cloud.bigtable.row_data.Cell]: The cells stored in the specified column. Raises: KeyError: If ``column_family_id`` is not among the cells stored in this row. KeyError: If ``column`` is not among the cells stored in this row for the given ``column_family_id``.
[ "Get", "a", "time", "series", "of", "cells", "stored", "on", "this", "instance", "." ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/bigtable/google/cloud/bigtable/row_data.py#L200-L235
train
googleapis/google-cloud-python
bigtable/google/cloud/bigtable/row_data.py
PartialRowData.cell_value
def cell_value(self, column_family_id, column, index=0): """Get a single cell value stored on this instance. For example: .. literalinclude:: snippets_table.py :start-after: [START bigtable_row_cell_value] :end-before: [END bigtable_row_cell_value] Args: column_family_id (str): The ID of the column family. Must be of the form ``[_a-zA-Z0-9][-_.a-zA-Z0-9]*``. column (bytes): The column within the column family where the cell is located. index (Optional[int]): The offset within the series of values. If not specified, will return the first cell. Returns: ~google.cloud.bigtable.row_data.Cell value: The cell value stored in the specified column and specified index. Raises: KeyError: If ``column_family_id`` is not among the cells stored in this row. KeyError: If ``column`` is not among the cells stored in this row for the given ``column_family_id``. IndexError: If ``index`` cannot be found within the cells stored in this row for the given ``column_family_id``, ``column`` pair. """ cells = self.find_cells(column_family_id, column) try: cell = cells[index] except (TypeError, IndexError): num_cells = len(cells) msg = _MISSING_INDEX.format(index, column, column_family_id, num_cells) raise IndexError(msg) return cell.value
python
def cell_value(self, column_family_id, column, index=0): """Get a single cell value stored on this instance. For example: .. literalinclude:: snippets_table.py :start-after: [START bigtable_row_cell_value] :end-before: [END bigtable_row_cell_value] Args: column_family_id (str): The ID of the column family. Must be of the form ``[_a-zA-Z0-9][-_.a-zA-Z0-9]*``. column (bytes): The column within the column family where the cell is located. index (Optional[int]): The offset within the series of values. If not specified, will return the first cell. Returns: ~google.cloud.bigtable.row_data.Cell value: The cell value stored in the specified column and specified index. Raises: KeyError: If ``column_family_id`` is not among the cells stored in this row. KeyError: If ``column`` is not among the cells stored in this row for the given ``column_family_id``. IndexError: If ``index`` cannot be found within the cells stored in this row for the given ``column_family_id``, ``column`` pair. """ cells = self.find_cells(column_family_id, column) try: cell = cells[index] except (TypeError, IndexError): num_cells = len(cells) msg = _MISSING_INDEX.format(index, column, column_family_id, num_cells) raise IndexError(msg) return cell.value
[ "def", "cell_value", "(", "self", ",", "column_family_id", ",", "column", ",", "index", "=", "0", ")", ":", "cells", "=", "self", ".", "find_cells", "(", "column_family_id", ",", "column", ")", "try", ":", "cell", "=", "cells", "[", "index", "]", "except", "(", "TypeError", ",", "IndexError", ")", ":", "num_cells", "=", "len", "(", "cells", ")", "msg", "=", "_MISSING_INDEX", ".", "format", "(", "index", ",", "column", ",", "column_family_id", ",", "num_cells", ")", "raise", "IndexError", "(", "msg", ")", "return", "cell", ".", "value" ]
Get a single cell value stored on this instance. For example: .. literalinclude:: snippets_table.py :start-after: [START bigtable_row_cell_value] :end-before: [END bigtable_row_cell_value] Args: column_family_id (str): The ID of the column family. Must be of the form ``[_a-zA-Z0-9][-_.a-zA-Z0-9]*``. column (bytes): The column within the column family where the cell is located. index (Optional[int]): The offset within the series of values. If not specified, will return the first cell. Returns: ~google.cloud.bigtable.row_data.Cell value: The cell value stored in the specified column and specified index. Raises: KeyError: If ``column_family_id`` is not among the cells stored in this row. KeyError: If ``column`` is not among the cells stored in this row for the given ``column_family_id``. IndexError: If ``index`` cannot be found within the cells stored in this row for the given ``column_family_id``, ``column`` pair.
[ "Get", "a", "single", "cell", "value", "stored", "on", "this", "instance", "." ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/bigtable/google/cloud/bigtable/row_data.py#L237-L276
train
googleapis/google-cloud-python
bigtable/google/cloud/bigtable/row_data.py
PartialRowData.cell_values
def cell_values(self, column_family_id, column, max_count=None): """Get a time series of cells stored on this instance. For example: .. literalinclude:: snippets_table.py :start-after: [START bigtable_row_cell_values] :end-before: [END bigtable_row_cell_values] Args: column_family_id (str): The ID of the column family. Must be of the form ``[_a-zA-Z0-9][-_.a-zA-Z0-9]*``. column (bytes): The column within the column family where the cells are located. max_count (int): The maximum number of cells to use. Returns: A generator which provides: cell.value, cell.timestamp_micros for each cell in the list of cells Raises: KeyError: If ``column_family_id`` is not among the cells stored in this row. KeyError: If ``column`` is not among the cells stored in this row for the given ``column_family_id``. """ cells = self.find_cells(column_family_id, column) if max_count is None: max_count = len(cells) for index, cell in enumerate(cells): if index == max_count: break yield cell.value, cell.timestamp_micros
python
def cell_values(self, column_family_id, column, max_count=None): """Get a time series of cells stored on this instance. For example: .. literalinclude:: snippets_table.py :start-after: [START bigtable_row_cell_values] :end-before: [END bigtable_row_cell_values] Args: column_family_id (str): The ID of the column family. Must be of the form ``[_a-zA-Z0-9][-_.a-zA-Z0-9]*``. column (bytes): The column within the column family where the cells are located. max_count (int): The maximum number of cells to use. Returns: A generator which provides: cell.value, cell.timestamp_micros for each cell in the list of cells Raises: KeyError: If ``column_family_id`` is not among the cells stored in this row. KeyError: If ``column`` is not among the cells stored in this row for the given ``column_family_id``. """ cells = self.find_cells(column_family_id, column) if max_count is None: max_count = len(cells) for index, cell in enumerate(cells): if index == max_count: break yield cell.value, cell.timestamp_micros
[ "def", "cell_values", "(", "self", ",", "column_family_id", ",", "column", ",", "max_count", "=", "None", ")", ":", "cells", "=", "self", ".", "find_cells", "(", "column_family_id", ",", "column", ")", "if", "max_count", "is", "None", ":", "max_count", "=", "len", "(", "cells", ")", "for", "index", ",", "cell", "in", "enumerate", "(", "cells", ")", ":", "if", "index", "==", "max_count", ":", "break", "yield", "cell", ".", "value", ",", "cell", ".", "timestamp_micros" ]
Get a time series of cells stored on this instance. For example: .. literalinclude:: snippets_table.py :start-after: [START bigtable_row_cell_values] :end-before: [END bigtable_row_cell_values] Args: column_family_id (str): The ID of the column family. Must be of the form ``[_a-zA-Z0-9][-_.a-zA-Z0-9]*``. column (bytes): The column within the column family where the cells are located. max_count (int): The maximum number of cells to use. Returns: A generator which provides: cell.value, cell.timestamp_micros for each cell in the list of cells Raises: KeyError: If ``column_family_id`` is not among the cells stored in this row. KeyError: If ``column`` is not among the cells stored in this row for the given ``column_family_id``.
[ "Get", "a", "time", "series", "of", "cells", "stored", "on", "this", "instance", "." ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/bigtable/google/cloud/bigtable/row_data.py#L278-L312
train
googleapis/google-cloud-python
bigtable/google/cloud/bigtable/row_data.py
PartialRowsData.consume_all
def consume_all(self, max_loops=None): """Consume the streamed responses until there are no more. .. warning:: This method will be removed in future releases. Please use this class as a generator instead. :type max_loops: int :param max_loops: (Optional) Maximum number of times to try to consume an additional ``ReadRowsResponse``. You can use this to avoid long wait times. """ for row in self: self.rows[row.row_key] = row
python
def consume_all(self, max_loops=None): """Consume the streamed responses until there are no more. .. warning:: This method will be removed in future releases. Please use this class as a generator instead. :type max_loops: int :param max_loops: (Optional) Maximum number of times to try to consume an additional ``ReadRowsResponse``. You can use this to avoid long wait times. """ for row in self: self.rows[row.row_key] = row
[ "def", "consume_all", "(", "self", ",", "max_loops", "=", "None", ")", ":", "for", "row", "in", "self", ":", "self", ".", "rows", "[", "row", ".", "row_key", "]", "=", "row" ]
Consume the streamed responses until there are no more. .. warning:: This method will be removed in future releases. Please use this class as a generator instead. :type max_loops: int :param max_loops: (Optional) Maximum number of times to try to consume an additional ``ReadRowsResponse``. You can use this to avoid long wait times.
[ "Consume", "the", "streamed", "responses", "until", "there", "are", "no", "more", "." ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/bigtable/google/cloud/bigtable/row_data.py#L417-L430
train
googleapis/google-cloud-python
bigtable/google/cloud/bigtable/row_data.py
PartialRowsData._create_retry_request
def _create_retry_request(self): """Helper for :meth:`__iter__`.""" req_manager = _ReadRowsRequestManager( self.request, self.last_scanned_row_key, self._counter ) return req_manager.build_updated_request()
python
def _create_retry_request(self): """Helper for :meth:`__iter__`.""" req_manager = _ReadRowsRequestManager( self.request, self.last_scanned_row_key, self._counter ) return req_manager.build_updated_request()
[ "def", "_create_retry_request", "(", "self", ")", ":", "req_manager", "=", "_ReadRowsRequestManager", "(", "self", ".", "request", ",", "self", ".", "last_scanned_row_key", ",", "self", ".", "_counter", ")", "return", "req_manager", ".", "build_updated_request", "(", ")" ]
Helper for :meth:`__iter__`.
[ "Helper", "for", ":", "meth", ":", "__iter__", "." ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/bigtable/google/cloud/bigtable/row_data.py#L432-L437
train
googleapis/google-cloud-python
bigtable/google/cloud/bigtable/row_data.py
PartialRowsData._on_error
def _on_error(self, exc): """Helper for :meth:`__iter__`.""" # restart the read scan from AFTER the last successfully read row retry_request = self.request if self.last_scanned_row_key: retry_request = self._create_retry_request() self.response_iterator = self.read_method(retry_request)
python
def _on_error(self, exc): """Helper for :meth:`__iter__`.""" # restart the read scan from AFTER the last successfully read row retry_request = self.request if self.last_scanned_row_key: retry_request = self._create_retry_request() self.response_iterator = self.read_method(retry_request)
[ "def", "_on_error", "(", "self", ",", "exc", ")", ":", "# restart the read scan from AFTER the last successfully read row", "retry_request", "=", "self", ".", "request", "if", "self", ".", "last_scanned_row_key", ":", "retry_request", "=", "self", ".", "_create_retry_request", "(", ")", "self", ".", "response_iterator", "=", "self", ".", "read_method", "(", "retry_request", ")" ]
Helper for :meth:`__iter__`.
[ "Helper", "for", ":", "meth", ":", "__iter__", "." ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/bigtable/google/cloud/bigtable/row_data.py#L439-L446
train
googleapis/google-cloud-python
bigtable/google/cloud/bigtable/row_data.py
PartialRowsData._save_current_cell
def _save_current_cell(self): """Helper for :meth:`consume_next`.""" row, cell = self._row, self._cell family = row._cells.setdefault(cell.family_name, {}) qualified = family.setdefault(cell.qualifier, []) complete = Cell.from_pb(cell) qualified.append(complete) self._cell, self._previous_cell = None, cell
python
def _save_current_cell(self): """Helper for :meth:`consume_next`.""" row, cell = self._row, self._cell family = row._cells.setdefault(cell.family_name, {}) qualified = family.setdefault(cell.qualifier, []) complete = Cell.from_pb(cell) qualified.append(complete) self._cell, self._previous_cell = None, cell
[ "def", "_save_current_cell", "(", "self", ")", ":", "row", ",", "cell", "=", "self", ".", "_row", ",", "self", ".", "_cell", "family", "=", "row", ".", "_cells", ".", "setdefault", "(", "cell", ".", "family_name", ",", "{", "}", ")", "qualified", "=", "family", ".", "setdefault", "(", "cell", ".", "qualifier", ",", "[", "]", ")", "complete", "=", "Cell", ".", "from_pb", "(", "cell", ")", "qualified", ".", "append", "(", "complete", ")", "self", ".", "_cell", ",", "self", ".", "_previous_cell", "=", "None", ",", "cell" ]
Helper for :meth:`consume_next`.
[ "Helper", "for", ":", "meth", ":", "consume_next", "." ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/bigtable/google/cloud/bigtable/row_data.py#L560-L567
train
googleapis/google-cloud-python
bigtable/google/cloud/bigtable/row_data.py
PartialRowsData._copy_from_previous
def _copy_from_previous(self, cell): """Helper for :meth:`consume_next`.""" previous = self._previous_cell if previous is not None: if not cell.row_key: cell.row_key = previous.row_key if not cell.family_name: cell.family_name = previous.family_name # NOTE: ``cell.qualifier`` **can** be empty string. if cell.qualifier is None: cell.qualifier = previous.qualifier
python
def _copy_from_previous(self, cell): """Helper for :meth:`consume_next`.""" previous = self._previous_cell if previous is not None: if not cell.row_key: cell.row_key = previous.row_key if not cell.family_name: cell.family_name = previous.family_name # NOTE: ``cell.qualifier`` **can** be empty string. if cell.qualifier is None: cell.qualifier = previous.qualifier
[ "def", "_copy_from_previous", "(", "self", ",", "cell", ")", ":", "previous", "=", "self", ".", "_previous_cell", "if", "previous", "is", "not", "None", ":", "if", "not", "cell", ".", "row_key", ":", "cell", ".", "row_key", "=", "previous", ".", "row_key", "if", "not", "cell", ".", "family_name", ":", "cell", ".", "family_name", "=", "previous", ".", "family_name", "# NOTE: ``cell.qualifier`` **can** be empty string.", "if", "cell", ".", "qualifier", "is", "None", ":", "cell", ".", "qualifier", "=", "previous", ".", "qualifier" ]
Helper for :meth:`consume_next`.
[ "Helper", "for", ":", "meth", ":", "consume_next", "." ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/bigtable/google/cloud/bigtable/row_data.py#L569-L579
train
googleapis/google-cloud-python
bigtable/google/cloud/bigtable/row_data.py
_ReadRowsRequestManager.build_updated_request
def build_updated_request(self): """ Updates the given message request as per last scanned key """ r_kwargs = { "table_name": self.message.table_name, "filter": self.message.filter, } if self.message.rows_limit != 0: r_kwargs["rows_limit"] = max( 1, self.message.rows_limit - self.rows_read_so_far ) # if neither RowSet.row_keys nor RowSet.row_ranges currently exist, # add row_range that starts with last_scanned_key as start_key_open # to request only rows that have not been returned yet if not self.message.HasField("rows"): row_range = data_v2_pb2.RowRange(start_key_open=self.last_scanned_key) r_kwargs["rows"] = data_v2_pb2.RowSet(row_ranges=[row_range]) else: row_keys = self._filter_rows_keys() row_ranges = self._filter_row_ranges() r_kwargs["rows"] = data_v2_pb2.RowSet( row_keys=row_keys, row_ranges=row_ranges ) return data_messages_v2_pb2.ReadRowsRequest(**r_kwargs)
python
def build_updated_request(self): """ Updates the given message request as per last scanned key """ r_kwargs = { "table_name": self.message.table_name, "filter": self.message.filter, } if self.message.rows_limit != 0: r_kwargs["rows_limit"] = max( 1, self.message.rows_limit - self.rows_read_so_far ) # if neither RowSet.row_keys nor RowSet.row_ranges currently exist, # add row_range that starts with last_scanned_key as start_key_open # to request only rows that have not been returned yet if not self.message.HasField("rows"): row_range = data_v2_pb2.RowRange(start_key_open=self.last_scanned_key) r_kwargs["rows"] = data_v2_pb2.RowSet(row_ranges=[row_range]) else: row_keys = self._filter_rows_keys() row_ranges = self._filter_row_ranges() r_kwargs["rows"] = data_v2_pb2.RowSet( row_keys=row_keys, row_ranges=row_ranges ) return data_messages_v2_pb2.ReadRowsRequest(**r_kwargs)
[ "def", "build_updated_request", "(", "self", ")", ":", "r_kwargs", "=", "{", "\"table_name\"", ":", "self", ".", "message", ".", "table_name", ",", "\"filter\"", ":", "self", ".", "message", ".", "filter", ",", "}", "if", "self", ".", "message", ".", "rows_limit", "!=", "0", ":", "r_kwargs", "[", "\"rows_limit\"", "]", "=", "max", "(", "1", ",", "self", ".", "message", ".", "rows_limit", "-", "self", ".", "rows_read_so_far", ")", "# if neither RowSet.row_keys nor RowSet.row_ranges currently exist,", "# add row_range that starts with last_scanned_key as start_key_open", "# to request only rows that have not been returned yet", "if", "not", "self", ".", "message", ".", "HasField", "(", "\"rows\"", ")", ":", "row_range", "=", "data_v2_pb2", ".", "RowRange", "(", "start_key_open", "=", "self", ".", "last_scanned_key", ")", "r_kwargs", "[", "\"rows\"", "]", "=", "data_v2_pb2", ".", "RowSet", "(", "row_ranges", "=", "[", "row_range", "]", ")", "else", ":", "row_keys", "=", "self", ".", "_filter_rows_keys", "(", ")", "row_ranges", "=", "self", ".", "_filter_row_ranges", "(", ")", "r_kwargs", "[", "\"rows\"", "]", "=", "data_v2_pb2", ".", "RowSet", "(", "row_keys", "=", "row_keys", ",", "row_ranges", "=", "row_ranges", ")", "return", "data_messages_v2_pb2", ".", "ReadRowsRequest", "(", "*", "*", "r_kwargs", ")" ]
Updates the given message request as per last scanned key
[ "Updates", "the", "given", "message", "request", "as", "per", "last", "scanned", "key" ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/bigtable/google/cloud/bigtable/row_data.py#L604-L629
train
googleapis/google-cloud-python
bigtable/google/cloud/bigtable/row_data.py
_ReadRowsRequestManager._filter_rows_keys
def _filter_rows_keys(self): """ Helper for :meth:`build_updated_request`""" return [ row_key for row_key in self.message.rows.row_keys if row_key > self.last_scanned_key ]
python
def _filter_rows_keys(self): """ Helper for :meth:`build_updated_request`""" return [ row_key for row_key in self.message.rows.row_keys if row_key > self.last_scanned_key ]
[ "def", "_filter_rows_keys", "(", "self", ")", ":", "return", "[", "row_key", "for", "row_key", "in", "self", ".", "message", ".", "rows", ".", "row_keys", "if", "row_key", ">", "self", ".", "last_scanned_key", "]" ]
Helper for :meth:`build_updated_request`
[ "Helper", "for", ":", "meth", ":", "build_updated_request" ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/bigtable/google/cloud/bigtable/row_data.py#L631-L637
train
googleapis/google-cloud-python
bigtable/google/cloud/bigtable/row_data.py
_ReadRowsRequestManager._filter_row_ranges
def _filter_row_ranges(self): """ Helper for :meth:`build_updated_request`""" new_row_ranges = [] for row_range in self.message.rows.row_ranges: # if current end_key (open or closed) is set, return its value, # if not, set to empty string (''). # NOTE: Empty string in end_key means "end of table" end_key = self._end_key_set(row_range) # if end_key is already read, skip to the next row_range if end_key and self._key_already_read(end_key): continue # if current start_key (open or closed) is set, return its value, # if not, then set to empty string ('') # NOTE: Empty string in start_key means "beginning of table" start_key = self._start_key_set(row_range) # if start_key was already read or doesn't exist, # create a row_range with last_scanned_key as start_key_open # to be passed to retry request retry_row_range = row_range if self._key_already_read(start_key): retry_row_range = copy.deepcopy(row_range) retry_row_range.start_key_closed = _to_bytes("") retry_row_range.start_key_open = self.last_scanned_key new_row_ranges.append(retry_row_range) return new_row_ranges
python
def _filter_row_ranges(self): """ Helper for :meth:`build_updated_request`""" new_row_ranges = [] for row_range in self.message.rows.row_ranges: # if current end_key (open or closed) is set, return its value, # if not, set to empty string (''). # NOTE: Empty string in end_key means "end of table" end_key = self._end_key_set(row_range) # if end_key is already read, skip to the next row_range if end_key and self._key_already_read(end_key): continue # if current start_key (open or closed) is set, return its value, # if not, then set to empty string ('') # NOTE: Empty string in start_key means "beginning of table" start_key = self._start_key_set(row_range) # if start_key was already read or doesn't exist, # create a row_range with last_scanned_key as start_key_open # to be passed to retry request retry_row_range = row_range if self._key_already_read(start_key): retry_row_range = copy.deepcopy(row_range) retry_row_range.start_key_closed = _to_bytes("") retry_row_range.start_key_open = self.last_scanned_key new_row_ranges.append(retry_row_range) return new_row_ranges
[ "def", "_filter_row_ranges", "(", "self", ")", ":", "new_row_ranges", "=", "[", "]", "for", "row_range", "in", "self", ".", "message", ".", "rows", ".", "row_ranges", ":", "# if current end_key (open or closed) is set, return its value,", "# if not, set to empty string ('').", "# NOTE: Empty string in end_key means \"end of table\"", "end_key", "=", "self", ".", "_end_key_set", "(", "row_range", ")", "# if end_key is already read, skip to the next row_range", "if", "end_key", "and", "self", ".", "_key_already_read", "(", "end_key", ")", ":", "continue", "# if current start_key (open or closed) is set, return its value,", "# if not, then set to empty string ('')", "# NOTE: Empty string in start_key means \"beginning of table\"", "start_key", "=", "self", ".", "_start_key_set", "(", "row_range", ")", "# if start_key was already read or doesn't exist,", "# create a row_range with last_scanned_key as start_key_open", "# to be passed to retry request", "retry_row_range", "=", "row_range", "if", "self", ".", "_key_already_read", "(", "start_key", ")", ":", "retry_row_range", "=", "copy", ".", "deepcopy", "(", "row_range", ")", "retry_row_range", ".", "start_key_closed", "=", "_to_bytes", "(", "\"\"", ")", "retry_row_range", ".", "start_key_open", "=", "self", ".", "last_scanned_key", "new_row_ranges", ".", "append", "(", "retry_row_range", ")", "return", "new_row_ranges" ]
Helper for :meth:`build_updated_request`
[ "Helper", "for", ":", "meth", ":", "build_updated_request" ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/bigtable/google/cloud/bigtable/row_data.py#L639-L668
train
googleapis/google-cloud-python
trace/google/cloud/trace_v1/gapic/trace_service_client.py
TraceServiceClient.patch_traces
def patch_traces( self, project_id, traces, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Sends new traces to Stackdriver Trace or updates existing traces. If the ID of a trace that you send matches that of an existing trace, any fields in the existing trace and its spans are overwritten by the provided values, and any new fields provided are merged with the existing trace data. If the ID does not match, a new trace is created. Example: >>> from google.cloud import trace_v1 >>> >>> client = trace_v1.TraceServiceClient() >>> >>> # TODO: Initialize `project_id`: >>> project_id = '' >>> >>> # TODO: Initialize `traces`: >>> traces = {} >>> >>> client.patch_traces(project_id, traces) Args: project_id (str): ID of the Cloud project where the trace data is stored. traces (Union[dict, ~google.cloud.trace_v1.types.Traces]): The body of the message. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.trace_v1.types.Traces` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. 
if "patch_traces" not in self._inner_api_calls: self._inner_api_calls[ "patch_traces" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.patch_traces, default_retry=self._method_configs["PatchTraces"].retry, default_timeout=self._method_configs["PatchTraces"].timeout, client_info=self._client_info, ) request = trace_pb2.PatchTracesRequest(project_id=project_id, traces=traces) if metadata is None: metadata = [] metadata = list(metadata) try: routing_header = [("project_id", project_id)] except AttributeError: pass else: routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) self._inner_api_calls["patch_traces"]( request, retry=retry, timeout=timeout, metadata=metadata )
python
def patch_traces( self, project_id, traces, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Sends new traces to Stackdriver Trace or updates existing traces. If the ID of a trace that you send matches that of an existing trace, any fields in the existing trace and its spans are overwritten by the provided values, and any new fields provided are merged with the existing trace data. If the ID does not match, a new trace is created. Example: >>> from google.cloud import trace_v1 >>> >>> client = trace_v1.TraceServiceClient() >>> >>> # TODO: Initialize `project_id`: >>> project_id = '' >>> >>> # TODO: Initialize `traces`: >>> traces = {} >>> >>> client.patch_traces(project_id, traces) Args: project_id (str): ID of the Cloud project where the trace data is stored. traces (Union[dict, ~google.cloud.trace_v1.types.Traces]): The body of the message. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.trace_v1.types.Traces` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. 
if "patch_traces" not in self._inner_api_calls: self._inner_api_calls[ "patch_traces" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.patch_traces, default_retry=self._method_configs["PatchTraces"].retry, default_timeout=self._method_configs["PatchTraces"].timeout, client_info=self._client_info, ) request = trace_pb2.PatchTracesRequest(project_id=project_id, traces=traces) if metadata is None: metadata = [] metadata = list(metadata) try: routing_header = [("project_id", project_id)] except AttributeError: pass else: routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) self._inner_api_calls["patch_traces"]( request, retry=retry, timeout=timeout, metadata=metadata )
[ "def", "patch_traces", "(", "self", ",", "project_id", ",", "traces", ",", "retry", "=", "google", ".", "api_core", ".", "gapic_v1", ".", "method", ".", "DEFAULT", ",", "timeout", "=", "google", ".", "api_core", ".", "gapic_v1", ".", "method", ".", "DEFAULT", ",", "metadata", "=", "None", ",", ")", ":", "# Wrap the transport method to add retry and timeout logic.", "if", "\"patch_traces\"", "not", "in", "self", ".", "_inner_api_calls", ":", "self", ".", "_inner_api_calls", "[", "\"patch_traces\"", "]", "=", "google", ".", "api_core", ".", "gapic_v1", ".", "method", ".", "wrap_method", "(", "self", ".", "transport", ".", "patch_traces", ",", "default_retry", "=", "self", ".", "_method_configs", "[", "\"PatchTraces\"", "]", ".", "retry", ",", "default_timeout", "=", "self", ".", "_method_configs", "[", "\"PatchTraces\"", "]", ".", "timeout", ",", "client_info", "=", "self", ".", "_client_info", ",", ")", "request", "=", "trace_pb2", ".", "PatchTracesRequest", "(", "project_id", "=", "project_id", ",", "traces", "=", "traces", ")", "if", "metadata", "is", "None", ":", "metadata", "=", "[", "]", "metadata", "=", "list", "(", "metadata", ")", "try", ":", "routing_header", "=", "[", "(", "\"project_id\"", ",", "project_id", ")", "]", "except", "AttributeError", ":", "pass", "else", ":", "routing_metadata", "=", "google", ".", "api_core", ".", "gapic_v1", ".", "routing_header", ".", "to_grpc_metadata", "(", "routing_header", ")", "metadata", ".", "append", "(", "routing_metadata", ")", "self", ".", "_inner_api_calls", "[", "\"patch_traces\"", "]", "(", "request", ",", "retry", "=", "retry", ",", "timeout", "=", "timeout", ",", "metadata", "=", "metadata", ")" ]
Sends new traces to Stackdriver Trace or updates existing traces. If the ID of a trace that you send matches that of an existing trace, any fields in the existing trace and its spans are overwritten by the provided values, and any new fields provided are merged with the existing trace data. If the ID does not match, a new trace is created. Example: >>> from google.cloud import trace_v1 >>> >>> client = trace_v1.TraceServiceClient() >>> >>> # TODO: Initialize `project_id`: >>> project_id = '' >>> >>> # TODO: Initialize `traces`: >>> traces = {} >>> >>> client.patch_traces(project_id, traces) Args: project_id (str): ID of the Cloud project where the trace data is stored. traces (Union[dict, ~google.cloud.trace_v1.types.Traces]): The body of the message. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.trace_v1.types.Traces` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid.
[ "Sends", "new", "traces", "to", "Stackdriver", "Trace", "or", "updates", "existing", "traces", ".", "If", "the", "ID", "of", "a", "trace", "that", "you", "send", "matches", "that", "of", "an", "existing", "trace", "any", "fields", "in", "the", "existing", "trace", "and", "its", "spans", "are", "overwritten", "by", "the", "provided", "values", "and", "any", "new", "fields", "provided", "are", "merged", "with", "the", "existing", "trace", "data", ".", "If", "the", "ID", "does", "not", "match", "a", "new", "trace", "is", "created", "." ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/trace/google/cloud/trace_v1/gapic/trace_service_client.py#L177-L254
train
googleapis/google-cloud-python
trace/google/cloud/trace_v1/gapic/trace_service_client.py
TraceServiceClient.get_trace
def get_trace( self, project_id, trace_id, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Gets a single trace by its ID. Example: >>> from google.cloud import trace_v1 >>> >>> client = trace_v1.TraceServiceClient() >>> >>> # TODO: Initialize `project_id`: >>> project_id = '' >>> >>> # TODO: Initialize `trace_id`: >>> trace_id = '' >>> >>> response = client.get_trace(project_id, trace_id) Args: project_id (str): ID of the Cloud project where the trace data is stored. trace_id (str): ID of the trace to return. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.trace_v1.types.Trace` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. if "get_trace" not in self._inner_api_calls: self._inner_api_calls[ "get_trace" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.get_trace, default_retry=self._method_configs["GetTrace"].retry, default_timeout=self._method_configs["GetTrace"].timeout, client_info=self._client_info, ) request = trace_pb2.GetTraceRequest(project_id=project_id, trace_id=trace_id) return self._inner_api_calls["get_trace"]( request, retry=retry, timeout=timeout, metadata=metadata )
python
def get_trace( self, project_id, trace_id, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Gets a single trace by its ID. Example: >>> from google.cloud import trace_v1 >>> >>> client = trace_v1.TraceServiceClient() >>> >>> # TODO: Initialize `project_id`: >>> project_id = '' >>> >>> # TODO: Initialize `trace_id`: >>> trace_id = '' >>> >>> response = client.get_trace(project_id, trace_id) Args: project_id (str): ID of the Cloud project where the trace data is stored. trace_id (str): ID of the trace to return. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.trace_v1.types.Trace` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. if "get_trace" not in self._inner_api_calls: self._inner_api_calls[ "get_trace" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.get_trace, default_retry=self._method_configs["GetTrace"].retry, default_timeout=self._method_configs["GetTrace"].timeout, client_info=self._client_info, ) request = trace_pb2.GetTraceRequest(project_id=project_id, trace_id=trace_id) return self._inner_api_calls["get_trace"]( request, retry=retry, timeout=timeout, metadata=metadata )
[ "def", "get_trace", "(", "self", ",", "project_id", ",", "trace_id", ",", "retry", "=", "google", ".", "api_core", ".", "gapic_v1", ".", "method", ".", "DEFAULT", ",", "timeout", "=", "google", ".", "api_core", ".", "gapic_v1", ".", "method", ".", "DEFAULT", ",", "metadata", "=", "None", ",", ")", ":", "# Wrap the transport method to add retry and timeout logic.", "if", "\"get_trace\"", "not", "in", "self", ".", "_inner_api_calls", ":", "self", ".", "_inner_api_calls", "[", "\"get_trace\"", "]", "=", "google", ".", "api_core", ".", "gapic_v1", ".", "method", ".", "wrap_method", "(", "self", ".", "transport", ".", "get_trace", ",", "default_retry", "=", "self", ".", "_method_configs", "[", "\"GetTrace\"", "]", ".", "retry", ",", "default_timeout", "=", "self", ".", "_method_configs", "[", "\"GetTrace\"", "]", ".", "timeout", ",", "client_info", "=", "self", ".", "_client_info", ",", ")", "request", "=", "trace_pb2", ".", "GetTraceRequest", "(", "project_id", "=", "project_id", ",", "trace_id", "=", "trace_id", ")", "return", "self", ".", "_inner_api_calls", "[", "\"get_trace\"", "]", "(", "request", ",", "retry", "=", "retry", ",", "timeout", "=", "timeout", ",", "metadata", "=", "metadata", ")" ]
Gets a single trace by its ID. Example: >>> from google.cloud import trace_v1 >>> >>> client = trace_v1.TraceServiceClient() >>> >>> # TODO: Initialize `project_id`: >>> project_id = '' >>> >>> # TODO: Initialize `trace_id`: >>> trace_id = '' >>> >>> response = client.get_trace(project_id, trace_id) Args: project_id (str): ID of the Cloud project where the trace data is stored. trace_id (str): ID of the trace to return. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.trace_v1.types.Trace` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid.
[ "Gets", "a", "single", "trace", "by", "its", "ID", "." ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/trace/google/cloud/trace_v1/gapic/trace_service_client.py#L256-L316
train
googleapis/google-cloud-python
trace/google/cloud/trace_v1/gapic/trace_service_client.py
TraceServiceClient.list_traces
def list_traces( self, project_id, view=None, page_size=None, start_time=None, end_time=None, filter_=None, order_by=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Returns of a list of traces that match the specified filter conditions. Example: >>> from google.cloud import trace_v1 >>> >>> client = trace_v1.TraceServiceClient() >>> >>> # TODO: Initialize `project_id`: >>> project_id = '' >>> >>> # Iterate over all results >>> for element in client.list_traces(project_id): ... # process element ... pass >>> >>> >>> # Alternatively: >>> >>> # Iterate over results one page at a time >>> for page in client.list_traces(project_id).pages: ... for element in page: ... # process element ... pass Args: project_id (str): ID of the Cloud project where the trace data is stored. view (~google.cloud.trace_v1.types.ViewType): Type of data returned for traces in the list. Optional. Default is ``MINIMAL``. page_size (int): The maximum number of resources contained in the underlying API response. If page streaming is performed per- resource, this parameter does not affect the return value. If page streaming is performed per-page, this determines the maximum number of resources in a page. start_time (Union[dict, ~google.cloud.trace_v1.types.Timestamp]): Start of the time interval (inclusive) during which the trace data was collected from the application. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.trace_v1.types.Timestamp` end_time (Union[dict, ~google.cloud.trace_v1.types.Timestamp]): End of the time interval (inclusive) during which the trace data was collected from the application. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.trace_v1.types.Timestamp` filter_ (str): An optional filter against labels for the request. By default, searches use prefix matching. 
To specify exact match, prepend a plus symbol (``+``) to the search term. Multiple terms are ANDed. Syntax: - ``root:NAME_PREFIX`` or ``NAME_PREFIX``: Return traces where any root span starts with ``NAME_PREFIX``. - ``+root:NAME`` or ``+NAME``: Return traces where any root span's name is exactly ``NAME``. - ``span:NAME_PREFIX``: Return traces where any span starts with ``NAME_PREFIX``. - ``+span:NAME``: Return traces where any span's name is exactly ``NAME``. - ``latency:DURATION``: Return traces whose overall latency is greater or equal to than ``DURATION``. Accepted units are nanoseconds (``ns``), milliseconds (``ms``), and seconds (``s``). Default is ``ms``. For example, ``latency:24ms`` returns traces whose overall latency is greater than or equal to 24 milliseconds. - ``label:LABEL_KEY``: Return all traces containing the specified label key (exact match, case-sensitive) regardless of the key:value pair's value (including empty values). - ``LABEL_KEY:VALUE_PREFIX``: Return all traces containing the specified label key (exact match, case-sensitive) whose value starts with ``VALUE_PREFIX``. Both a key and a value must be specified. - ``+LABEL_KEY:VALUE``: Return all traces containing a key:value pair exactly matching the specified text. Both a key and a value must be specified. - ``method:VALUE``: Equivalent to ``/http/method:VALUE``. - ``url:VALUE``: Equivalent to ``/http/url:VALUE``. order_by (str): Field used to sort the returned traces. Optional. Can be one of the following: - ``trace_id`` - ``name`` (``name`` field of root span in the trace) - ``duration`` (difference between ``end_time`` and ``start_time`` fields of the root span) - ``start`` (``start_time`` field of the root span) Descending order can be specified by appending ``desc`` to the sort field (for example, ``name desc``). Only one sort field is permitted. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. 
timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.gax.PageIterator` instance. By default, this is an iterable of :class:`~google.cloud.trace_v1.types.Trace` instances. This object can also be configured to iterate over the pages of the response through the `options` parameter. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. if "list_traces" not in self._inner_api_calls: self._inner_api_calls[ "list_traces" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.list_traces, default_retry=self._method_configs["ListTraces"].retry, default_timeout=self._method_configs["ListTraces"].timeout, client_info=self._client_info, ) request = trace_pb2.ListTracesRequest( project_id=project_id, view=view, page_size=page_size, start_time=start_time, end_time=end_time, filter=filter_, order_by=order_by, ) if metadata is None: metadata = [] metadata = list(metadata) try: routing_header = [("project_id", project_id)] except AttributeError: pass else: routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) iterator = google.api_core.page_iterator.GRPCIterator( client=None, method=functools.partial( self._inner_api_calls["list_traces"], retry=retry, timeout=timeout, metadata=metadata, ), request=request, items_field="traces", request_token_field="page_token", response_token_field="next_page_token", ) return iterator
python
def list_traces( self, project_id, view=None, page_size=None, start_time=None, end_time=None, filter_=None, order_by=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Returns of a list of traces that match the specified filter conditions. Example: >>> from google.cloud import trace_v1 >>> >>> client = trace_v1.TraceServiceClient() >>> >>> # TODO: Initialize `project_id`: >>> project_id = '' >>> >>> # Iterate over all results >>> for element in client.list_traces(project_id): ... # process element ... pass >>> >>> >>> # Alternatively: >>> >>> # Iterate over results one page at a time >>> for page in client.list_traces(project_id).pages: ... for element in page: ... # process element ... pass Args: project_id (str): ID of the Cloud project where the trace data is stored. view (~google.cloud.trace_v1.types.ViewType): Type of data returned for traces in the list. Optional. Default is ``MINIMAL``. page_size (int): The maximum number of resources contained in the underlying API response. If page streaming is performed per- resource, this parameter does not affect the return value. If page streaming is performed per-page, this determines the maximum number of resources in a page. start_time (Union[dict, ~google.cloud.trace_v1.types.Timestamp]): Start of the time interval (inclusive) during which the trace data was collected from the application. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.trace_v1.types.Timestamp` end_time (Union[dict, ~google.cloud.trace_v1.types.Timestamp]): End of the time interval (inclusive) during which the trace data was collected from the application. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.trace_v1.types.Timestamp` filter_ (str): An optional filter against labels for the request. By default, searches use prefix matching. 
To specify exact match, prepend a plus symbol (``+``) to the search term. Multiple terms are ANDed. Syntax: - ``root:NAME_PREFIX`` or ``NAME_PREFIX``: Return traces where any root span starts with ``NAME_PREFIX``. - ``+root:NAME`` or ``+NAME``: Return traces where any root span's name is exactly ``NAME``. - ``span:NAME_PREFIX``: Return traces where any span starts with ``NAME_PREFIX``. - ``+span:NAME``: Return traces where any span's name is exactly ``NAME``. - ``latency:DURATION``: Return traces whose overall latency is greater or equal to than ``DURATION``. Accepted units are nanoseconds (``ns``), milliseconds (``ms``), and seconds (``s``). Default is ``ms``. For example, ``latency:24ms`` returns traces whose overall latency is greater than or equal to 24 milliseconds. - ``label:LABEL_KEY``: Return all traces containing the specified label key (exact match, case-sensitive) regardless of the key:value pair's value (including empty values). - ``LABEL_KEY:VALUE_PREFIX``: Return all traces containing the specified label key (exact match, case-sensitive) whose value starts with ``VALUE_PREFIX``. Both a key and a value must be specified. - ``+LABEL_KEY:VALUE``: Return all traces containing a key:value pair exactly matching the specified text. Both a key and a value must be specified. - ``method:VALUE``: Equivalent to ``/http/method:VALUE``. - ``url:VALUE``: Equivalent to ``/http/url:VALUE``. order_by (str): Field used to sort the returned traces. Optional. Can be one of the following: - ``trace_id`` - ``name`` (``name`` field of root span in the trace) - ``duration`` (difference between ``end_time`` and ``start_time`` fields of the root span) - ``start`` (``start_time`` field of the root span) Descending order can be specified by appending ``desc`` to the sort field (for example, ``name desc``). Only one sort field is permitted. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. 
timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.gax.PageIterator` instance. By default, this is an iterable of :class:`~google.cloud.trace_v1.types.Trace` instances. This object can also be configured to iterate over the pages of the response through the `options` parameter. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. if "list_traces" not in self._inner_api_calls: self._inner_api_calls[ "list_traces" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.list_traces, default_retry=self._method_configs["ListTraces"].retry, default_timeout=self._method_configs["ListTraces"].timeout, client_info=self._client_info, ) request = trace_pb2.ListTracesRequest( project_id=project_id, view=view, page_size=page_size, start_time=start_time, end_time=end_time, filter=filter_, order_by=order_by, ) if metadata is None: metadata = [] metadata = list(metadata) try: routing_header = [("project_id", project_id)] except AttributeError: pass else: routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) iterator = google.api_core.page_iterator.GRPCIterator( client=None, method=functools.partial( self._inner_api_calls["list_traces"], retry=retry, timeout=timeout, metadata=metadata, ), request=request, items_field="traces", request_token_field="page_token", response_token_field="next_page_token", ) return iterator
[ "def", "list_traces", "(", "self", ",", "project_id", ",", "view", "=", "None", ",", "page_size", "=", "None", ",", "start_time", "=", "None", ",", "end_time", "=", "None", ",", "filter_", "=", "None", ",", "order_by", "=", "None", ",", "retry", "=", "google", ".", "api_core", ".", "gapic_v1", ".", "method", ".", "DEFAULT", ",", "timeout", "=", "google", ".", "api_core", ".", "gapic_v1", ".", "method", ".", "DEFAULT", ",", "metadata", "=", "None", ",", ")", ":", "# Wrap the transport method to add retry and timeout logic.", "if", "\"list_traces\"", "not", "in", "self", ".", "_inner_api_calls", ":", "self", ".", "_inner_api_calls", "[", "\"list_traces\"", "]", "=", "google", ".", "api_core", ".", "gapic_v1", ".", "method", ".", "wrap_method", "(", "self", ".", "transport", ".", "list_traces", ",", "default_retry", "=", "self", ".", "_method_configs", "[", "\"ListTraces\"", "]", ".", "retry", ",", "default_timeout", "=", "self", ".", "_method_configs", "[", "\"ListTraces\"", "]", ".", "timeout", ",", "client_info", "=", "self", ".", "_client_info", ",", ")", "request", "=", "trace_pb2", ".", "ListTracesRequest", "(", "project_id", "=", "project_id", ",", "view", "=", "view", ",", "page_size", "=", "page_size", ",", "start_time", "=", "start_time", ",", "end_time", "=", "end_time", ",", "filter", "=", "filter_", ",", "order_by", "=", "order_by", ",", ")", "if", "metadata", "is", "None", ":", "metadata", "=", "[", "]", "metadata", "=", "list", "(", "metadata", ")", "try", ":", "routing_header", "=", "[", "(", "\"project_id\"", ",", "project_id", ")", "]", "except", "AttributeError", ":", "pass", "else", ":", "routing_metadata", "=", "google", ".", "api_core", ".", "gapic_v1", ".", "routing_header", ".", "to_grpc_metadata", "(", "routing_header", ")", "metadata", ".", "append", "(", "routing_metadata", ")", "iterator", "=", "google", ".", "api_core", ".", "page_iterator", ".", "GRPCIterator", "(", "client", "=", "None", ",", "method", "=", "functools", ".", "partial", 
"(", "self", ".", "_inner_api_calls", "[", "\"list_traces\"", "]", ",", "retry", "=", "retry", ",", "timeout", "=", "timeout", ",", "metadata", "=", "metadata", ",", ")", ",", "request", "=", "request", ",", "items_field", "=", "\"traces\"", ",", "request_token_field", "=", "\"page_token\"", ",", "response_token_field", "=", "\"next_page_token\"", ",", ")", "return", "iterator" ]
Returns of a list of traces that match the specified filter conditions. Example: >>> from google.cloud import trace_v1 >>> >>> client = trace_v1.TraceServiceClient() >>> >>> # TODO: Initialize `project_id`: >>> project_id = '' >>> >>> # Iterate over all results >>> for element in client.list_traces(project_id): ... # process element ... pass >>> >>> >>> # Alternatively: >>> >>> # Iterate over results one page at a time >>> for page in client.list_traces(project_id).pages: ... for element in page: ... # process element ... pass Args: project_id (str): ID of the Cloud project where the trace data is stored. view (~google.cloud.trace_v1.types.ViewType): Type of data returned for traces in the list. Optional. Default is ``MINIMAL``. page_size (int): The maximum number of resources contained in the underlying API response. If page streaming is performed per- resource, this parameter does not affect the return value. If page streaming is performed per-page, this determines the maximum number of resources in a page. start_time (Union[dict, ~google.cloud.trace_v1.types.Timestamp]): Start of the time interval (inclusive) during which the trace data was collected from the application. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.trace_v1.types.Timestamp` end_time (Union[dict, ~google.cloud.trace_v1.types.Timestamp]): End of the time interval (inclusive) during which the trace data was collected from the application. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.trace_v1.types.Timestamp` filter_ (str): An optional filter against labels for the request. By default, searches use prefix matching. To specify exact match, prepend a plus symbol (``+``) to the search term. Multiple terms are ANDed. Syntax: - ``root:NAME_PREFIX`` or ``NAME_PREFIX``: Return traces where any root span starts with ``NAME_PREFIX``. 
- ``+root:NAME`` or ``+NAME``: Return traces where any root span's name is exactly ``NAME``. - ``span:NAME_PREFIX``: Return traces where any span starts with ``NAME_PREFIX``. - ``+span:NAME``: Return traces where any span's name is exactly ``NAME``. - ``latency:DURATION``: Return traces whose overall latency is greater or equal to than ``DURATION``. Accepted units are nanoseconds (``ns``), milliseconds (``ms``), and seconds (``s``). Default is ``ms``. For example, ``latency:24ms`` returns traces whose overall latency is greater than or equal to 24 milliseconds. - ``label:LABEL_KEY``: Return all traces containing the specified label key (exact match, case-sensitive) regardless of the key:value pair's value (including empty values). - ``LABEL_KEY:VALUE_PREFIX``: Return all traces containing the specified label key (exact match, case-sensitive) whose value starts with ``VALUE_PREFIX``. Both a key and a value must be specified. - ``+LABEL_KEY:VALUE``: Return all traces containing a key:value pair exactly matching the specified text. Both a key and a value must be specified. - ``method:VALUE``: Equivalent to ``/http/method:VALUE``. - ``url:VALUE``: Equivalent to ``/http/url:VALUE``. order_by (str): Field used to sort the returned traces. Optional. Can be one of the following: - ``trace_id`` - ``name`` (``name`` field of root span in the trace) - ``duration`` (difference between ``end_time`` and ``start_time`` fields of the root span) - ``start`` (``start_time`` field of the root span) Descending order can be specified by appending ``desc`` to the sort field (for example, ``name desc``). Only one sort field is permitted. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. 
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.gax.PageIterator` instance. By default, this is an iterable of :class:`~google.cloud.trace_v1.types.Trace` instances. This object can also be configured to iterate over the pages of the response through the `options` parameter. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid.
[ "Returns", "of", "a", "list", "of", "traces", "that", "match", "the", "specified", "filter", "conditions", "." ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/trace/google/cloud/trace_v1/gapic/trace_service_client.py#L318-L486
train
googleapis/google-cloud-python
redis/google/cloud/redis_v1beta1/gapic/cloud_redis_client.py
CloudRedisClient.get_instance
def get_instance( self, name, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Gets the details of a specific Redis instance. Example: >>> from google.cloud import redis_v1beta1 >>> >>> client = redis_v1beta1.CloudRedisClient() >>> >>> name = client.instance_path('[PROJECT]', '[LOCATION]', '[INSTANCE]') >>> >>> response = client.get_instance(name) Args: name (str): Required. Redis instance resource name using the form: ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` where ``location_id`` refers to a GCP region retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.redis_v1beta1.types.Instance` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. if "get_instance" not in self._inner_api_calls: self._inner_api_calls[ "get_instance" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.get_instance, default_retry=self._method_configs["GetInstance"].retry, default_timeout=self._method_configs["GetInstance"].timeout, client_info=self._client_info, ) request = cloud_redis_pb2.GetInstanceRequest(name=name) return self._inner_api_calls["get_instance"]( request, retry=retry, timeout=timeout, metadata=metadata )
python
def get_instance( self, name, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Gets the details of a specific Redis instance. Example: >>> from google.cloud import redis_v1beta1 >>> >>> client = redis_v1beta1.CloudRedisClient() >>> >>> name = client.instance_path('[PROJECT]', '[LOCATION]', '[INSTANCE]') >>> >>> response = client.get_instance(name) Args: name (str): Required. Redis instance resource name using the form: ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` where ``location_id`` refers to a GCP region retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.redis_v1beta1.types.Instance` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. if "get_instance" not in self._inner_api_calls: self._inner_api_calls[ "get_instance" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.get_instance, default_retry=self._method_configs["GetInstance"].retry, default_timeout=self._method_configs["GetInstance"].timeout, client_info=self._client_info, ) request = cloud_redis_pb2.GetInstanceRequest(name=name) return self._inner_api_calls["get_instance"]( request, retry=retry, timeout=timeout, metadata=metadata )
[ "def", "get_instance", "(", "self", ",", "name", ",", "retry", "=", "google", ".", "api_core", ".", "gapic_v1", ".", "method", ".", "DEFAULT", ",", "timeout", "=", "google", ".", "api_core", ".", "gapic_v1", ".", "method", ".", "DEFAULT", ",", "metadata", "=", "None", ",", ")", ":", "# Wrap the transport method to add retry and timeout logic.", "if", "\"get_instance\"", "not", "in", "self", ".", "_inner_api_calls", ":", "self", ".", "_inner_api_calls", "[", "\"get_instance\"", "]", "=", "google", ".", "api_core", ".", "gapic_v1", ".", "method", ".", "wrap_method", "(", "self", ".", "transport", ".", "get_instance", ",", "default_retry", "=", "self", ".", "_method_configs", "[", "\"GetInstance\"", "]", ".", "retry", ",", "default_timeout", "=", "self", ".", "_method_configs", "[", "\"GetInstance\"", "]", ".", "timeout", ",", "client_info", "=", "self", ".", "_client_info", ",", ")", "request", "=", "cloud_redis_pb2", ".", "GetInstanceRequest", "(", "name", "=", "name", ")", "return", "self", ".", "_inner_api_calls", "[", "\"get_instance\"", "]", "(", "request", ",", "retry", "=", "retry", ",", "timeout", "=", "timeout", ",", "metadata", "=", "metadata", ")" ]
Gets the details of a specific Redis instance. Example: >>> from google.cloud import redis_v1beta1 >>> >>> client = redis_v1beta1.CloudRedisClient() >>> >>> name = client.instance_path('[PROJECT]', '[LOCATION]', '[INSTANCE]') >>> >>> response = client.get_instance(name) Args: name (str): Required. Redis instance resource name using the form: ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` where ``location_id`` refers to a GCP region retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.redis_v1beta1.types.Instance` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid.
[ "Gets", "the", "details", "of", "a", "specific", "Redis", "instance", "." ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/redis/google/cloud/redis_v1beta1/gapic/cloud_redis_client.py#L310-L366
train
googleapis/google-cloud-python
redis/google/cloud/redis_v1beta1/gapic/cloud_redis_client.py
CloudRedisClient.update_instance
def update_instance( self, update_mask, instance, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Updates the metadata and configuration of a specific Redis instance. Completed longrunning.Operation will contain the new instance object in the response field. The returned operation is automatically deleted after a few hours, so there is no need to call DeleteOperation. Example: >>> from google.cloud import redis_v1beta1 >>> >>> client = redis_v1beta1.CloudRedisClient() >>> >>> paths_element = 'display_name' >>> paths_element_2 = 'memory_size_gb' >>> paths = [paths_element, paths_element_2] >>> update_mask = {'paths': paths} >>> display_name = 'UpdatedDisplayName' >>> memory_size_gb = 4 >>> instance = {'display_name': display_name, 'memory_size_gb': memory_size_gb} >>> >>> response = client.update_instance(update_mask, instance) >>> >>> def callback(operation_future): ... # Handle result. ... result = operation_future.result() >>> >>> response.add_done_callback(callback) >>> >>> # Handle metadata. >>> metadata = response.metadata() Args: update_mask (Union[dict, ~google.cloud.redis_v1beta1.types.FieldMask]): Required. Mask of fields to update. At least one path must be supplied in this field. The elements of the repeated paths field may only include these fields from ``Instance``: \* ``display_name`` \* ``labels`` \* ``memory_size_gb`` \* ``redis_config`` If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.redis_v1beta1.types.FieldMask` instance (Union[dict, ~google.cloud.redis_v1beta1.types.Instance]): Required. Update description. Only fields specified in update\_mask are updated. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.redis_v1beta1.types.Instance` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. 
timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.redis_v1beta1.types._OperationFuture` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. if "update_instance" not in self._inner_api_calls: self._inner_api_calls[ "update_instance" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.update_instance, default_retry=self._method_configs["UpdateInstance"].retry, default_timeout=self._method_configs["UpdateInstance"].timeout, client_info=self._client_info, ) request = cloud_redis_pb2.UpdateInstanceRequest( update_mask=update_mask, instance=instance ) operation = self._inner_api_calls["update_instance"]( request, retry=retry, timeout=timeout, metadata=metadata ) return google.api_core.operation.from_gapic( operation, self.transport._operations_client, cloud_redis_pb2.Instance, metadata_type=any_pb2.Any, )
python
def update_instance( self, update_mask, instance, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Updates the metadata and configuration of a specific Redis instance. Completed longrunning.Operation will contain the new instance object in the response field. The returned operation is automatically deleted after a few hours, so there is no need to call DeleteOperation. Example: >>> from google.cloud import redis_v1beta1 >>> >>> client = redis_v1beta1.CloudRedisClient() >>> >>> paths_element = 'display_name' >>> paths_element_2 = 'memory_size_gb' >>> paths = [paths_element, paths_element_2] >>> update_mask = {'paths': paths} >>> display_name = 'UpdatedDisplayName' >>> memory_size_gb = 4 >>> instance = {'display_name': display_name, 'memory_size_gb': memory_size_gb} >>> >>> response = client.update_instance(update_mask, instance) >>> >>> def callback(operation_future): ... # Handle result. ... result = operation_future.result() >>> >>> response.add_done_callback(callback) >>> >>> # Handle metadata. >>> metadata = response.metadata() Args: update_mask (Union[dict, ~google.cloud.redis_v1beta1.types.FieldMask]): Required. Mask of fields to update. At least one path must be supplied in this field. The elements of the repeated paths field may only include these fields from ``Instance``: \* ``display_name`` \* ``labels`` \* ``memory_size_gb`` \* ``redis_config`` If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.redis_v1beta1.types.FieldMask` instance (Union[dict, ~google.cloud.redis_v1beta1.types.Instance]): Required. Update description. Only fields specified in update\_mask are updated. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.redis_v1beta1.types.Instance` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. 
timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.redis_v1beta1.types._OperationFuture` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. if "update_instance" not in self._inner_api_calls: self._inner_api_calls[ "update_instance" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.update_instance, default_retry=self._method_configs["UpdateInstance"].retry, default_timeout=self._method_configs["UpdateInstance"].timeout, client_info=self._client_info, ) request = cloud_redis_pb2.UpdateInstanceRequest( update_mask=update_mask, instance=instance ) operation = self._inner_api_calls["update_instance"]( request, retry=retry, timeout=timeout, metadata=metadata ) return google.api_core.operation.from_gapic( operation, self.transport._operations_client, cloud_redis_pb2.Instance, metadata_type=any_pb2.Any, )
[ "def", "update_instance", "(", "self", ",", "update_mask", ",", "instance", ",", "retry", "=", "google", ".", "api_core", ".", "gapic_v1", ".", "method", ".", "DEFAULT", ",", "timeout", "=", "google", ".", "api_core", ".", "gapic_v1", ".", "method", ".", "DEFAULT", ",", "metadata", "=", "None", ",", ")", ":", "# Wrap the transport method to add retry and timeout logic.", "if", "\"update_instance\"", "not", "in", "self", ".", "_inner_api_calls", ":", "self", ".", "_inner_api_calls", "[", "\"update_instance\"", "]", "=", "google", ".", "api_core", ".", "gapic_v1", ".", "method", ".", "wrap_method", "(", "self", ".", "transport", ".", "update_instance", ",", "default_retry", "=", "self", ".", "_method_configs", "[", "\"UpdateInstance\"", "]", ".", "retry", ",", "default_timeout", "=", "self", ".", "_method_configs", "[", "\"UpdateInstance\"", "]", ".", "timeout", ",", "client_info", "=", "self", ".", "_client_info", ",", ")", "request", "=", "cloud_redis_pb2", ".", "UpdateInstanceRequest", "(", "update_mask", "=", "update_mask", ",", "instance", "=", "instance", ")", "operation", "=", "self", ".", "_inner_api_calls", "[", "\"update_instance\"", "]", "(", "request", ",", "retry", "=", "retry", ",", "timeout", "=", "timeout", ",", "metadata", "=", "metadata", ")", "return", "google", ".", "api_core", ".", "operation", ".", "from_gapic", "(", "operation", ",", "self", ".", "transport", ".", "_operations_client", ",", "cloud_redis_pb2", ".", "Instance", ",", "metadata_type", "=", "any_pb2", ".", "Any", ",", ")" ]
Updates the metadata and configuration of a specific Redis instance. Completed longrunning.Operation will contain the new instance object in the response field. The returned operation is automatically deleted after a few hours, so there is no need to call DeleteOperation. Example: >>> from google.cloud import redis_v1beta1 >>> >>> client = redis_v1beta1.CloudRedisClient() >>> >>> paths_element = 'display_name' >>> paths_element_2 = 'memory_size_gb' >>> paths = [paths_element, paths_element_2] >>> update_mask = {'paths': paths} >>> display_name = 'UpdatedDisplayName' >>> memory_size_gb = 4 >>> instance = {'display_name': display_name, 'memory_size_gb': memory_size_gb} >>> >>> response = client.update_instance(update_mask, instance) >>> >>> def callback(operation_future): ... # Handle result. ... result = operation_future.result() >>> >>> response.add_done_callback(callback) >>> >>> # Handle metadata. >>> metadata = response.metadata() Args: update_mask (Union[dict, ~google.cloud.redis_v1beta1.types.FieldMask]): Required. Mask of fields to update. At least one path must be supplied in this field. The elements of the repeated paths field may only include these fields from ``Instance``: \* ``display_name`` \* ``labels`` \* ``memory_size_gb`` \* ``redis_config`` If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.redis_v1beta1.types.FieldMask` instance (Union[dict, ~google.cloud.redis_v1beta1.types.Instance]): Required. Update description. Only fields specified in update\_mask are updated. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.redis_v1beta1.types.Instance` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. 
Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.redis_v1beta1.types._OperationFuture` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid.
[ "Updates", "the", "metadata", "and", "configuration", "of", "a", "specific", "Redis", "instance", "." ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/redis/google/cloud/redis_v1beta1/gapic/cloud_redis_client.py#L474-L567
train
googleapis/google-cloud-python
redis/google/cloud/redis_v1beta1/gapic/cloud_redis_client.py
CloudRedisClient.delete_instance
def delete_instance( self, name, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Deletes a specific Redis instance. Instance stops serving and data is deleted. Example: >>> from google.cloud import redis_v1beta1 >>> >>> client = redis_v1beta1.CloudRedisClient() >>> >>> name = client.instance_path('[PROJECT]', '[LOCATION]', '[INSTANCE]') >>> >>> response = client.delete_instance(name) >>> >>> def callback(operation_future): ... # Handle result. ... result = operation_future.result() >>> >>> response.add_done_callback(callback) >>> >>> # Handle metadata. >>> metadata = response.metadata() Args: name (str): Required. Redis instance resource name using the form: ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` where ``location_id`` refers to a GCP region retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.redis_v1beta1.types._OperationFuture` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. 
if "delete_instance" not in self._inner_api_calls: self._inner_api_calls[ "delete_instance" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.delete_instance, default_retry=self._method_configs["DeleteInstance"].retry, default_timeout=self._method_configs["DeleteInstance"].timeout, client_info=self._client_info, ) request = cloud_redis_pb2.DeleteInstanceRequest(name=name) operation = self._inner_api_calls["delete_instance"]( request, retry=retry, timeout=timeout, metadata=metadata ) return google.api_core.operation.from_gapic( operation, self.transport._operations_client, empty_pb2.Empty, metadata_type=any_pb2.Any, )
python
def delete_instance( self, name, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Deletes a specific Redis instance. Instance stops serving and data is deleted. Example: >>> from google.cloud import redis_v1beta1 >>> >>> client = redis_v1beta1.CloudRedisClient() >>> >>> name = client.instance_path('[PROJECT]', '[LOCATION]', '[INSTANCE]') >>> >>> response = client.delete_instance(name) >>> >>> def callback(operation_future): ... # Handle result. ... result = operation_future.result() >>> >>> response.add_done_callback(callback) >>> >>> # Handle metadata. >>> metadata = response.metadata() Args: name (str): Required. Redis instance resource name using the form: ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` where ``location_id`` refers to a GCP region retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.redis_v1beta1.types._OperationFuture` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. 
if "delete_instance" not in self._inner_api_calls: self._inner_api_calls[ "delete_instance" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.delete_instance, default_retry=self._method_configs["DeleteInstance"].retry, default_timeout=self._method_configs["DeleteInstance"].timeout, client_info=self._client_info, ) request = cloud_redis_pb2.DeleteInstanceRequest(name=name) operation = self._inner_api_calls["delete_instance"]( request, retry=retry, timeout=timeout, metadata=metadata ) return google.api_core.operation.from_gapic( operation, self.transport._operations_client, empty_pb2.Empty, metadata_type=any_pb2.Any, )
[ "def", "delete_instance", "(", "self", ",", "name", ",", "retry", "=", "google", ".", "api_core", ".", "gapic_v1", ".", "method", ".", "DEFAULT", ",", "timeout", "=", "google", ".", "api_core", ".", "gapic_v1", ".", "method", ".", "DEFAULT", ",", "metadata", "=", "None", ",", ")", ":", "# Wrap the transport method to add retry and timeout logic.", "if", "\"delete_instance\"", "not", "in", "self", ".", "_inner_api_calls", ":", "self", ".", "_inner_api_calls", "[", "\"delete_instance\"", "]", "=", "google", ".", "api_core", ".", "gapic_v1", ".", "method", ".", "wrap_method", "(", "self", ".", "transport", ".", "delete_instance", ",", "default_retry", "=", "self", ".", "_method_configs", "[", "\"DeleteInstance\"", "]", ".", "retry", ",", "default_timeout", "=", "self", ".", "_method_configs", "[", "\"DeleteInstance\"", "]", ".", "timeout", ",", "client_info", "=", "self", ".", "_client_info", ",", ")", "request", "=", "cloud_redis_pb2", ".", "DeleteInstanceRequest", "(", "name", "=", "name", ")", "operation", "=", "self", ".", "_inner_api_calls", "[", "\"delete_instance\"", "]", "(", "request", ",", "retry", "=", "retry", ",", "timeout", "=", "timeout", ",", "metadata", "=", "metadata", ")", "return", "google", ".", "api_core", ".", "operation", ".", "from_gapic", "(", "operation", ",", "self", ".", "transport", ".", "_operations_client", ",", "empty_pb2", ".", "Empty", ",", "metadata_type", "=", "any_pb2", ".", "Any", ",", ")" ]
Deletes a specific Redis instance. Instance stops serving and data is deleted. Example: >>> from google.cloud import redis_v1beta1 >>> >>> client = redis_v1beta1.CloudRedisClient() >>> >>> name = client.instance_path('[PROJECT]', '[LOCATION]', '[INSTANCE]') >>> >>> response = client.delete_instance(name) >>> >>> def callback(operation_future): ... # Handle result. ... result = operation_future.result() >>> >>> response.add_done_callback(callback) >>> >>> # Handle metadata. >>> metadata = response.metadata() Args: name (str): Required. Redis instance resource name using the form: ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` where ``location_id`` refers to a GCP region retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.redis_v1beta1.types._OperationFuture` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid.
[ "Deletes", "a", "specific", "Redis", "instance", ".", "Instance", "stops", "serving", "and", "data", "is", "deleted", "." ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/redis/google/cloud/redis_v1beta1/gapic/cloud_redis_client.py#L569-L641
train
googleapis/google-cloud-python
speech/google/cloud/speech_v1p1beta1/gapic/speech_client.py
SpeechClient.recognize
def recognize( self, config, audio, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Performs synchronous speech recognition: receive results after all audio has been sent and processed. Example: >>> from google.cloud import speech_v1p1beta1 >>> from google.cloud.speech_v1p1beta1 import enums >>> >>> client = speech_v1p1beta1.SpeechClient() >>> >>> encoding = enums.RecognitionConfig.AudioEncoding.FLAC >>> sample_rate_hertz = 44100 >>> language_code = 'en-US' >>> config = {'encoding': encoding, 'sample_rate_hertz': sample_rate_hertz, 'language_code': language_code} >>> uri = 'gs://bucket_name/file_name.flac' >>> audio = {'uri': uri} >>> >>> response = client.recognize(config, audio) Args: config (Union[dict, ~google.cloud.speech_v1p1beta1.types.RecognitionConfig]): *Required* Provides information to the recognizer that specifies how to process the request. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.speech_v1p1beta1.types.RecognitionConfig` audio (Union[dict, ~google.cloud.speech_v1p1beta1.types.RecognitionAudio]): *Required* The audio data to be recognized. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.speech_v1p1beta1.types.RecognitionAudio` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.speech_v1p1beta1.types.RecognizeResponse` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. 
google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. if "recognize" not in self._inner_api_calls: self._inner_api_calls[ "recognize" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.recognize, default_retry=self._method_configs["Recognize"].retry, default_timeout=self._method_configs["Recognize"].timeout, client_info=self._client_info, ) request = cloud_speech_pb2.RecognizeRequest(config=config, audio=audio) return self._inner_api_calls["recognize"]( request, retry=retry, timeout=timeout, metadata=metadata )
python
def recognize( self, config, audio, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Performs synchronous speech recognition: receive results after all audio has been sent and processed. Example: >>> from google.cloud import speech_v1p1beta1 >>> from google.cloud.speech_v1p1beta1 import enums >>> >>> client = speech_v1p1beta1.SpeechClient() >>> >>> encoding = enums.RecognitionConfig.AudioEncoding.FLAC >>> sample_rate_hertz = 44100 >>> language_code = 'en-US' >>> config = {'encoding': encoding, 'sample_rate_hertz': sample_rate_hertz, 'language_code': language_code} >>> uri = 'gs://bucket_name/file_name.flac' >>> audio = {'uri': uri} >>> >>> response = client.recognize(config, audio) Args: config (Union[dict, ~google.cloud.speech_v1p1beta1.types.RecognitionConfig]): *Required* Provides information to the recognizer that specifies how to process the request. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.speech_v1p1beta1.types.RecognitionConfig` audio (Union[dict, ~google.cloud.speech_v1p1beta1.types.RecognitionAudio]): *Required* The audio data to be recognized. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.speech_v1p1beta1.types.RecognitionAudio` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.speech_v1p1beta1.types.RecognizeResponse` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. 
google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. if "recognize" not in self._inner_api_calls: self._inner_api_calls[ "recognize" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.recognize, default_retry=self._method_configs["Recognize"].retry, default_timeout=self._method_configs["Recognize"].timeout, client_info=self._client_info, ) request = cloud_speech_pb2.RecognizeRequest(config=config, audio=audio) return self._inner_api_calls["recognize"]( request, retry=retry, timeout=timeout, metadata=metadata )
[ "def", "recognize", "(", "self", ",", "config", ",", "audio", ",", "retry", "=", "google", ".", "api_core", ".", "gapic_v1", ".", "method", ".", "DEFAULT", ",", "timeout", "=", "google", ".", "api_core", ".", "gapic_v1", ".", "method", ".", "DEFAULT", ",", "metadata", "=", "None", ",", ")", ":", "# Wrap the transport method to add retry and timeout logic.", "if", "\"recognize\"", "not", "in", "self", ".", "_inner_api_calls", ":", "self", ".", "_inner_api_calls", "[", "\"recognize\"", "]", "=", "google", ".", "api_core", ".", "gapic_v1", ".", "method", ".", "wrap_method", "(", "self", ".", "transport", ".", "recognize", ",", "default_retry", "=", "self", ".", "_method_configs", "[", "\"Recognize\"", "]", ".", "retry", ",", "default_timeout", "=", "self", ".", "_method_configs", "[", "\"Recognize\"", "]", ".", "timeout", ",", "client_info", "=", "self", ".", "_client_info", ",", ")", "request", "=", "cloud_speech_pb2", ".", "RecognizeRequest", "(", "config", "=", "config", ",", "audio", "=", "audio", ")", "return", "self", ".", "_inner_api_calls", "[", "\"recognize\"", "]", "(", "request", ",", "retry", "=", "retry", ",", "timeout", "=", "timeout", ",", "metadata", "=", "metadata", ")" ]
Performs synchronous speech recognition: receive results after all audio has been sent and processed. Example: >>> from google.cloud import speech_v1p1beta1 >>> from google.cloud.speech_v1p1beta1 import enums >>> >>> client = speech_v1p1beta1.SpeechClient() >>> >>> encoding = enums.RecognitionConfig.AudioEncoding.FLAC >>> sample_rate_hertz = 44100 >>> language_code = 'en-US' >>> config = {'encoding': encoding, 'sample_rate_hertz': sample_rate_hertz, 'language_code': language_code} >>> uri = 'gs://bucket_name/file_name.flac' >>> audio = {'uri': uri} >>> >>> response = client.recognize(config, audio) Args: config (Union[dict, ~google.cloud.speech_v1p1beta1.types.RecognitionConfig]): *Required* Provides information to the recognizer that specifies how to process the request. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.speech_v1p1beta1.types.RecognitionConfig` audio (Union[dict, ~google.cloud.speech_v1p1beta1.types.RecognitionAudio]): *Required* The audio data to be recognized. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.speech_v1p1beta1.types.RecognitionAudio` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.speech_v1p1beta1.types.RecognizeResponse` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid.
[ "Performs", "synchronous", "speech", "recognition", ":", "receive", "results", "after", "all", "audio", "has", "been", "sent", "and", "processed", "." ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/speech/google/cloud/speech_v1p1beta1/gapic/speech_client.py#L170-L240
train
googleapis/google-cloud-python
speech/google/cloud/speech_v1p1beta1/gapic/speech_client.py
SpeechClient.long_running_recognize
def long_running_recognize( self, config, audio, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Performs asynchronous speech recognition: receive results via the google.longrunning.Operations interface. Returns either an ``Operation.error`` or an ``Operation.response`` which contains a ``LongRunningRecognizeResponse`` message. Example: >>> from google.cloud import speech_v1p1beta1 >>> from google.cloud.speech_v1p1beta1 import enums >>> >>> client = speech_v1p1beta1.SpeechClient() >>> >>> encoding = enums.RecognitionConfig.AudioEncoding.FLAC >>> sample_rate_hertz = 44100 >>> language_code = 'en-US' >>> config = {'encoding': encoding, 'sample_rate_hertz': sample_rate_hertz, 'language_code': language_code} >>> uri = 'gs://bucket_name/file_name.flac' >>> audio = {'uri': uri} >>> >>> response = client.long_running_recognize(config, audio) >>> >>> def callback(operation_future): ... # Handle result. ... result = operation_future.result() >>> >>> response.add_done_callback(callback) >>> >>> # Handle metadata. >>> metadata = response.metadata() Args: config (Union[dict, ~google.cloud.speech_v1p1beta1.types.RecognitionConfig]): *Required* Provides information to the recognizer that specifies how to process the request. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.speech_v1p1beta1.types.RecognitionConfig` audio (Union[dict, ~google.cloud.speech_v1p1beta1.types.RecognitionAudio]): *Required* The audio data to be recognized. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.speech_v1p1beta1.types.RecognitionAudio` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. 
Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.speech_v1p1beta1.types._OperationFuture` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. if "long_running_recognize" not in self._inner_api_calls: self._inner_api_calls[ "long_running_recognize" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.long_running_recognize, default_retry=self._method_configs["LongRunningRecognize"].retry, default_timeout=self._method_configs["LongRunningRecognize"].timeout, client_info=self._client_info, ) request = cloud_speech_pb2.LongRunningRecognizeRequest( config=config, audio=audio ) operation = self._inner_api_calls["long_running_recognize"]( request, retry=retry, timeout=timeout, metadata=metadata ) return google.api_core.operation.from_gapic( operation, self.transport._operations_client, cloud_speech_pb2.LongRunningRecognizeResponse, metadata_type=cloud_speech_pb2.LongRunningRecognizeMetadata, )
python
def long_running_recognize( self, config, audio, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Performs asynchronous speech recognition: receive results via the google.longrunning.Operations interface. Returns either an ``Operation.error`` or an ``Operation.response`` which contains a ``LongRunningRecognizeResponse`` message. Example: >>> from google.cloud import speech_v1p1beta1 >>> from google.cloud.speech_v1p1beta1 import enums >>> >>> client = speech_v1p1beta1.SpeechClient() >>> >>> encoding = enums.RecognitionConfig.AudioEncoding.FLAC >>> sample_rate_hertz = 44100 >>> language_code = 'en-US' >>> config = {'encoding': encoding, 'sample_rate_hertz': sample_rate_hertz, 'language_code': language_code} >>> uri = 'gs://bucket_name/file_name.flac' >>> audio = {'uri': uri} >>> >>> response = client.long_running_recognize(config, audio) >>> >>> def callback(operation_future): ... # Handle result. ... result = operation_future.result() >>> >>> response.add_done_callback(callback) >>> >>> # Handle metadata. >>> metadata = response.metadata() Args: config (Union[dict, ~google.cloud.speech_v1p1beta1.types.RecognitionConfig]): *Required* Provides information to the recognizer that specifies how to process the request. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.speech_v1p1beta1.types.RecognitionConfig` audio (Union[dict, ~google.cloud.speech_v1p1beta1.types.RecognitionAudio]): *Required* The audio data to be recognized. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.speech_v1p1beta1.types.RecognitionAudio` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. 
Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.speech_v1p1beta1.types._OperationFuture` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. if "long_running_recognize" not in self._inner_api_calls: self._inner_api_calls[ "long_running_recognize" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.long_running_recognize, default_retry=self._method_configs["LongRunningRecognize"].retry, default_timeout=self._method_configs["LongRunningRecognize"].timeout, client_info=self._client_info, ) request = cloud_speech_pb2.LongRunningRecognizeRequest( config=config, audio=audio ) operation = self._inner_api_calls["long_running_recognize"]( request, retry=retry, timeout=timeout, metadata=metadata ) return google.api_core.operation.from_gapic( operation, self.transport._operations_client, cloud_speech_pb2.LongRunningRecognizeResponse, metadata_type=cloud_speech_pb2.LongRunningRecognizeMetadata, )
[ "def", "long_running_recognize", "(", "self", ",", "config", ",", "audio", ",", "retry", "=", "google", ".", "api_core", ".", "gapic_v1", ".", "method", ".", "DEFAULT", ",", "timeout", "=", "google", ".", "api_core", ".", "gapic_v1", ".", "method", ".", "DEFAULT", ",", "metadata", "=", "None", ",", ")", ":", "# Wrap the transport method to add retry and timeout logic.", "if", "\"long_running_recognize\"", "not", "in", "self", ".", "_inner_api_calls", ":", "self", ".", "_inner_api_calls", "[", "\"long_running_recognize\"", "]", "=", "google", ".", "api_core", ".", "gapic_v1", ".", "method", ".", "wrap_method", "(", "self", ".", "transport", ".", "long_running_recognize", ",", "default_retry", "=", "self", ".", "_method_configs", "[", "\"LongRunningRecognize\"", "]", ".", "retry", ",", "default_timeout", "=", "self", ".", "_method_configs", "[", "\"LongRunningRecognize\"", "]", ".", "timeout", ",", "client_info", "=", "self", ".", "_client_info", ",", ")", "request", "=", "cloud_speech_pb2", ".", "LongRunningRecognizeRequest", "(", "config", "=", "config", ",", "audio", "=", "audio", ")", "operation", "=", "self", ".", "_inner_api_calls", "[", "\"long_running_recognize\"", "]", "(", "request", ",", "retry", "=", "retry", ",", "timeout", "=", "timeout", ",", "metadata", "=", "metadata", ")", "return", "google", ".", "api_core", ".", "operation", ".", "from_gapic", "(", "operation", ",", "self", ".", "transport", ".", "_operations_client", ",", "cloud_speech_pb2", ".", "LongRunningRecognizeResponse", ",", "metadata_type", "=", "cloud_speech_pb2", ".", "LongRunningRecognizeMetadata", ",", ")" ]
Performs asynchronous speech recognition: receive results via the google.longrunning.Operations interface. Returns either an ``Operation.error`` or an ``Operation.response`` which contains a ``LongRunningRecognizeResponse`` message. Example: >>> from google.cloud import speech_v1p1beta1 >>> from google.cloud.speech_v1p1beta1 import enums >>> >>> client = speech_v1p1beta1.SpeechClient() >>> >>> encoding = enums.RecognitionConfig.AudioEncoding.FLAC >>> sample_rate_hertz = 44100 >>> language_code = 'en-US' >>> config = {'encoding': encoding, 'sample_rate_hertz': sample_rate_hertz, 'language_code': language_code} >>> uri = 'gs://bucket_name/file_name.flac' >>> audio = {'uri': uri} >>> >>> response = client.long_running_recognize(config, audio) >>> >>> def callback(operation_future): ... # Handle result. ... result = operation_future.result() >>> >>> response.add_done_callback(callback) >>> >>> # Handle metadata. >>> metadata = response.metadata() Args: config (Union[dict, ~google.cloud.speech_v1p1beta1.types.RecognitionConfig]): *Required* Provides information to the recognizer that specifies how to process the request. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.speech_v1p1beta1.types.RecognitionConfig` audio (Union[dict, ~google.cloud.speech_v1p1beta1.types.RecognitionAudio]): *Required* The audio data to be recognized. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.speech_v1p1beta1.types.RecognitionAudio` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. 
Returns: A :class:`~google.cloud.speech_v1p1beta1.types._OperationFuture` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid.
[ "Performs", "asynchronous", "speech", "recognition", ":", "receive", "results", "via", "the", "google", ".", "longrunning", ".", "Operations", "interface", ".", "Returns", "either", "an", "Operation", ".", "error", "or", "an", "Operation", ".", "response", "which", "contains", "a", "LongRunningRecognizeResponse", "message", "." ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/speech/google/cloud/speech_v1p1beta1/gapic/speech_client.py#L242-L331
train
googleapis/google-cloud-python
dataproc/google/cloud/dataproc_v1beta2/gapic/workflow_template_service_client.py
WorkflowTemplateServiceClient.region_path
def region_path(cls, project, region): """Return a fully-qualified region string.""" return google.api_core.path_template.expand( "projects/{project}/regions/{region}", project=project, region=region )
python
def region_path(cls, project, region): """Return a fully-qualified region string.""" return google.api_core.path_template.expand( "projects/{project}/regions/{region}", project=project, region=region )
[ "def", "region_path", "(", "cls", ",", "project", ",", "region", ")", ":", "return", "google", ".", "api_core", ".", "path_template", ".", "expand", "(", "\"projects/{project}/regions/{region}\"", ",", "project", "=", "project", ",", "region", "=", "region", ")" ]
Return a fully-qualified region string.
[ "Return", "a", "fully", "-", "qualified", "region", "string", "." ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/dataproc/google/cloud/dataproc_v1beta2/gapic/workflow_template_service_client.py#L87-L91
train
googleapis/google-cloud-python
dataproc/google/cloud/dataproc_v1beta2/gapic/workflow_template_service_client.py
WorkflowTemplateServiceClient.workflow_template_path
def workflow_template_path(cls, project, region, workflow_template): """Return a fully-qualified workflow_template string.""" return google.api_core.path_template.expand( "projects/{project}/regions/{region}/workflowTemplates/{workflow_template}", project=project, region=region, workflow_template=workflow_template, )
python
def workflow_template_path(cls, project, region, workflow_template): """Return a fully-qualified workflow_template string.""" return google.api_core.path_template.expand( "projects/{project}/regions/{region}/workflowTemplates/{workflow_template}", project=project, region=region, workflow_template=workflow_template, )
[ "def", "workflow_template_path", "(", "cls", ",", "project", ",", "region", ",", "workflow_template", ")", ":", "return", "google", ".", "api_core", ".", "path_template", ".", "expand", "(", "\"projects/{project}/regions/{region}/workflowTemplates/{workflow_template}\"", ",", "project", "=", "project", ",", "region", "=", "region", ",", "workflow_template", "=", "workflow_template", ",", ")" ]
Return a fully-qualified workflow_template string.
[ "Return", "a", "fully", "-", "qualified", "workflow_template", "string", "." ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/dataproc/google/cloud/dataproc_v1beta2/gapic/workflow_template_service_client.py#L94-L101
train
googleapis/google-cloud-python
dataproc/google/cloud/dataproc_v1beta2/gapic/workflow_template_service_client.py
WorkflowTemplateServiceClient.create_workflow_template
def create_workflow_template( self, parent, template, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Creates new workflow template. Example: >>> from google.cloud import dataproc_v1beta2 >>> >>> client = dataproc_v1beta2.WorkflowTemplateServiceClient() >>> >>> parent = client.region_path('[PROJECT]', '[REGION]') >>> >>> # TODO: Initialize `template`: >>> template = {} >>> >>> response = client.create_workflow_template(parent, template) Args: parent (str): Required. The "resource name" of the region, as described in https://cloud.google.com/apis/design/resource\_names of the form ``projects/{project_id}/regions/{region}`` template (Union[dict, ~google.cloud.dataproc_v1beta2.types.WorkflowTemplate]): Required. The Dataproc workflow template to create. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.dataproc_v1beta2.types.WorkflowTemplate` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.dataproc_v1beta2.types.WorkflowTemplate` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. 
if "create_workflow_template" not in self._inner_api_calls: self._inner_api_calls[ "create_workflow_template" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.create_workflow_template, default_retry=self._method_configs["CreateWorkflowTemplate"].retry, default_timeout=self._method_configs["CreateWorkflowTemplate"].timeout, client_info=self._client_info, ) request = workflow_templates_pb2.CreateWorkflowTemplateRequest( parent=parent, template=template ) return self._inner_api_calls["create_workflow_template"]( request, retry=retry, timeout=timeout, metadata=metadata )
python
def create_workflow_template( self, parent, template, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Creates new workflow template. Example: >>> from google.cloud import dataproc_v1beta2 >>> >>> client = dataproc_v1beta2.WorkflowTemplateServiceClient() >>> >>> parent = client.region_path('[PROJECT]', '[REGION]') >>> >>> # TODO: Initialize `template`: >>> template = {} >>> >>> response = client.create_workflow_template(parent, template) Args: parent (str): Required. The "resource name" of the region, as described in https://cloud.google.com/apis/design/resource\_names of the form ``projects/{project_id}/regions/{region}`` template (Union[dict, ~google.cloud.dataproc_v1beta2.types.WorkflowTemplate]): Required. The Dataproc workflow template to create. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.dataproc_v1beta2.types.WorkflowTemplate` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.dataproc_v1beta2.types.WorkflowTemplate` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. 
if "create_workflow_template" not in self._inner_api_calls: self._inner_api_calls[ "create_workflow_template" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.create_workflow_template, default_retry=self._method_configs["CreateWorkflowTemplate"].retry, default_timeout=self._method_configs["CreateWorkflowTemplate"].timeout, client_info=self._client_info, ) request = workflow_templates_pb2.CreateWorkflowTemplateRequest( parent=parent, template=template ) return self._inner_api_calls["create_workflow_template"]( request, retry=retry, timeout=timeout, metadata=metadata )
[ "def", "create_workflow_template", "(", "self", ",", "parent", ",", "template", ",", "retry", "=", "google", ".", "api_core", ".", "gapic_v1", ".", "method", ".", "DEFAULT", ",", "timeout", "=", "google", ".", "api_core", ".", "gapic_v1", ".", "method", ".", "DEFAULT", ",", "metadata", "=", "None", ",", ")", ":", "# Wrap the transport method to add retry and timeout logic.", "if", "\"create_workflow_template\"", "not", "in", "self", ".", "_inner_api_calls", ":", "self", ".", "_inner_api_calls", "[", "\"create_workflow_template\"", "]", "=", "google", ".", "api_core", ".", "gapic_v1", ".", "method", ".", "wrap_method", "(", "self", ".", "transport", ".", "create_workflow_template", ",", "default_retry", "=", "self", ".", "_method_configs", "[", "\"CreateWorkflowTemplate\"", "]", ".", "retry", ",", "default_timeout", "=", "self", ".", "_method_configs", "[", "\"CreateWorkflowTemplate\"", "]", ".", "timeout", ",", "client_info", "=", "self", ".", "_client_info", ",", ")", "request", "=", "workflow_templates_pb2", ".", "CreateWorkflowTemplateRequest", "(", "parent", "=", "parent", ",", "template", "=", "template", ")", "return", "self", ".", "_inner_api_calls", "[", "\"create_workflow_template\"", "]", "(", "request", ",", "retry", "=", "retry", ",", "timeout", "=", "timeout", ",", "metadata", "=", "metadata", ")" ]
Creates new workflow template. Example: >>> from google.cloud import dataproc_v1beta2 >>> >>> client = dataproc_v1beta2.WorkflowTemplateServiceClient() >>> >>> parent = client.region_path('[PROJECT]', '[REGION]') >>> >>> # TODO: Initialize `template`: >>> template = {} >>> >>> response = client.create_workflow_template(parent, template) Args: parent (str): Required. The "resource name" of the region, as described in https://cloud.google.com/apis/design/resource\_names of the form ``projects/{project_id}/regions/{region}`` template (Union[dict, ~google.cloud.dataproc_v1beta2.types.WorkflowTemplate]): Required. The Dataproc workflow template to create. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.dataproc_v1beta2.types.WorkflowTemplate` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.dataproc_v1beta2.types.WorkflowTemplate` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid.
[ "Creates", "new", "workflow", "template", "." ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/dataproc/google/cloud/dataproc_v1beta2/gapic/workflow_template_service_client.py#L202-L268
train
googleapis/google-cloud-python
dataproc/google/cloud/dataproc_v1beta2/gapic/workflow_template_service_client.py
WorkflowTemplateServiceClient.get_workflow_template
def get_workflow_template( self, name, version=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Retrieves the latest workflow template. Can retrieve previously instantiated template by specifying optional version parameter. Example: >>> from google.cloud import dataproc_v1beta2 >>> >>> client = dataproc_v1beta2.WorkflowTemplateServiceClient() >>> >>> name = client.workflow_template_path('[PROJECT]', '[REGION]', '[WORKFLOW_TEMPLATE]') >>> >>> response = client.get_workflow_template(name) Args: name (str): Required. The "resource name" of the workflow template, as described in https://cloud.google.com/apis/design/resource\_names of the form ``projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`` version (int): Optional. The version of workflow template to retrieve. Only previously instatiated versions can be retrieved. If unspecified, retrieves the current version. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.dataproc_v1beta2.types.WorkflowTemplate` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. 
if "get_workflow_template" not in self._inner_api_calls: self._inner_api_calls[ "get_workflow_template" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.get_workflow_template, default_retry=self._method_configs["GetWorkflowTemplate"].retry, default_timeout=self._method_configs["GetWorkflowTemplate"].timeout, client_info=self._client_info, ) request = workflow_templates_pb2.GetWorkflowTemplateRequest( name=name, version=version ) return self._inner_api_calls["get_workflow_template"]( request, retry=retry, timeout=timeout, metadata=metadata )
python
def get_workflow_template( self, name, version=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Retrieves the latest workflow template. Can retrieve previously instantiated template by specifying optional version parameter. Example: >>> from google.cloud import dataproc_v1beta2 >>> >>> client = dataproc_v1beta2.WorkflowTemplateServiceClient() >>> >>> name = client.workflow_template_path('[PROJECT]', '[REGION]', '[WORKFLOW_TEMPLATE]') >>> >>> response = client.get_workflow_template(name) Args: name (str): Required. The "resource name" of the workflow template, as described in https://cloud.google.com/apis/design/resource\_names of the form ``projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`` version (int): Optional. The version of workflow template to retrieve. Only previously instatiated versions can be retrieved. If unspecified, retrieves the current version. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.dataproc_v1beta2.types.WorkflowTemplate` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. 
if "get_workflow_template" not in self._inner_api_calls: self._inner_api_calls[ "get_workflow_template" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.get_workflow_template, default_retry=self._method_configs["GetWorkflowTemplate"].retry, default_timeout=self._method_configs["GetWorkflowTemplate"].timeout, client_info=self._client_info, ) request = workflow_templates_pb2.GetWorkflowTemplateRequest( name=name, version=version ) return self._inner_api_calls["get_workflow_template"]( request, retry=retry, timeout=timeout, metadata=metadata )
[ "def", "get_workflow_template", "(", "self", ",", "name", ",", "version", "=", "None", ",", "retry", "=", "google", ".", "api_core", ".", "gapic_v1", ".", "method", ".", "DEFAULT", ",", "timeout", "=", "google", ".", "api_core", ".", "gapic_v1", ".", "method", ".", "DEFAULT", ",", "metadata", "=", "None", ",", ")", ":", "# Wrap the transport method to add retry and timeout logic.", "if", "\"get_workflow_template\"", "not", "in", "self", ".", "_inner_api_calls", ":", "self", ".", "_inner_api_calls", "[", "\"get_workflow_template\"", "]", "=", "google", ".", "api_core", ".", "gapic_v1", ".", "method", ".", "wrap_method", "(", "self", ".", "transport", ".", "get_workflow_template", ",", "default_retry", "=", "self", ".", "_method_configs", "[", "\"GetWorkflowTemplate\"", "]", ".", "retry", ",", "default_timeout", "=", "self", ".", "_method_configs", "[", "\"GetWorkflowTemplate\"", "]", ".", "timeout", ",", "client_info", "=", "self", ".", "_client_info", ",", ")", "request", "=", "workflow_templates_pb2", ".", "GetWorkflowTemplateRequest", "(", "name", "=", "name", ",", "version", "=", "version", ")", "return", "self", ".", "_inner_api_calls", "[", "\"get_workflow_template\"", "]", "(", "request", ",", "retry", "=", "retry", ",", "timeout", "=", "timeout", ",", "metadata", "=", "metadata", ")" ]
Retrieves the latest workflow template. Can retrieve previously instantiated template by specifying optional version parameter. Example: >>> from google.cloud import dataproc_v1beta2 >>> >>> client = dataproc_v1beta2.WorkflowTemplateServiceClient() >>> >>> name = client.workflow_template_path('[PROJECT]', '[REGION]', '[WORKFLOW_TEMPLATE]') >>> >>> response = client.get_workflow_template(name) Args: name (str): Required. The "resource name" of the workflow template, as described in https://cloud.google.com/apis/design/resource\_names of the form ``projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`` version (int): Optional. The version of workflow template to retrieve. Only previously instatiated versions can be retrieved. If unspecified, retrieves the current version. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.dataproc_v1beta2.types.WorkflowTemplate` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid.
[ "Retrieves", "the", "latest", "workflow", "template", "." ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/dataproc/google/cloud/dataproc_v1beta2/gapic/workflow_template_service_client.py#L270-L336
train
googleapis/google-cloud-python
dataproc/google/cloud/dataproc_v1beta2/gapic/workflow_template_service_client.py
WorkflowTemplateServiceClient.instantiate_workflow_template
def instantiate_workflow_template( self, name, version=None, instance_id=None, request_id=None, parameters=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Instantiates a template and begins execution. The returned Operation can be used to track execution of workflow by polling ``operations.get``. The Operation will complete when entire workflow is finished. The running workflow can be aborted via ``operations.cancel``. This will cause any inflight jobs to be cancelled and workflow-owned clusters to be deleted. The ``Operation.metadata`` will be ``WorkflowMetadata``. On successful completion, ``Operation.response`` will be ``Empty``. Example: >>> from google.cloud import dataproc_v1beta2 >>> >>> client = dataproc_v1beta2.WorkflowTemplateServiceClient() >>> >>> name = client.workflow_template_path('[PROJECT]', '[REGION]', '[WORKFLOW_TEMPLATE]') >>> >>> response = client.instantiate_workflow_template(name) >>> >>> def callback(operation_future): ... # Handle result. ... result = operation_future.result() >>> >>> response.add_done_callback(callback) >>> >>> # Handle metadata. >>> metadata = response.metadata() Args: name (str): Required. The "resource name" of the workflow template, as described in https://cloud.google.com/apis/design/resource\_names of the form ``projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`` version (int): Optional. The version of workflow template to instantiate. If specified, the workflow will be instantiated only if the current version of the workflow template has the supplied version. This option cannot be used to instantiate a previous version of workflow template. instance_id (str): Deprecated. Please use ``request_id`` field instead. request_id (str): Optional. A tag that prevents multiple concurrent workflow instances with the same tag from running. This mitigates risk of concurrent instances started due to retries. 
It is recommended to always set this value to a `UUID <https://en.wikipedia.org/wiki/Universally_unique_identifier>`__. The tag must contain only letters (a-z, A-Z), numbers (0-9), underscores (\_), and hyphens (-). The maximum length is 40 characters. parameters (dict[str -> str]): Optional. Map from parameter names to values that should be used for those parameters. Values may not exceed 100 characters. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.dataproc_v1beta2.types._OperationFuture` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. 
if "instantiate_workflow_template" not in self._inner_api_calls: self._inner_api_calls[ "instantiate_workflow_template" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.instantiate_workflow_template, default_retry=self._method_configs["InstantiateWorkflowTemplate"].retry, default_timeout=self._method_configs[ "InstantiateWorkflowTemplate" ].timeout, client_info=self._client_info, ) request = workflow_templates_pb2.InstantiateWorkflowTemplateRequest( name=name, version=version, instance_id=instance_id, request_id=request_id, parameters=parameters, ) operation = self._inner_api_calls["instantiate_workflow_template"]( request, retry=retry, timeout=timeout, metadata=metadata ) return google.api_core.operation.from_gapic( operation, self.transport._operations_client, empty_pb2.Empty, metadata_type=workflow_templates_pb2.WorkflowMetadata, )
python
def instantiate_workflow_template( self, name, version=None, instance_id=None, request_id=None, parameters=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Instantiates a template and begins execution. The returned Operation can be used to track execution of workflow by polling ``operations.get``. The Operation will complete when entire workflow is finished. The running workflow can be aborted via ``operations.cancel``. This will cause any inflight jobs to be cancelled and workflow-owned clusters to be deleted. The ``Operation.metadata`` will be ``WorkflowMetadata``. On successful completion, ``Operation.response`` will be ``Empty``. Example: >>> from google.cloud import dataproc_v1beta2 >>> >>> client = dataproc_v1beta2.WorkflowTemplateServiceClient() >>> >>> name = client.workflow_template_path('[PROJECT]', '[REGION]', '[WORKFLOW_TEMPLATE]') >>> >>> response = client.instantiate_workflow_template(name) >>> >>> def callback(operation_future): ... # Handle result. ... result = operation_future.result() >>> >>> response.add_done_callback(callback) >>> >>> # Handle metadata. >>> metadata = response.metadata() Args: name (str): Required. The "resource name" of the workflow template, as described in https://cloud.google.com/apis/design/resource\_names of the form ``projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`` version (int): Optional. The version of workflow template to instantiate. If specified, the workflow will be instantiated only if the current version of the workflow template has the supplied version. This option cannot be used to instantiate a previous version of workflow template. instance_id (str): Deprecated. Please use ``request_id`` field instead. request_id (str): Optional. A tag that prevents multiple concurrent workflow instances with the same tag from running. This mitigates risk of concurrent instances started due to retries. 
It is recommended to always set this value to a `UUID <https://en.wikipedia.org/wiki/Universally_unique_identifier>`__. The tag must contain only letters (a-z, A-Z), numbers (0-9), underscores (\_), and hyphens (-). The maximum length is 40 characters. parameters (dict[str -> str]): Optional. Map from parameter names to values that should be used for those parameters. Values may not exceed 100 characters. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.dataproc_v1beta2.types._OperationFuture` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. 
if "instantiate_workflow_template" not in self._inner_api_calls: self._inner_api_calls[ "instantiate_workflow_template" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.instantiate_workflow_template, default_retry=self._method_configs["InstantiateWorkflowTemplate"].retry, default_timeout=self._method_configs[ "InstantiateWorkflowTemplate" ].timeout, client_info=self._client_info, ) request = workflow_templates_pb2.InstantiateWorkflowTemplateRequest( name=name, version=version, instance_id=instance_id, request_id=request_id, parameters=parameters, ) operation = self._inner_api_calls["instantiate_workflow_template"]( request, retry=retry, timeout=timeout, metadata=metadata ) return google.api_core.operation.from_gapic( operation, self.transport._operations_client, empty_pb2.Empty, metadata_type=workflow_templates_pb2.WorkflowMetadata, )
[ "def", "instantiate_workflow_template", "(", "self", ",", "name", ",", "version", "=", "None", ",", "instance_id", "=", "None", ",", "request_id", "=", "None", ",", "parameters", "=", "None", ",", "retry", "=", "google", ".", "api_core", ".", "gapic_v1", ".", "method", ".", "DEFAULT", ",", "timeout", "=", "google", ".", "api_core", ".", "gapic_v1", ".", "method", ".", "DEFAULT", ",", "metadata", "=", "None", ",", ")", ":", "# Wrap the transport method to add retry and timeout logic.", "if", "\"instantiate_workflow_template\"", "not", "in", "self", ".", "_inner_api_calls", ":", "self", ".", "_inner_api_calls", "[", "\"instantiate_workflow_template\"", "]", "=", "google", ".", "api_core", ".", "gapic_v1", ".", "method", ".", "wrap_method", "(", "self", ".", "transport", ".", "instantiate_workflow_template", ",", "default_retry", "=", "self", ".", "_method_configs", "[", "\"InstantiateWorkflowTemplate\"", "]", ".", "retry", ",", "default_timeout", "=", "self", ".", "_method_configs", "[", "\"InstantiateWorkflowTemplate\"", "]", ".", "timeout", ",", "client_info", "=", "self", ".", "_client_info", ",", ")", "request", "=", "workflow_templates_pb2", ".", "InstantiateWorkflowTemplateRequest", "(", "name", "=", "name", ",", "version", "=", "version", ",", "instance_id", "=", "instance_id", ",", "request_id", "=", "request_id", ",", "parameters", "=", "parameters", ",", ")", "operation", "=", "self", ".", "_inner_api_calls", "[", "\"instantiate_workflow_template\"", "]", "(", "request", ",", "retry", "=", "retry", ",", "timeout", "=", "timeout", ",", "metadata", "=", "metadata", ")", "return", "google", ".", "api_core", ".", "operation", ".", "from_gapic", "(", "operation", ",", "self", ".", "transport", ".", "_operations_client", ",", "empty_pb2", ".", "Empty", ",", "metadata_type", "=", "workflow_templates_pb2", ".", "WorkflowMetadata", ",", ")" ]
Instantiates a template and begins execution. The returned Operation can be used to track execution of workflow by polling ``operations.get``. The Operation will complete when entire workflow is finished. The running workflow can be aborted via ``operations.cancel``. This will cause any inflight jobs to be cancelled and workflow-owned clusters to be deleted. The ``Operation.metadata`` will be ``WorkflowMetadata``. On successful completion, ``Operation.response`` will be ``Empty``. Example: >>> from google.cloud import dataproc_v1beta2 >>> >>> client = dataproc_v1beta2.WorkflowTemplateServiceClient() >>> >>> name = client.workflow_template_path('[PROJECT]', '[REGION]', '[WORKFLOW_TEMPLATE]') >>> >>> response = client.instantiate_workflow_template(name) >>> >>> def callback(operation_future): ... # Handle result. ... result = operation_future.result() >>> >>> response.add_done_callback(callback) >>> >>> # Handle metadata. >>> metadata = response.metadata() Args: name (str): Required. The "resource name" of the workflow template, as described in https://cloud.google.com/apis/design/resource\_names of the form ``projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`` version (int): Optional. The version of workflow template to instantiate. If specified, the workflow will be instantiated only if the current version of the workflow template has the supplied version. This option cannot be used to instantiate a previous version of workflow template. instance_id (str): Deprecated. Please use ``request_id`` field instead. request_id (str): Optional. A tag that prevents multiple concurrent workflow instances with the same tag from running. This mitigates risk of concurrent instances started due to retries. It is recommended to always set this value to a `UUID <https://en.wikipedia.org/wiki/Universally_unique_identifier>`__. The tag must contain only letters (a-z, A-Z), numbers (0-9), underscores (\_), and hyphens (-). The maximum length is 40 characters. 
parameters (dict[str -> str]): Optional. Map from parameter names to values that should be used for those parameters. Values may not exceed 100 characters. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.dataproc_v1beta2.types._OperationFuture` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid.
[ "Instantiates", "a", "template", "and", "begins", "execution", "." ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/dataproc/google/cloud/dataproc_v1beta2/gapic/workflow_template_service_client.py#L338-L451
train
googleapis/google-cloud-python
dataproc/google/cloud/dataproc_v1beta2/gapic/workflow_template_service_client.py
WorkflowTemplateServiceClient.instantiate_inline_workflow_template
def instantiate_inline_workflow_template( self, parent, template, instance_id=None, request_id=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Instantiates a template and begins execution. This method is equivalent to executing the sequence ``CreateWorkflowTemplate``, ``InstantiateWorkflowTemplate``, ``DeleteWorkflowTemplate``. The returned Operation can be used to track execution of workflow by polling ``operations.get``. The Operation will complete when entire workflow is finished. The running workflow can be aborted via ``operations.cancel``. This will cause any inflight jobs to be cancelled and workflow-owned clusters to be deleted. The ``Operation.metadata`` will be ``WorkflowMetadata``. On successful completion, ``Operation.response`` will be ``Empty``. Example: >>> from google.cloud import dataproc_v1beta2 >>> >>> client = dataproc_v1beta2.WorkflowTemplateServiceClient() >>> >>> parent = client.region_path('[PROJECT]', '[REGION]') >>> >>> # TODO: Initialize `template`: >>> template = {} >>> >>> response = client.instantiate_inline_workflow_template(parent, template) >>> >>> def callback(operation_future): ... # Handle result. ... result = operation_future.result() >>> >>> response.add_done_callback(callback) >>> >>> # Handle metadata. >>> metadata = response.metadata() Args: parent (str): Required. The "resource name" of the workflow template region, as described in https://cloud.google.com/apis/design/resource\_names of the form ``projects/{project_id}/regions/{region}`` template (Union[dict, ~google.cloud.dataproc_v1beta2.types.WorkflowTemplate]): Required. The workflow template to instantiate. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.dataproc_v1beta2.types.WorkflowTemplate` instance_id (str): Deprecated. Please use ``request_id`` field instead. request_id (str): Optional. 
A tag that prevents multiple concurrent workflow instances with the same tag from running. This mitigates risk of concurrent instances started due to retries. It is recommended to always set this value to a `UUID <https://en.wikipedia.org/wiki/Universally_unique_identifier>`__. The tag must contain only letters (a-z, A-Z), numbers (0-9), underscores (\_), and hyphens (-). The maximum length is 40 characters. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.dataproc_v1beta2.types._OperationFuture` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. 
if "instantiate_inline_workflow_template" not in self._inner_api_calls: self._inner_api_calls[ "instantiate_inline_workflow_template" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.instantiate_inline_workflow_template, default_retry=self._method_configs[ "InstantiateInlineWorkflowTemplate" ].retry, default_timeout=self._method_configs[ "InstantiateInlineWorkflowTemplate" ].timeout, client_info=self._client_info, ) request = workflow_templates_pb2.InstantiateInlineWorkflowTemplateRequest( parent=parent, template=template, instance_id=instance_id, request_id=request_id, ) operation = self._inner_api_calls["instantiate_inline_workflow_template"]( request, retry=retry, timeout=timeout, metadata=metadata ) return google.api_core.operation.from_gapic( operation, self.transport._operations_client, empty_pb2.Empty, metadata_type=workflow_templates_pb2.WorkflowMetadata, )
python
def instantiate_inline_workflow_template( self, parent, template, instance_id=None, request_id=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Instantiates a template and begins execution. This method is equivalent to executing the sequence ``CreateWorkflowTemplate``, ``InstantiateWorkflowTemplate``, ``DeleteWorkflowTemplate``. The returned Operation can be used to track execution of workflow by polling ``operations.get``. The Operation will complete when entire workflow is finished. The running workflow can be aborted via ``operations.cancel``. This will cause any inflight jobs to be cancelled and workflow-owned clusters to be deleted. The ``Operation.metadata`` will be ``WorkflowMetadata``. On successful completion, ``Operation.response`` will be ``Empty``. Example: >>> from google.cloud import dataproc_v1beta2 >>> >>> client = dataproc_v1beta2.WorkflowTemplateServiceClient() >>> >>> parent = client.region_path('[PROJECT]', '[REGION]') >>> >>> # TODO: Initialize `template`: >>> template = {} >>> >>> response = client.instantiate_inline_workflow_template(parent, template) >>> >>> def callback(operation_future): ... # Handle result. ... result = operation_future.result() >>> >>> response.add_done_callback(callback) >>> >>> # Handle metadata. >>> metadata = response.metadata() Args: parent (str): Required. The "resource name" of the workflow template region, as described in https://cloud.google.com/apis/design/resource\_names of the form ``projects/{project_id}/regions/{region}`` template (Union[dict, ~google.cloud.dataproc_v1beta2.types.WorkflowTemplate]): Required. The workflow template to instantiate. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.dataproc_v1beta2.types.WorkflowTemplate` instance_id (str): Deprecated. Please use ``request_id`` field instead. request_id (str): Optional. 
A tag that prevents multiple concurrent workflow instances with the same tag from running. This mitigates risk of concurrent instances started due to retries. It is recommended to always set this value to a `UUID <https://en.wikipedia.org/wiki/Universally_unique_identifier>`__. The tag must contain only letters (a-z, A-Z), numbers (0-9), underscores (\_), and hyphens (-). The maximum length is 40 characters. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.dataproc_v1beta2.types._OperationFuture` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. 
if "instantiate_inline_workflow_template" not in self._inner_api_calls: self._inner_api_calls[ "instantiate_inline_workflow_template" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.instantiate_inline_workflow_template, default_retry=self._method_configs[ "InstantiateInlineWorkflowTemplate" ].retry, default_timeout=self._method_configs[ "InstantiateInlineWorkflowTemplate" ].timeout, client_info=self._client_info, ) request = workflow_templates_pb2.InstantiateInlineWorkflowTemplateRequest( parent=parent, template=template, instance_id=instance_id, request_id=request_id, ) operation = self._inner_api_calls["instantiate_inline_workflow_template"]( request, retry=retry, timeout=timeout, metadata=metadata ) return google.api_core.operation.from_gapic( operation, self.transport._operations_client, empty_pb2.Empty, metadata_type=workflow_templates_pb2.WorkflowMetadata, )
[ "def", "instantiate_inline_workflow_template", "(", "self", ",", "parent", ",", "template", ",", "instance_id", "=", "None", ",", "request_id", "=", "None", ",", "retry", "=", "google", ".", "api_core", ".", "gapic_v1", ".", "method", ".", "DEFAULT", ",", "timeout", "=", "google", ".", "api_core", ".", "gapic_v1", ".", "method", ".", "DEFAULT", ",", "metadata", "=", "None", ",", ")", ":", "# Wrap the transport method to add retry and timeout logic.", "if", "\"instantiate_inline_workflow_template\"", "not", "in", "self", ".", "_inner_api_calls", ":", "self", ".", "_inner_api_calls", "[", "\"instantiate_inline_workflow_template\"", "]", "=", "google", ".", "api_core", ".", "gapic_v1", ".", "method", ".", "wrap_method", "(", "self", ".", "transport", ".", "instantiate_inline_workflow_template", ",", "default_retry", "=", "self", ".", "_method_configs", "[", "\"InstantiateInlineWorkflowTemplate\"", "]", ".", "retry", ",", "default_timeout", "=", "self", ".", "_method_configs", "[", "\"InstantiateInlineWorkflowTemplate\"", "]", ".", "timeout", ",", "client_info", "=", "self", ".", "_client_info", ",", ")", "request", "=", "workflow_templates_pb2", ".", "InstantiateInlineWorkflowTemplateRequest", "(", "parent", "=", "parent", ",", "template", "=", "template", ",", "instance_id", "=", "instance_id", ",", "request_id", "=", "request_id", ",", ")", "operation", "=", "self", ".", "_inner_api_calls", "[", "\"instantiate_inline_workflow_template\"", "]", "(", "request", ",", "retry", "=", "retry", ",", "timeout", "=", "timeout", ",", "metadata", "=", "metadata", ")", "return", "google", ".", "api_core", ".", "operation", ".", "from_gapic", "(", "operation", ",", "self", ".", "transport", ".", "_operations_client", ",", "empty_pb2", ".", "Empty", ",", "metadata_type", "=", "workflow_templates_pb2", ".", "WorkflowMetadata", ",", ")" ]
Instantiates a template and begins execution. This method is equivalent to executing the sequence ``CreateWorkflowTemplate``, ``InstantiateWorkflowTemplate``, ``DeleteWorkflowTemplate``. The returned Operation can be used to track execution of workflow by polling ``operations.get``. The Operation will complete when entire workflow is finished. The running workflow can be aborted via ``operations.cancel``. This will cause any inflight jobs to be cancelled and workflow-owned clusters to be deleted. The ``Operation.metadata`` will be ``WorkflowMetadata``. On successful completion, ``Operation.response`` will be ``Empty``. Example: >>> from google.cloud import dataproc_v1beta2 >>> >>> client = dataproc_v1beta2.WorkflowTemplateServiceClient() >>> >>> parent = client.region_path('[PROJECT]', '[REGION]') >>> >>> # TODO: Initialize `template`: >>> template = {} >>> >>> response = client.instantiate_inline_workflow_template(parent, template) >>> >>> def callback(operation_future): ... # Handle result. ... result = operation_future.result() >>> >>> response.add_done_callback(callback) >>> >>> # Handle metadata. >>> metadata = response.metadata() Args: parent (str): Required. The "resource name" of the workflow template region, as described in https://cloud.google.com/apis/design/resource\_names of the form ``projects/{project_id}/regions/{region}`` template (Union[dict, ~google.cloud.dataproc_v1beta2.types.WorkflowTemplate]): Required. The workflow template to instantiate. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.dataproc_v1beta2.types.WorkflowTemplate` instance_id (str): Deprecated. Please use ``request_id`` field instead. request_id (str): Optional. A tag that prevents multiple concurrent workflow instances with the same tag from running. This mitigates risk of concurrent instances started due to retries. 
It is recommended to always set this value to a `UUID <https://en.wikipedia.org/wiki/Universally_unique_identifier>`__. The tag must contain only letters (a-z, A-Z), numbers (0-9), underscores (\_), and hyphens (-). The maximum length is 40 characters. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.dataproc_v1beta2.types._OperationFuture` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid.
[ "Instantiates", "a", "template", "and", "begins", "execution", "." ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/dataproc/google/cloud/dataproc_v1beta2/gapic/workflow_template_service_client.py#L453-L569
train
googleapis/google-cloud-python
bigquery/google/cloud/bigquery/schema.py
_parse_schema_resource
def _parse_schema_resource(info): """Parse a resource fragment into a schema field. Args: info: (Mapping[str->dict]): should contain a "fields" key to be parsed Returns: (Union[Sequence[:class:`google.cloud.bigquery.schema.SchemaField`],None]) a list of parsed fields, or ``None`` if no "fields" key found. """ if "fields" not in info: return () schema = [] for r_field in info["fields"]: name = r_field["name"] field_type = r_field["type"] mode = r_field.get("mode", "NULLABLE") description = r_field.get("description") sub_fields = _parse_schema_resource(r_field) schema.append(SchemaField(name, field_type, mode, description, sub_fields)) return schema
python
def _parse_schema_resource(info): """Parse a resource fragment into a schema field. Args: info: (Mapping[str->dict]): should contain a "fields" key to be parsed Returns: (Union[Sequence[:class:`google.cloud.bigquery.schema.SchemaField`],None]) a list of parsed fields, or ``None`` if no "fields" key found. """ if "fields" not in info: return () schema = [] for r_field in info["fields"]: name = r_field["name"] field_type = r_field["type"] mode = r_field.get("mode", "NULLABLE") description = r_field.get("description") sub_fields = _parse_schema_resource(r_field) schema.append(SchemaField(name, field_type, mode, description, sub_fields)) return schema
[ "def", "_parse_schema_resource", "(", "info", ")", ":", "if", "\"fields\"", "not", "in", "info", ":", "return", "(", ")", "schema", "=", "[", "]", "for", "r_field", "in", "info", "[", "\"fields\"", "]", ":", "name", "=", "r_field", "[", "\"name\"", "]", "field_type", "=", "r_field", "[", "\"type\"", "]", "mode", "=", "r_field", ".", "get", "(", "\"mode\"", ",", "\"NULLABLE\"", ")", "description", "=", "r_field", ".", "get", "(", "\"description\"", ")", "sub_fields", "=", "_parse_schema_resource", "(", "r_field", ")", "schema", ".", "append", "(", "SchemaField", "(", "name", ",", "field_type", ",", "mode", ",", "description", ",", "sub_fields", ")", ")", "return", "schema" ]
Parse a resource fragment into a schema field. Args: info: (Mapping[str->dict]): should contain a "fields" key to be parsed Returns: (Union[Sequence[:class:`google.cloud.bigquery.schema.SchemaField`],None]) a list of parsed fields, or ``None`` if no "fields" key found.
[ "Parse", "a", "resource", "fragment", "into", "a", "schema", "field", "." ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/bigquery/google/cloud/bigquery/schema.py#L164-L185
train
googleapis/google-cloud-python
bigquery/google/cloud/bigquery/schema.py
SchemaField.from_api_repr
def from_api_repr(cls, api_repr): """Return a ``SchemaField`` object deserialized from a dictionary. Args: api_repr (Mapping[str, str]): The serialized representation of the SchemaField, such as what is output by :meth:`to_api_repr`. Returns: google.cloud.biquery.schema.SchemaField: The ``SchemaField`` object. """ # Handle optional properties with default values mode = api_repr.get("mode", "NULLABLE") description = api_repr.get("description") fields = api_repr.get("fields", ()) return cls( field_type=api_repr["type"].upper(), fields=[cls.from_api_repr(f) for f in fields], mode=mode.upper(), description=description, name=api_repr["name"], )
python
def from_api_repr(cls, api_repr): """Return a ``SchemaField`` object deserialized from a dictionary. Args: api_repr (Mapping[str, str]): The serialized representation of the SchemaField, such as what is output by :meth:`to_api_repr`. Returns: google.cloud.biquery.schema.SchemaField: The ``SchemaField`` object. """ # Handle optional properties with default values mode = api_repr.get("mode", "NULLABLE") description = api_repr.get("description") fields = api_repr.get("fields", ()) return cls( field_type=api_repr["type"].upper(), fields=[cls.from_api_repr(f) for f in fields], mode=mode.upper(), description=description, name=api_repr["name"], )
[ "def", "from_api_repr", "(", "cls", ",", "api_repr", ")", ":", "# Handle optional properties with default values", "mode", "=", "api_repr", ".", "get", "(", "\"mode\"", ",", "\"NULLABLE\"", ")", "description", "=", "api_repr", ".", "get", "(", "\"description\"", ")", "fields", "=", "api_repr", ".", "get", "(", "\"fields\"", ",", "(", ")", ")", "return", "cls", "(", "field_type", "=", "api_repr", "[", "\"type\"", "]", ".", "upper", "(", ")", ",", "fields", "=", "[", "cls", ".", "from_api_repr", "(", "f", ")", "for", "f", "in", "fields", "]", ",", "mode", "=", "mode", ".", "upper", "(", ")", ",", "description", "=", "description", ",", "name", "=", "api_repr", "[", "\"name\"", "]", ",", ")" ]
Return a ``SchemaField`` object deserialized from a dictionary. Args: api_repr (Mapping[str, str]): The serialized representation of the SchemaField, such as what is output by :meth:`to_api_repr`. Returns: google.cloud.biquery.schema.SchemaField: The ``SchemaField`` object.
[ "Return", "a", "SchemaField", "object", "deserialized", "from", "a", "dictionary", "." ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/bigquery/google/cloud/bigquery/schema.py#L44-L66
train
googleapis/google-cloud-python
bigquery/google/cloud/bigquery/schema.py
SchemaField.to_api_repr
def to_api_repr(self): """Return a dictionary representing this schema field. Returns: dict: A dictionary representing the SchemaField in a serialized form. """ # Put together the basic representation. See http://bit.ly/2hOAT5u. answer = { "mode": self.mode.upper(), "name": self.name, "type": self.field_type.upper(), "description": self.description, } # If this is a RECORD type, then sub-fields are also included, # add this to the serialized representation. if self.field_type.upper() == "RECORD": answer["fields"] = [f.to_api_repr() for f in self.fields] # Done; return the serialized dictionary. return answer
python
def to_api_repr(self): """Return a dictionary representing this schema field. Returns: dict: A dictionary representing the SchemaField in a serialized form. """ # Put together the basic representation. See http://bit.ly/2hOAT5u. answer = { "mode": self.mode.upper(), "name": self.name, "type": self.field_type.upper(), "description": self.description, } # If this is a RECORD type, then sub-fields are also included, # add this to the serialized representation. if self.field_type.upper() == "RECORD": answer["fields"] = [f.to_api_repr() for f in self.fields] # Done; return the serialized dictionary. return answer
[ "def", "to_api_repr", "(", "self", ")", ":", "# Put together the basic representation. See http://bit.ly/2hOAT5u.", "answer", "=", "{", "\"mode\"", ":", "self", ".", "mode", ".", "upper", "(", ")", ",", "\"name\"", ":", "self", ".", "name", ",", "\"type\"", ":", "self", ".", "field_type", ".", "upper", "(", ")", ",", "\"description\"", ":", "self", ".", "description", ",", "}", "# If this is a RECORD type, then sub-fields are also included,", "# add this to the serialized representation.", "if", "self", ".", "field_type", ".", "upper", "(", ")", "==", "\"RECORD\"", ":", "answer", "[", "\"fields\"", "]", "=", "[", "f", ".", "to_api_repr", "(", ")", "for", "f", "in", "self", ".", "fields", "]", "# Done; return the serialized dictionary.", "return", "answer" ]
Return a dictionary representing this schema field. Returns: dict: A dictionary representing the SchemaField in a serialized form.
[ "Return", "a", "dictionary", "representing", "this", "schema", "field", "." ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/bigquery/google/cloud/bigquery/schema.py#L109-L130
train
googleapis/google-cloud-python
bigquery/google/cloud/bigquery/schema.py
SchemaField._key
def _key(self): """A tuple key that uniquely describes this field. Used to compute this instance's hashcode and evaluate equality. Returns: tuple: The contents of this :class:`~google.cloud.bigquery.schema.SchemaField`. """ return ( self._name, self._field_type.upper(), self._mode.upper(), self._description, self._fields, )
python
def _key(self): """A tuple key that uniquely describes this field. Used to compute this instance's hashcode and evaluate equality. Returns: tuple: The contents of this :class:`~google.cloud.bigquery.schema.SchemaField`. """ return ( self._name, self._field_type.upper(), self._mode.upper(), self._description, self._fields, )
[ "def", "_key", "(", "self", ")", ":", "return", "(", "self", ".", "_name", ",", "self", ".", "_field_type", ".", "upper", "(", ")", ",", "self", ".", "_mode", ".", "upper", "(", ")", ",", "self", ".", "_description", ",", "self", ".", "_fields", ",", ")" ]
A tuple key that uniquely describes this field. Used to compute this instance's hashcode and evaluate equality. Returns: tuple: The contents of this :class:`~google.cloud.bigquery.schema.SchemaField`.
[ "A", "tuple", "key", "that", "uniquely", "describes", "this", "field", "." ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/bigquery/google/cloud/bigquery/schema.py#L132-L147
train
googleapis/google-cloud-python
spanner/benchmark/ycsb.py
parse_options
def parse_options(): """Parses options.""" parser = argparse.ArgumentParser() parser.add_argument('command', help='The YCSB command.') parser.add_argument('benchmark', help='The YCSB benchmark.') parser.add_argument('-P', '--workload', action='store', dest='workload', default='', help='The path to a YCSB workload file.') parser.add_argument('-p', '--parameter', action='append', dest='parameters', default=[], help='The key=value pair of parameter.') parser.add_argument('-b', '--num_bucket', action='store', type=int, dest='num_bucket', default=1000, help='The number of buckets in output.') args = parser.parse_args() parameters = {} parameters['command'] = args.command parameters['num_bucket'] = args.num_bucket for parameter in args.parameters: parts = parameter.strip().split('=') parameters[parts[0]] = parts[1] with open(args.workload, 'r') as f: for line in f.readlines(): parts = line.split('=') key = parts[0].strip() if key in OPERATIONS: parameters[key] = parts[1].strip() return parameters
python
def parse_options(): """Parses options.""" parser = argparse.ArgumentParser() parser.add_argument('command', help='The YCSB command.') parser.add_argument('benchmark', help='The YCSB benchmark.') parser.add_argument('-P', '--workload', action='store', dest='workload', default='', help='The path to a YCSB workload file.') parser.add_argument('-p', '--parameter', action='append', dest='parameters', default=[], help='The key=value pair of parameter.') parser.add_argument('-b', '--num_bucket', action='store', type=int, dest='num_bucket', default=1000, help='The number of buckets in output.') args = parser.parse_args() parameters = {} parameters['command'] = args.command parameters['num_bucket'] = args.num_bucket for parameter in args.parameters: parts = parameter.strip().split('=') parameters[parts[0]] = parts[1] with open(args.workload, 'r') as f: for line in f.readlines(): parts = line.split('=') key = parts[0].strip() if key in OPERATIONS: parameters[key] = parts[1].strip() return parameters
[ "def", "parse_options", "(", ")", ":", "parser", "=", "argparse", ".", "ArgumentParser", "(", ")", "parser", ".", "add_argument", "(", "'command'", ",", "help", "=", "'The YCSB command.'", ")", "parser", ".", "add_argument", "(", "'benchmark'", ",", "help", "=", "'The YCSB benchmark.'", ")", "parser", ".", "add_argument", "(", "'-P'", ",", "'--workload'", ",", "action", "=", "'store'", ",", "dest", "=", "'workload'", ",", "default", "=", "''", ",", "help", "=", "'The path to a YCSB workload file.'", ")", "parser", ".", "add_argument", "(", "'-p'", ",", "'--parameter'", ",", "action", "=", "'append'", ",", "dest", "=", "'parameters'", ",", "default", "=", "[", "]", ",", "help", "=", "'The key=value pair of parameter.'", ")", "parser", ".", "add_argument", "(", "'-b'", ",", "'--num_bucket'", ",", "action", "=", "'store'", ",", "type", "=", "int", ",", "dest", "=", "'num_bucket'", ",", "default", "=", "1000", ",", "help", "=", "'The number of buckets in output.'", ")", "args", "=", "parser", ".", "parse_args", "(", ")", "parameters", "=", "{", "}", "parameters", "[", "'command'", "]", "=", "args", ".", "command", "parameters", "[", "'num_bucket'", "]", "=", "args", ".", "num_bucket", "for", "parameter", "in", "args", ".", "parameters", ":", "parts", "=", "parameter", ".", "strip", "(", ")", ".", "split", "(", "'='", ")", "parameters", "[", "parts", "[", "0", "]", "]", "=", "parts", "[", "1", "]", "with", "open", "(", "args", ".", "workload", ",", "'r'", ")", "as", "f", ":", "for", "line", "in", "f", ".", "readlines", "(", ")", ":", "parts", "=", "line", ".", "split", "(", "'='", ")", "key", "=", "parts", "[", "0", "]", ".", "strip", "(", ")", "if", "key", "in", "OPERATIONS", ":", "parameters", "[", "key", "]", "=", "parts", "[", "1", "]", ".", "strip", "(", ")", "return", "parameters" ]
Parses options.
[ "Parses", "options", "." ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/spanner/benchmark/ycsb.py#L51-L81
train
googleapis/google-cloud-python
spanner/benchmark/ycsb.py
open_database
def open_database(parameters): """Opens a database specified by the parameters from parse_options().""" spanner_client = spanner.Client() instance_id = parameters['cloudspanner.instance'] instance = spanner_client.instance(instance_id) database_id = parameters['cloudspanner.database'] pool = spanner.BurstyPool(int(parameters['num_worker'])) database = instance.database(database_id, pool=pool) return database
python
def open_database(parameters): """Opens a database specified by the parameters from parse_options().""" spanner_client = spanner.Client() instance_id = parameters['cloudspanner.instance'] instance = spanner_client.instance(instance_id) database_id = parameters['cloudspanner.database'] pool = spanner.BurstyPool(int(parameters['num_worker'])) database = instance.database(database_id, pool=pool) return database
[ "def", "open_database", "(", "parameters", ")", ":", "spanner_client", "=", "spanner", ".", "Client", "(", ")", "instance_id", "=", "parameters", "[", "'cloudspanner.instance'", "]", "instance", "=", "spanner_client", ".", "instance", "(", "instance_id", ")", "database_id", "=", "parameters", "[", "'cloudspanner.database'", "]", "pool", "=", "spanner", ".", "BurstyPool", "(", "int", "(", "parameters", "[", "'num_worker'", "]", ")", ")", "database", "=", "instance", ".", "database", "(", "database_id", ",", "pool", "=", "pool", ")", "return", "database" ]
Opens a database specified by the parameters from parse_options().
[ "Opens", "a", "database", "specified", "by", "the", "parameters", "from", "parse_options", "()", "." ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/spanner/benchmark/ycsb.py#L84-L93
train
googleapis/google-cloud-python
spanner/benchmark/ycsb.py
load_keys
def load_keys(database, parameters): """Loads keys from database.""" keys = [] with database.snapshot() as snapshot: results = snapshot.execute_sql( 'SELECT u.id FROM %s u' % parameters['table']) for row in results: keys.append(row[0]) return keys
python
def load_keys(database, parameters): """Loads keys from database.""" keys = [] with database.snapshot() as snapshot: results = snapshot.execute_sql( 'SELECT u.id FROM %s u' % parameters['table']) for row in results: keys.append(row[0]) return keys
[ "def", "load_keys", "(", "database", ",", "parameters", ")", ":", "keys", "=", "[", "]", "with", "database", ".", "snapshot", "(", ")", "as", "snapshot", ":", "results", "=", "snapshot", ".", "execute_sql", "(", "'SELECT u.id FROM %s u'", "%", "parameters", "[", "'table'", "]", ")", "for", "row", "in", "results", ":", "keys", ".", "append", "(", "row", "[", "0", "]", ")", "return", "keys" ]
Loads keys from database.
[ "Loads", "keys", "from", "database", "." ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/spanner/benchmark/ycsb.py#L96-L106
train
googleapis/google-cloud-python
spanner/benchmark/ycsb.py
read
def read(database, table, key): """Does a single read operation.""" with database.snapshot() as snapshot: result = snapshot.execute_sql('SELECT u.* FROM %s u WHERE u.id="%s"' % (table, key)) for row in result: key = row[0] for i in range(NUM_FIELD): field = row[i + 1]
python
def read(database, table, key): """Does a single read operation.""" with database.snapshot() as snapshot: result = snapshot.execute_sql('SELECT u.* FROM %s u WHERE u.id="%s"' % (table, key)) for row in result: key = row[0] for i in range(NUM_FIELD): field = row[i + 1]
[ "def", "read", "(", "database", ",", "table", ",", "key", ")", ":", "with", "database", ".", "snapshot", "(", ")", "as", "snapshot", ":", "result", "=", "snapshot", ".", "execute_sql", "(", "'SELECT u.* FROM %s u WHERE u.id=\"%s\"'", "%", "(", "table", ",", "key", ")", ")", "for", "row", "in", "result", ":", "key", "=", "row", "[", "0", "]", "for", "i", "in", "range", "(", "NUM_FIELD", ")", ":", "field", "=", "row", "[", "i", "+", "1", "]" ]
Does a single read operation.
[ "Does", "a", "single", "read", "operation", "." ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/spanner/benchmark/ycsb.py#L109-L117
train
googleapis/google-cloud-python
spanner/benchmark/ycsb.py
update
def update(database, table, key): """Does a single update operation.""" field = random.randrange(10) value = ''.join(random.choice(string.printable) for i in range(100)) with database.batch() as batch: batch.update(table=table, columns=('id', 'field%d' % field), values=[(key, value)])
python
def update(database, table, key): """Does a single update operation.""" field = random.randrange(10) value = ''.join(random.choice(string.printable) for i in range(100)) with database.batch() as batch: batch.update(table=table, columns=('id', 'field%d' % field), values=[(key, value)])
[ "def", "update", "(", "database", ",", "table", ",", "key", ")", ":", "field", "=", "random", ".", "randrange", "(", "10", ")", "value", "=", "''", ".", "join", "(", "random", ".", "choice", "(", "string", ".", "printable", ")", "for", "i", "in", "range", "(", "100", ")", ")", "with", "database", ".", "batch", "(", ")", "as", "batch", ":", "batch", ".", "update", "(", "table", "=", "table", ",", "columns", "=", "(", "'id'", ",", "'field%d'", "%", "field", ")", ",", "values", "=", "[", "(", "key", ",", "value", ")", "]", ")" ]
Does a single update operation.
[ "Does", "a", "single", "update", "operation", "." ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/spanner/benchmark/ycsb.py#L120-L126
train
googleapis/google-cloud-python
spanner/benchmark/ycsb.py
do_operation
def do_operation(database, keys, table, operation, latencies_ms): """Does a single operation and records latency.""" key = random.choice(keys) start = timeit.default_timer() if operation == 'read': read(database, table, key) elif operation == 'update': update(database, table, key) else: raise ValueError('Unknown operation: %s' % operation) end = timeit.default_timer() latencies_ms[operation].append((end - start) * 1000)
python
def do_operation(database, keys, table, operation, latencies_ms): """Does a single operation and records latency.""" key = random.choice(keys) start = timeit.default_timer() if operation == 'read': read(database, table, key) elif operation == 'update': update(database, table, key) else: raise ValueError('Unknown operation: %s' % operation) end = timeit.default_timer() latencies_ms[operation].append((end - start) * 1000)
[ "def", "do_operation", "(", "database", ",", "keys", ",", "table", ",", "operation", ",", "latencies_ms", ")", ":", "key", "=", "random", ".", "choice", "(", "keys", ")", "start", "=", "timeit", ".", "default_timer", "(", ")", "if", "operation", "==", "'read'", ":", "read", "(", "database", ",", "table", ",", "key", ")", "elif", "operation", "==", "'update'", ":", "update", "(", "database", ",", "table", ",", "key", ")", "else", ":", "raise", "ValueError", "(", "'Unknown operation: %s'", "%", "operation", ")", "end", "=", "timeit", ".", "default_timer", "(", ")", "latencies_ms", "[", "operation", "]", ".", "append", "(", "(", "end", "-", "start", ")", "*", "1000", ")" ]
Does a single operation and records latency.
[ "Does", "a", "single", "operation", "and", "records", "latency", "." ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/spanner/benchmark/ycsb.py#L129-L140
train
googleapis/google-cloud-python
spanner/benchmark/ycsb.py
aggregate_metrics
def aggregate_metrics(latencies_ms, duration_ms, num_bucket): """Aggregates metrics.""" overall_op_count = 0 op_counts = {operation : len(latency) for operation, latency in latencies_ms.iteritems()} overall_op_count = sum([op_count for op_count in op_counts.itervalues()]) print('[OVERALL], RunTime(ms), %f' % duration_ms) print('[OVERALL], Throughput(ops/sec), %f' % (float(overall_op_count) / duration_ms * 1000.0)) for operation in op_counts.keys(): operation_upper = operation.upper() print('[%s], Operations, %d' % (operation_upper, op_counts[operation])) print('[%s], AverageLatency(us), %f' % ( operation_upper, numpy.average(latencies_ms[operation]) * 1000.0)) print('[%s], LatencyVariance(us), %f' % ( operation_upper, numpy.var(latencies_ms[operation]) * 1000.0)) print('[%s], MinLatency(us), %f' % ( operation_upper, min(latencies_ms[operation]) * 1000.0)) print('[%s], MaxLatency(us), %f' % ( operation_upper, max(latencies_ms[operation]) * 1000.0)) print('[%s], 95thPercentileLatency(us), %f' % ( operation_upper, numpy.percentile(latencies_ms[operation], 95.0) * 1000.0)) print('[%s], 99thPercentileLatency(us), %f' % ( operation_upper, numpy.percentile(latencies_ms[operation], 99.0) * 1000.0)) print('[%s], 99.9thPercentileLatency(us), %f' % ( operation_upper, numpy.percentile(latencies_ms[operation], 99.9) * 1000.0)) print('[%s], Return=OK, %d' % (operation_upper, op_counts[operation])) latency_array = numpy.array(latencies_ms[operation]) for j in range(num_bucket): print('[%s], %d, %d' % ( operation_upper, j, ((j <= latency_array) & (latency_array < (j + 1))).sum())) print('[%s], >%d, %d' % (operation_upper, num_bucket, (num_bucket <= latency_array).sum()))
python
def aggregate_metrics(latencies_ms, duration_ms, num_bucket): """Aggregates metrics.""" overall_op_count = 0 op_counts = {operation : len(latency) for operation, latency in latencies_ms.iteritems()} overall_op_count = sum([op_count for op_count in op_counts.itervalues()]) print('[OVERALL], RunTime(ms), %f' % duration_ms) print('[OVERALL], Throughput(ops/sec), %f' % (float(overall_op_count) / duration_ms * 1000.0)) for operation in op_counts.keys(): operation_upper = operation.upper() print('[%s], Operations, %d' % (operation_upper, op_counts[operation])) print('[%s], AverageLatency(us), %f' % ( operation_upper, numpy.average(latencies_ms[operation]) * 1000.0)) print('[%s], LatencyVariance(us), %f' % ( operation_upper, numpy.var(latencies_ms[operation]) * 1000.0)) print('[%s], MinLatency(us), %f' % ( operation_upper, min(latencies_ms[operation]) * 1000.0)) print('[%s], MaxLatency(us), %f' % ( operation_upper, max(latencies_ms[operation]) * 1000.0)) print('[%s], 95thPercentileLatency(us), %f' % ( operation_upper, numpy.percentile(latencies_ms[operation], 95.0) * 1000.0)) print('[%s], 99thPercentileLatency(us), %f' % ( operation_upper, numpy.percentile(latencies_ms[operation], 99.0) * 1000.0)) print('[%s], 99.9thPercentileLatency(us), %f' % ( operation_upper, numpy.percentile(latencies_ms[operation], 99.9) * 1000.0)) print('[%s], Return=OK, %d' % (operation_upper, op_counts[operation])) latency_array = numpy.array(latencies_ms[operation]) for j in range(num_bucket): print('[%s], %d, %d' % ( operation_upper, j, ((j <= latency_array) & (latency_array < (j + 1))).sum())) print('[%s], >%d, %d' % (operation_upper, num_bucket, (num_bucket <= latency_array).sum()))
[ "def", "aggregate_metrics", "(", "latencies_ms", ",", "duration_ms", ",", "num_bucket", ")", ":", "overall_op_count", "=", "0", "op_counts", "=", "{", "operation", ":", "len", "(", "latency", ")", "for", "operation", ",", "latency", "in", "latencies_ms", ".", "iteritems", "(", ")", "}", "overall_op_count", "=", "sum", "(", "[", "op_count", "for", "op_count", "in", "op_counts", ".", "itervalues", "(", ")", "]", ")", "print", "(", "'[OVERALL], RunTime(ms), %f'", "%", "duration_ms", ")", "print", "(", "'[OVERALL], Throughput(ops/sec), %f'", "%", "(", "float", "(", "overall_op_count", ")", "/", "duration_ms", "*", "1000.0", ")", ")", "for", "operation", "in", "op_counts", ".", "keys", "(", ")", ":", "operation_upper", "=", "operation", ".", "upper", "(", ")", "print", "(", "'[%s], Operations, %d'", "%", "(", "operation_upper", ",", "op_counts", "[", "operation", "]", ")", ")", "print", "(", "'[%s], AverageLatency(us), %f'", "%", "(", "operation_upper", ",", "numpy", ".", "average", "(", "latencies_ms", "[", "operation", "]", ")", "*", "1000.0", ")", ")", "print", "(", "'[%s], LatencyVariance(us), %f'", "%", "(", "operation_upper", ",", "numpy", ".", "var", "(", "latencies_ms", "[", "operation", "]", ")", "*", "1000.0", ")", ")", "print", "(", "'[%s], MinLatency(us), %f'", "%", "(", "operation_upper", ",", "min", "(", "latencies_ms", "[", "operation", "]", ")", "*", "1000.0", ")", ")", "print", "(", "'[%s], MaxLatency(us), %f'", "%", "(", "operation_upper", ",", "max", "(", "latencies_ms", "[", "operation", "]", ")", "*", "1000.0", ")", ")", "print", "(", "'[%s], 95thPercentileLatency(us), %f'", "%", "(", "operation_upper", ",", "numpy", ".", "percentile", "(", "latencies_ms", "[", "operation", "]", ",", "95.0", ")", "*", "1000.0", ")", ")", "print", "(", "'[%s], 99thPercentileLatency(us), %f'", "%", "(", "operation_upper", ",", "numpy", ".", "percentile", "(", "latencies_ms", "[", "operation", "]", ",", "99.0", ")", "*", "1000.0", ")", ")", "print", "(", "'[%s], 
99.9thPercentileLatency(us), %f'", "%", "(", "operation_upper", ",", "numpy", ".", "percentile", "(", "latencies_ms", "[", "operation", "]", ",", "99.9", ")", "*", "1000.0", ")", ")", "print", "(", "'[%s], Return=OK, %d'", "%", "(", "operation_upper", ",", "op_counts", "[", "operation", "]", ")", ")", "latency_array", "=", "numpy", ".", "array", "(", "latencies_ms", "[", "operation", "]", ")", "for", "j", "in", "range", "(", "num_bucket", ")", ":", "print", "(", "'[%s], %d, %d'", "%", "(", "operation_upper", ",", "j", ",", "(", "(", "j", "<=", "latency_array", ")", "&", "(", "latency_array", "<", "(", "j", "+", "1", ")", ")", ")", ".", "sum", "(", ")", ")", ")", "print", "(", "'[%s], >%d, %d'", "%", "(", "operation_upper", ",", "num_bucket", ",", "(", "num_bucket", "<=", "latency_array", ")", ".", "sum", "(", ")", ")", ")" ]
Aggregates metrics.
[ "Aggregates", "metrics", "." ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/spanner/benchmark/ycsb.py#L143-L181
train
googleapis/google-cloud-python
spanner/benchmark/ycsb.py
run_workload
def run_workload(database, keys, parameters): """Runs workload against the database.""" total_weight = 0.0 weights = [] operations = [] latencies_ms = {} for operation in OPERATIONS: weight = float(parameters[operation]) if weight <= 0.0: continue total_weight += weight op_code = operation.split('proportion')[0] operations.append(op_code) weights.append(total_weight) latencies_ms[op_code] = [] threads = [] start = timeit.default_timer() for i in range(int(parameters['num_worker'])): thread = WorkloadThread(database, keys, parameters, total_weight, weights, operations) thread.start() threads.append(thread) for thread in threads: thread.join() end = timeit.default_timer() for thread in threads: thread_latencies_ms = thread.latencies_ms() for key in latencies_ms.keys(): latencies_ms[key].extend(thread_latencies_ms[key]) aggregate_metrics(latencies_ms, (end - start) * 1000.0, parameters['num_bucket'])
python
def run_workload(database, keys, parameters): """Runs workload against the database.""" total_weight = 0.0 weights = [] operations = [] latencies_ms = {} for operation in OPERATIONS: weight = float(parameters[operation]) if weight <= 0.0: continue total_weight += weight op_code = operation.split('proportion')[0] operations.append(op_code) weights.append(total_weight) latencies_ms[op_code] = [] threads = [] start = timeit.default_timer() for i in range(int(parameters['num_worker'])): thread = WorkloadThread(database, keys, parameters, total_weight, weights, operations) thread.start() threads.append(thread) for thread in threads: thread.join() end = timeit.default_timer() for thread in threads: thread_latencies_ms = thread.latencies_ms() for key in latencies_ms.keys(): latencies_ms[key].extend(thread_latencies_ms[key]) aggregate_metrics(latencies_ms, (end - start) * 1000.0, parameters['num_bucket'])
[ "def", "run_workload", "(", "database", ",", "keys", ",", "parameters", ")", ":", "total_weight", "=", "0.0", "weights", "=", "[", "]", "operations", "=", "[", "]", "latencies_ms", "=", "{", "}", "for", "operation", "in", "OPERATIONS", ":", "weight", "=", "float", "(", "parameters", "[", "operation", "]", ")", "if", "weight", "<=", "0.0", ":", "continue", "total_weight", "+=", "weight", "op_code", "=", "operation", ".", "split", "(", "'proportion'", ")", "[", "0", "]", "operations", ".", "append", "(", "op_code", ")", "weights", ".", "append", "(", "total_weight", ")", "latencies_ms", "[", "op_code", "]", "=", "[", "]", "threads", "=", "[", "]", "start", "=", "timeit", ".", "default_timer", "(", ")", "for", "i", "in", "range", "(", "int", "(", "parameters", "[", "'num_worker'", "]", ")", ")", ":", "thread", "=", "WorkloadThread", "(", "database", ",", "keys", ",", "parameters", ",", "total_weight", ",", "weights", ",", "operations", ")", "thread", ".", "start", "(", ")", "threads", ".", "append", "(", "thread", ")", "for", "thread", "in", "threads", ":", "thread", ".", "join", "(", ")", "end", "=", "timeit", ".", "default_timer", "(", ")", "for", "thread", "in", "threads", ":", "thread_latencies_ms", "=", "thread", ".", "latencies_ms", "(", ")", "for", "key", "in", "latencies_ms", ".", "keys", "(", ")", ":", "latencies_ms", "[", "key", "]", ".", "extend", "(", "thread_latencies_ms", "[", "key", "]", ")", "aggregate_metrics", "(", "latencies_ms", ",", "(", "end", "-", "start", ")", "*", "1000.0", ",", "parameters", "[", "'num_bucket'", "]", ")" ]
Runs workload against the database.
[ "Runs", "workload", "against", "the", "database", "." ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/spanner/benchmark/ycsb.py#L219-L253
train
googleapis/google-cloud-python
spanner/benchmark/ycsb.py
WorkloadThread.run
def run(self): """Run a single thread of the workload.""" i = 0 operation_count = int(self._parameters['operationcount']) while i < operation_count: i += 1 weight = random.uniform(0, self._total_weight) for j in range(len(self._weights)): if weight <= self._weights[j]: do_operation(self._database, self._keys, self._parameters['table'], self._operations[j], self._latencies_ms) break
python
def run(self): """Run a single thread of the workload.""" i = 0 operation_count = int(self._parameters['operationcount']) while i < operation_count: i += 1 weight = random.uniform(0, self._total_weight) for j in range(len(self._weights)): if weight <= self._weights[j]: do_operation(self._database, self._keys, self._parameters['table'], self._operations[j], self._latencies_ms) break
[ "def", "run", "(", "self", ")", ":", "i", "=", "0", "operation_count", "=", "int", "(", "self", ".", "_parameters", "[", "'operationcount'", "]", ")", "while", "i", "<", "operation_count", ":", "i", "+=", "1", "weight", "=", "random", ".", "uniform", "(", "0", ",", "self", ".", "_total_weight", ")", "for", "j", "in", "range", "(", "len", "(", "self", ".", "_weights", ")", ")", ":", "if", "weight", "<=", "self", ".", "_weights", "[", "j", "]", ":", "do_operation", "(", "self", ".", "_database", ",", "self", ".", "_keys", ",", "self", ".", "_parameters", "[", "'table'", "]", ",", "self", ".", "_operations", "[", "j", "]", ",", "self", ".", "_latencies_ms", ")", "break" ]
Run a single thread of the workload.
[ "Run", "a", "single", "thread", "of", "the", "workload", "." ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/spanner/benchmark/ycsb.py#L200-L212
train
googleapis/google-cloud-python
pubsub/google/cloud/pubsub_v1/_gapic.py
add_methods
def add_methods(source_class, blacklist=()): """Add wrapped versions of the `api` member's methods to the class. Any methods passed in `blacklist` are not added. Additionally, any methods explicitly defined on the wrapped class are not added. """ def wrap(wrapped_fx): """Wrap a GAPIC method; preserve its name and docstring.""" # If this is a static or class method, then we need to *not* # send self as the first argument. # # Similarly, for instance methods, we need to send self.api rather # than self, since that is where the actual methods were declared. instance_method = True # If this is a bound method it's a classmethod. self = getattr(wrapped_fx, "__self__", None) if issubclass(type(self), type): instance_method = False # Okay, we have figured out what kind of method this is; send # down the correct wrapper function. if instance_method: fx = lambda self, *a, **kw: wrapped_fx(self.api, *a, **kw) # noqa return functools.wraps(wrapped_fx)(fx) fx = lambda *a, **kw: wrapped_fx(*a, **kw) # noqa return staticmethod(functools.wraps(wrapped_fx)(fx)) def actual_decorator(cls): # Reflectively iterate over most of the methods on the source class # (the GAPIC) and make wrapped versions available on this client. for name in dir(source_class): # Ignore all private and magic methods. if name.startswith("_"): continue # Ignore anything on our blacklist. if name in blacklist: continue # Retrieve the attribute, and ignore it if it is not callable. attr = getattr(source_class, name) if not callable(attr): continue # Add a wrapper method to this object. fx = wrap(getattr(source_class, name)) setattr(cls, name, fx) # Return the augmented class. return cls # Simply return the actual decorator; this is returned from this method # and actually used to decorate the class. return actual_decorator
python
def add_methods(source_class, blacklist=()): """Add wrapped versions of the `api` member's methods to the class. Any methods passed in `blacklist` are not added. Additionally, any methods explicitly defined on the wrapped class are not added. """ def wrap(wrapped_fx): """Wrap a GAPIC method; preserve its name and docstring.""" # If this is a static or class method, then we need to *not* # send self as the first argument. # # Similarly, for instance methods, we need to send self.api rather # than self, since that is where the actual methods were declared. instance_method = True # If this is a bound method it's a classmethod. self = getattr(wrapped_fx, "__self__", None) if issubclass(type(self), type): instance_method = False # Okay, we have figured out what kind of method this is; send # down the correct wrapper function. if instance_method: fx = lambda self, *a, **kw: wrapped_fx(self.api, *a, **kw) # noqa return functools.wraps(wrapped_fx)(fx) fx = lambda *a, **kw: wrapped_fx(*a, **kw) # noqa return staticmethod(functools.wraps(wrapped_fx)(fx)) def actual_decorator(cls): # Reflectively iterate over most of the methods on the source class # (the GAPIC) and make wrapped versions available on this client. for name in dir(source_class): # Ignore all private and magic methods. if name.startswith("_"): continue # Ignore anything on our blacklist. if name in blacklist: continue # Retrieve the attribute, and ignore it if it is not callable. attr = getattr(source_class, name) if not callable(attr): continue # Add a wrapper method to this object. fx = wrap(getattr(source_class, name)) setattr(cls, name, fx) # Return the augmented class. return cls # Simply return the actual decorator; this is returned from this method # and actually used to decorate the class. return actual_decorator
[ "def", "add_methods", "(", "source_class", ",", "blacklist", "=", "(", ")", ")", ":", "def", "wrap", "(", "wrapped_fx", ")", ":", "\"\"\"Wrap a GAPIC method; preserve its name and docstring.\"\"\"", "# If this is a static or class method, then we need to *not*", "# send self as the first argument.", "#", "# Similarly, for instance methods, we need to send self.api rather", "# than self, since that is where the actual methods were declared.", "instance_method", "=", "True", "# If this is a bound method it's a classmethod.", "self", "=", "getattr", "(", "wrapped_fx", ",", "\"__self__\"", ",", "None", ")", "if", "issubclass", "(", "type", "(", "self", ")", ",", "type", ")", ":", "instance_method", "=", "False", "# Okay, we have figured out what kind of method this is; send", "# down the correct wrapper function.", "if", "instance_method", ":", "fx", "=", "lambda", "self", ",", "*", "a", ",", "*", "*", "kw", ":", "wrapped_fx", "(", "self", ".", "api", ",", "*", "a", ",", "*", "*", "kw", ")", "# noqa", "return", "functools", ".", "wraps", "(", "wrapped_fx", ")", "(", "fx", ")", "fx", "=", "lambda", "*", "a", ",", "*", "*", "kw", ":", "wrapped_fx", "(", "*", "a", ",", "*", "*", "kw", ")", "# noqa", "return", "staticmethod", "(", "functools", ".", "wraps", "(", "wrapped_fx", ")", "(", "fx", ")", ")", "def", "actual_decorator", "(", "cls", ")", ":", "# Reflectively iterate over most of the methods on the source class", "# (the GAPIC) and make wrapped versions available on this client.", "for", "name", "in", "dir", "(", "source_class", ")", ":", "# Ignore all private and magic methods.", "if", "name", ".", "startswith", "(", "\"_\"", ")", ":", "continue", "# Ignore anything on our blacklist.", "if", "name", "in", "blacklist", ":", "continue", "# Retrieve the attribute, and ignore it if it is not callable.", "attr", "=", "getattr", "(", "source_class", ",", "name", ")", "if", "not", "callable", "(", "attr", ")", ":", "continue", "# Add a wrapper method to this object.", 
"fx", "=", "wrap", "(", "getattr", "(", "source_class", ",", "name", ")", ")", "setattr", "(", "cls", ",", "name", ",", "fx", ")", "# Return the augmented class.", "return", "cls", "# Simply return the actual decorator; this is returned from this method", "# and actually used to decorate the class.", "return", "actual_decorator" ]
Add wrapped versions of the `api` member's methods to the class. Any methods passed in `blacklist` are not added. Additionally, any methods explicitly defined on the wrapped class are not added.
[ "Add", "wrapped", "versions", "of", "the", "api", "member", "s", "methods", "to", "the", "class", "." ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/pubsub/google/cloud/pubsub_v1/_gapic.py#L20-L77
train
googleapis/google-cloud-python
vision/google/cloud/vision_v1/gapic/product_search_client.py
ProductSearchClient.create_product
def create_product( self, parent, product, product_id=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Creates and returns a new product resource. Possible errors: - Returns INVALID\_ARGUMENT if display\_name is missing or longer than 4096 characters. - Returns INVALID\_ARGUMENT if description is longer than 4096 characters. - Returns INVALID\_ARGUMENT if product\_category is missing or invalid. Example: >>> from google.cloud import vision_v1 >>> >>> client = vision_v1.ProductSearchClient() >>> >>> parent = client.location_path('[PROJECT]', '[LOCATION]') >>> >>> # TODO: Initialize `product`: >>> product = {} >>> >>> response = client.create_product(parent, product) Args: parent (str): The project in which the Product should be created. Format is ``projects/PROJECT_ID/locations/LOC_ID``. product (Union[dict, ~google.cloud.vision_v1.types.Product]): The product to create. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.vision_v1.types.Product` product_id (str): A user-supplied resource id for this Product. If set, the server will attempt to use this value as the resource id. If it is already in use, an error is returned with code ALREADY\_EXISTS. Must be at most 128 characters long. It cannot contain the character ``/``. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.vision_v1.types.Product` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. 
google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. if "create_product" not in self._inner_api_calls: self._inner_api_calls[ "create_product" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.create_product, default_retry=self._method_configs["CreateProduct"].retry, default_timeout=self._method_configs["CreateProduct"].timeout, client_info=self._client_info, ) request = product_search_service_pb2.CreateProductRequest( parent=parent, product=product, product_id=product_id ) if metadata is None: metadata = [] metadata = list(metadata) try: routing_header = [("parent", parent)] except AttributeError: pass else: routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) return self._inner_api_calls["create_product"]( request, retry=retry, timeout=timeout, metadata=metadata )
python
def create_product( self, parent, product, product_id=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Creates and returns a new product resource. Possible errors: - Returns INVALID\_ARGUMENT if display\_name is missing or longer than 4096 characters. - Returns INVALID\_ARGUMENT if description is longer than 4096 characters. - Returns INVALID\_ARGUMENT if product\_category is missing or invalid. Example: >>> from google.cloud import vision_v1 >>> >>> client = vision_v1.ProductSearchClient() >>> >>> parent = client.location_path('[PROJECT]', '[LOCATION]') >>> >>> # TODO: Initialize `product`: >>> product = {} >>> >>> response = client.create_product(parent, product) Args: parent (str): The project in which the Product should be created. Format is ``projects/PROJECT_ID/locations/LOC_ID``. product (Union[dict, ~google.cloud.vision_v1.types.Product]): The product to create. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.vision_v1.types.Product` product_id (str): A user-supplied resource id for this Product. If set, the server will attempt to use this value as the resource id. If it is already in use, an error is returned with code ALREADY\_EXISTS. Must be at most 128 characters long. It cannot contain the character ``/``. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.vision_v1.types.Product` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. 
google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. if "create_product" not in self._inner_api_calls: self._inner_api_calls[ "create_product" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.create_product, default_retry=self._method_configs["CreateProduct"].retry, default_timeout=self._method_configs["CreateProduct"].timeout, client_info=self._client_info, ) request = product_search_service_pb2.CreateProductRequest( parent=parent, product=product, product_id=product_id ) if metadata is None: metadata = [] metadata = list(metadata) try: routing_header = [("parent", parent)] except AttributeError: pass else: routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) return self._inner_api_calls["create_product"]( request, retry=retry, timeout=timeout, metadata=metadata )
[ "def", "create_product", "(", "self", ",", "parent", ",", "product", ",", "product_id", "=", "None", ",", "retry", "=", "google", ".", "api_core", ".", "gapic_v1", ".", "method", ".", "DEFAULT", ",", "timeout", "=", "google", ".", "api_core", ".", "gapic_v1", ".", "method", ".", "DEFAULT", ",", "metadata", "=", "None", ",", ")", ":", "# Wrap the transport method to add retry and timeout logic.", "if", "\"create_product\"", "not", "in", "self", ".", "_inner_api_calls", ":", "self", ".", "_inner_api_calls", "[", "\"create_product\"", "]", "=", "google", ".", "api_core", ".", "gapic_v1", ".", "method", ".", "wrap_method", "(", "self", ".", "transport", ".", "create_product", ",", "default_retry", "=", "self", ".", "_method_configs", "[", "\"CreateProduct\"", "]", ".", "retry", ",", "default_timeout", "=", "self", ".", "_method_configs", "[", "\"CreateProduct\"", "]", ".", "timeout", ",", "client_info", "=", "self", ".", "_client_info", ",", ")", "request", "=", "product_search_service_pb2", ".", "CreateProductRequest", "(", "parent", "=", "parent", ",", "product", "=", "product", ",", "product_id", "=", "product_id", ")", "if", "metadata", "is", "None", ":", "metadata", "=", "[", "]", "metadata", "=", "list", "(", "metadata", ")", "try", ":", "routing_header", "=", "[", "(", "\"parent\"", ",", "parent", ")", "]", "except", "AttributeError", ":", "pass", "else", ":", "routing_metadata", "=", "google", ".", "api_core", ".", "gapic_v1", ".", "routing_header", ".", "to_grpc_metadata", "(", "routing_header", ")", "metadata", ".", "append", "(", "routing_metadata", ")", "return", "self", ".", "_inner_api_calls", "[", "\"create_product\"", "]", "(", "request", ",", "retry", "=", "retry", ",", "timeout", "=", "timeout", ",", "metadata", "=", "metadata", ")" ]
Creates and returns a new product resource. Possible errors: - Returns INVALID\_ARGUMENT if display\_name is missing or longer than 4096 characters. - Returns INVALID\_ARGUMENT if description is longer than 4096 characters. - Returns INVALID\_ARGUMENT if product\_category is missing or invalid. Example: >>> from google.cloud import vision_v1 >>> >>> client = vision_v1.ProductSearchClient() >>> >>> parent = client.location_path('[PROJECT]', '[LOCATION]') >>> >>> # TODO: Initialize `product`: >>> product = {} >>> >>> response = client.create_product(parent, product) Args: parent (str): The project in which the Product should be created. Format is ``projects/PROJECT_ID/locations/LOC_ID``. product (Union[dict, ~google.cloud.vision_v1.types.Product]): The product to create. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.vision_v1.types.Product` product_id (str): A user-supplied resource id for this Product. If set, the server will attempt to use this value as the resource id. If it is already in use, an error is returned with code ALREADY\_EXISTS. Must be at most 128 characters long. It cannot contain the character ``/``. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.vision_v1.types.Product` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid.
[ "Creates", "and", "returns", "a", "new", "product", "resource", "." ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/vision/google/cloud/vision_v1/gapic/product_search_client.py#L230-L322
train
googleapis/google-cloud-python
vision/google/cloud/vision_v1/gapic/product_search_client.py
ProductSearchClient.update_product
def update_product( self, product, update_mask=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Makes changes to a Product resource. Only the ``display_name``, ``description``, and ``labels`` fields can be updated right now. If labels are updated, the change will not be reflected in queries until the next index time. Possible errors: - Returns NOT\_FOUND if the Product does not exist. - Returns INVALID\_ARGUMENT if display\_name is present in update\_mask but is missing from the request or longer than 4096 characters. - Returns INVALID\_ARGUMENT if description is present in update\_mask but is longer than 4096 characters. - Returns INVALID\_ARGUMENT if product\_category is present in update\_mask. Example: >>> from google.cloud import vision_v1 >>> >>> client = vision_v1.ProductSearchClient() >>> >>> # TODO: Initialize `product`: >>> product = {} >>> >>> response = client.update_product(product) Args: product (Union[dict, ~google.cloud.vision_v1.types.Product]): The Product resource which replaces the one on the server. product.name is immutable. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.vision_v1.types.Product` update_mask (Union[dict, ~google.cloud.vision_v1.types.FieldMask]): The ``FieldMask`` that specifies which fields to update. If update\_mask isn't specified, all mutable fields are to be updated. Valid mask paths include ``product_labels``, ``display_name``, and ``description``. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.vision_v1.types.FieldMask` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. 
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.vision_v1.types.Product` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. if "update_product" not in self._inner_api_calls: self._inner_api_calls[ "update_product" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.update_product, default_retry=self._method_configs["UpdateProduct"].retry, default_timeout=self._method_configs["UpdateProduct"].timeout, client_info=self._client_info, ) request = product_search_service_pb2.UpdateProductRequest( product=product, update_mask=update_mask ) if metadata is None: metadata = [] metadata = list(metadata) try: routing_header = [("product.name", product.name)] except AttributeError: pass else: routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) return self._inner_api_calls["update_product"]( request, retry=retry, timeout=timeout, metadata=metadata )
python
def update_product( self, product, update_mask=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Makes changes to a Product resource. Only the ``display_name``, ``description``, and ``labels`` fields can be updated right now. If labels are updated, the change will not be reflected in queries until the next index time. Possible errors: - Returns NOT\_FOUND if the Product does not exist. - Returns INVALID\_ARGUMENT if display\_name is present in update\_mask but is missing from the request or longer than 4096 characters. - Returns INVALID\_ARGUMENT if description is present in update\_mask but is longer than 4096 characters. - Returns INVALID\_ARGUMENT if product\_category is present in update\_mask. Example: >>> from google.cloud import vision_v1 >>> >>> client = vision_v1.ProductSearchClient() >>> >>> # TODO: Initialize `product`: >>> product = {} >>> >>> response = client.update_product(product) Args: product (Union[dict, ~google.cloud.vision_v1.types.Product]): The Product resource which replaces the one on the server. product.name is immutable. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.vision_v1.types.Product` update_mask (Union[dict, ~google.cloud.vision_v1.types.FieldMask]): The ``FieldMask`` that specifies which fields to update. If update\_mask isn't specified, all mutable fields are to be updated. Valid mask paths include ``product_labels``, ``display_name``, and ``description``. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.vision_v1.types.FieldMask` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. 
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.vision_v1.types.Product` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. if "update_product" not in self._inner_api_calls: self._inner_api_calls[ "update_product" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.update_product, default_retry=self._method_configs["UpdateProduct"].retry, default_timeout=self._method_configs["UpdateProduct"].timeout, client_info=self._client_info, ) request = product_search_service_pb2.UpdateProductRequest( product=product, update_mask=update_mask ) if metadata is None: metadata = [] metadata = list(metadata) try: routing_header = [("product.name", product.name)] except AttributeError: pass else: routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) return self._inner_api_calls["update_product"]( request, retry=retry, timeout=timeout, metadata=metadata )
[ "def", "update_product", "(", "self", ",", "product", ",", "update_mask", "=", "None", ",", "retry", "=", "google", ".", "api_core", ".", "gapic_v1", ".", "method", ".", "DEFAULT", ",", "timeout", "=", "google", ".", "api_core", ".", "gapic_v1", ".", "method", ".", "DEFAULT", ",", "metadata", "=", "None", ",", ")", ":", "# Wrap the transport method to add retry and timeout logic.", "if", "\"update_product\"", "not", "in", "self", ".", "_inner_api_calls", ":", "self", ".", "_inner_api_calls", "[", "\"update_product\"", "]", "=", "google", ".", "api_core", ".", "gapic_v1", ".", "method", ".", "wrap_method", "(", "self", ".", "transport", ".", "update_product", ",", "default_retry", "=", "self", ".", "_method_configs", "[", "\"UpdateProduct\"", "]", ".", "retry", ",", "default_timeout", "=", "self", ".", "_method_configs", "[", "\"UpdateProduct\"", "]", ".", "timeout", ",", "client_info", "=", "self", ".", "_client_info", ",", ")", "request", "=", "product_search_service_pb2", ".", "UpdateProductRequest", "(", "product", "=", "product", ",", "update_mask", "=", "update_mask", ")", "if", "metadata", "is", "None", ":", "metadata", "=", "[", "]", "metadata", "=", "list", "(", "metadata", ")", "try", ":", "routing_header", "=", "[", "(", "\"product.name\"", ",", "product", ".", "name", ")", "]", "except", "AttributeError", ":", "pass", "else", ":", "routing_metadata", "=", "google", ".", "api_core", ".", "gapic_v1", ".", "routing_header", ".", "to_grpc_metadata", "(", "routing_header", ")", "metadata", ".", "append", "(", "routing_metadata", ")", "return", "self", ".", "_inner_api_calls", "[", "\"update_product\"", "]", "(", "request", ",", "retry", "=", "retry", ",", "timeout", "=", "timeout", ",", "metadata", "=", "metadata", ")" ]
Makes changes to a Product resource. Only the ``display_name``, ``description``, and ``labels`` fields can be updated right now. If labels are updated, the change will not be reflected in queries until the next index time. Possible errors: - Returns NOT\_FOUND if the Product does not exist. - Returns INVALID\_ARGUMENT if display\_name is present in update\_mask but is missing from the request or longer than 4096 characters. - Returns INVALID\_ARGUMENT if description is present in update\_mask but is longer than 4096 characters. - Returns INVALID\_ARGUMENT if product\_category is present in update\_mask. Example: >>> from google.cloud import vision_v1 >>> >>> client = vision_v1.ProductSearchClient() >>> >>> # TODO: Initialize `product`: >>> product = {} >>> >>> response = client.update_product(product) Args: product (Union[dict, ~google.cloud.vision_v1.types.Product]): The Product resource which replaces the one on the server. product.name is immutable. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.vision_v1.types.Product` update_mask (Union[dict, ~google.cloud.vision_v1.types.FieldMask]): The ``FieldMask`` that specifies which fields to update. If update\_mask isn't specified, all mutable fields are to be updated. Valid mask paths include ``product_labels``, ``display_name``, and ``description``. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.vision_v1.types.FieldMask` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.vision_v1.types.Product` instance. 
Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid.
[ "Makes", "changes", "to", "a", "Product", "resource", ".", "Only", "the", "display_name", "description", "and", "labels", "fields", "can", "be", "updated", "right", "now", "." ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/vision/google/cloud/vision_v1/gapic/product_search_client.py#L509-L604
train
googleapis/google-cloud-python
datastore/google/cloud/datastore/batch.py
_assign_entity_to_pb
def _assign_entity_to_pb(entity_pb, entity): """Copy ``entity`` into ``entity_pb``. Helper method for ``Batch.put``. :type entity_pb: :class:`.entity_pb2.Entity` :param entity_pb: The entity owned by a mutation. :type entity: :class:`google.cloud.datastore.entity.Entity` :param entity: The entity being updated within the batch / transaction. """ bare_entity_pb = helpers.entity_to_protobuf(entity) bare_entity_pb.key.CopyFrom(bare_entity_pb.key) entity_pb.CopyFrom(bare_entity_pb)
python
def _assign_entity_to_pb(entity_pb, entity): """Copy ``entity`` into ``entity_pb``. Helper method for ``Batch.put``. :type entity_pb: :class:`.entity_pb2.Entity` :param entity_pb: The entity owned by a mutation. :type entity: :class:`google.cloud.datastore.entity.Entity` :param entity: The entity being updated within the batch / transaction. """ bare_entity_pb = helpers.entity_to_protobuf(entity) bare_entity_pb.key.CopyFrom(bare_entity_pb.key) entity_pb.CopyFrom(bare_entity_pb)
[ "def", "_assign_entity_to_pb", "(", "entity_pb", ",", "entity", ")", ":", "bare_entity_pb", "=", "helpers", ".", "entity_to_protobuf", "(", "entity", ")", "bare_entity_pb", ".", "key", ".", "CopyFrom", "(", "bare_entity_pb", ".", "key", ")", "entity_pb", ".", "CopyFrom", "(", "bare_entity_pb", ")" ]
Copy ``entity`` into ``entity_pb``. Helper method for ``Batch.put``. :type entity_pb: :class:`.entity_pb2.Entity` :param entity_pb: The entity owned by a mutation. :type entity: :class:`google.cloud.datastore.entity.Entity` :param entity: The entity being updated within the batch / transaction.
[ "Copy", "entity", "into", "entity_pb", "." ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/datastore/google/cloud/datastore/batch.py#L309-L322
train
googleapis/google-cloud-python
datastore/google/cloud/datastore/batch.py
_parse_commit_response
def _parse_commit_response(commit_response_pb): """Extract response data from a commit response. :type commit_response_pb: :class:`.datastore_pb2.CommitResponse` :param commit_response_pb: The protobuf response from a commit request. :rtype: tuple :returns: The pair of the number of index updates and a list of :class:`.entity_pb2.Key` for each incomplete key that was completed in the commit. """ mut_results = commit_response_pb.mutation_results index_updates = commit_response_pb.index_updates completed_keys = [ mut_result.key for mut_result in mut_results if mut_result.HasField("key") ] # Message field (Key) return index_updates, completed_keys
python
def _parse_commit_response(commit_response_pb): """Extract response data from a commit response. :type commit_response_pb: :class:`.datastore_pb2.CommitResponse` :param commit_response_pb: The protobuf response from a commit request. :rtype: tuple :returns: The pair of the number of index updates and a list of :class:`.entity_pb2.Key` for each incomplete key that was completed in the commit. """ mut_results = commit_response_pb.mutation_results index_updates = commit_response_pb.index_updates completed_keys = [ mut_result.key for mut_result in mut_results if mut_result.HasField("key") ] # Message field (Key) return index_updates, completed_keys
[ "def", "_parse_commit_response", "(", "commit_response_pb", ")", ":", "mut_results", "=", "commit_response_pb", ".", "mutation_results", "index_updates", "=", "commit_response_pb", ".", "index_updates", "completed_keys", "=", "[", "mut_result", ".", "key", "for", "mut_result", "in", "mut_results", "if", "mut_result", ".", "HasField", "(", "\"key\"", ")", "]", "# Message field (Key)", "return", "index_updates", ",", "completed_keys" ]
Extract response data from a commit response. :type commit_response_pb: :class:`.datastore_pb2.CommitResponse` :param commit_response_pb: The protobuf response from a commit request. :rtype: tuple :returns: The pair of the number of index updates and a list of :class:`.entity_pb2.Key` for each incomplete key that was completed in the commit.
[ "Extract", "response", "data", "from", "a", "commit", "response", "." ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/datastore/google/cloud/datastore/batch.py#L325-L341
train
googleapis/google-cloud-python
datastore/google/cloud/datastore/batch.py
Batch._add_partial_key_entity_pb
def _add_partial_key_entity_pb(self): """Adds a new mutation for an entity with a partial key. :rtype: :class:`.entity_pb2.Entity` :returns: The newly created entity protobuf that will be updated and sent with a commit. """ new_mutation = _datastore_pb2.Mutation() self._mutations.append(new_mutation) return new_mutation.insert
python
def _add_partial_key_entity_pb(self): """Adds a new mutation for an entity with a partial key. :rtype: :class:`.entity_pb2.Entity` :returns: The newly created entity protobuf that will be updated and sent with a commit. """ new_mutation = _datastore_pb2.Mutation() self._mutations.append(new_mutation) return new_mutation.insert
[ "def", "_add_partial_key_entity_pb", "(", "self", ")", ":", "new_mutation", "=", "_datastore_pb2", ".", "Mutation", "(", ")", "self", ".", "_mutations", ".", "append", "(", "new_mutation", ")", "return", "new_mutation", ".", "insert" ]
Adds a new mutation for an entity with a partial key. :rtype: :class:`.entity_pb2.Entity` :returns: The newly created entity protobuf that will be updated and sent with a commit.
[ "Adds", "a", "new", "mutation", "for", "an", "entity", "with", "a", "partial", "key", "." ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/datastore/google/cloud/datastore/batch.py#L107-L116
train
googleapis/google-cloud-python
datastore/google/cloud/datastore/batch.py
Batch._add_complete_key_entity_pb
def _add_complete_key_entity_pb(self): """Adds a new mutation for an entity with a completed key. :rtype: :class:`.entity_pb2.Entity` :returns: The newly created entity protobuf that will be updated and sent with a commit. """ # We use ``upsert`` for entities with completed keys, rather than # ``insert`` or ``update``, in order not to create race conditions # based on prior existence / removal of the entity. new_mutation = _datastore_pb2.Mutation() self._mutations.append(new_mutation) return new_mutation.upsert
python
def _add_complete_key_entity_pb(self): """Adds a new mutation for an entity with a completed key. :rtype: :class:`.entity_pb2.Entity` :returns: The newly created entity protobuf that will be updated and sent with a commit. """ # We use ``upsert`` for entities with completed keys, rather than # ``insert`` or ``update``, in order not to create race conditions # based on prior existence / removal of the entity. new_mutation = _datastore_pb2.Mutation() self._mutations.append(new_mutation) return new_mutation.upsert
[ "def", "_add_complete_key_entity_pb", "(", "self", ")", ":", "# We use ``upsert`` for entities with completed keys, rather than", "# ``insert`` or ``update``, in order not to create race conditions", "# based on prior existence / removal of the entity.", "new_mutation", "=", "_datastore_pb2", ".", "Mutation", "(", ")", "self", ".", "_mutations", ".", "append", "(", "new_mutation", ")", "return", "new_mutation", ".", "upsert" ]
Adds a new mutation for an entity with a completed key. :rtype: :class:`.entity_pb2.Entity` :returns: The newly created entity protobuf that will be updated and sent with a commit.
[ "Adds", "a", "new", "mutation", "for", "an", "entity", "with", "a", "completed", "key", "." ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/datastore/google/cloud/datastore/batch.py#L118-L130
train
googleapis/google-cloud-python
datastore/google/cloud/datastore/batch.py
Batch._add_delete_key_pb
def _add_delete_key_pb(self): """Adds a new mutation for a key to be deleted. :rtype: :class:`.entity_pb2.Key` :returns: The newly created key protobuf that will be deleted when sent with a commit. """ new_mutation = _datastore_pb2.Mutation() self._mutations.append(new_mutation) return new_mutation.delete
python
def _add_delete_key_pb(self): """Adds a new mutation for a key to be deleted. :rtype: :class:`.entity_pb2.Key` :returns: The newly created key protobuf that will be deleted when sent with a commit. """ new_mutation = _datastore_pb2.Mutation() self._mutations.append(new_mutation) return new_mutation.delete
[ "def", "_add_delete_key_pb", "(", "self", ")", ":", "new_mutation", "=", "_datastore_pb2", ".", "Mutation", "(", ")", "self", ".", "_mutations", ".", "append", "(", "new_mutation", ")", "return", "new_mutation", ".", "delete" ]
Adds a new mutation for a key to be deleted. :rtype: :class:`.entity_pb2.Key` :returns: The newly created key protobuf that will be deleted when sent with a commit.
[ "Adds", "a", "new", "mutation", "for", "a", "key", "to", "be", "deleted", "." ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/datastore/google/cloud/datastore/batch.py#L132-L141
train
googleapis/google-cloud-python
datastore/google/cloud/datastore/batch.py
Batch.put
def put(self, entity): """Remember an entity's state to be saved during :meth:`commit`. .. note:: Any existing properties for the entity will be replaced by those currently set on this instance. Already-stored properties which do not correspond to keys set on this instance will be removed from the datastore. .. note:: Property values which are "text" ('unicode' in Python2, 'str' in Python3) map to 'string_value' in the datastore; values which are "bytes" ('str' in Python2, 'bytes' in Python3) map to 'blob_value'. When an entity has a partial key, calling :meth:`commit` sends it as an ``insert`` mutation and the key is completed. On return, the key for the ``entity`` passed in is updated to match the key ID assigned by the server. :type entity: :class:`google.cloud.datastore.entity.Entity` :param entity: the entity to be saved. :raises: :class:`~exceptions.ValueError` if the batch is not in progress, if entity has no key assigned, or if the key's ``project`` does not match ours. """ if self._status != self._IN_PROGRESS: raise ValueError("Batch must be in progress to put()") if entity.key is None: raise ValueError("Entity must have a key") if self.project != entity.key.project: raise ValueError("Key must be from same project as batch") if entity.key.is_partial: entity_pb = self._add_partial_key_entity_pb() self._partial_key_entities.append(entity) else: entity_pb = self._add_complete_key_entity_pb() _assign_entity_to_pb(entity_pb, entity)
python
def put(self, entity): """Remember an entity's state to be saved during :meth:`commit`. .. note:: Any existing properties for the entity will be replaced by those currently set on this instance. Already-stored properties which do not correspond to keys set on this instance will be removed from the datastore. .. note:: Property values which are "text" ('unicode' in Python2, 'str' in Python3) map to 'string_value' in the datastore; values which are "bytes" ('str' in Python2, 'bytes' in Python3) map to 'blob_value'. When an entity has a partial key, calling :meth:`commit` sends it as an ``insert`` mutation and the key is completed. On return, the key for the ``entity`` passed in is updated to match the key ID assigned by the server. :type entity: :class:`google.cloud.datastore.entity.Entity` :param entity: the entity to be saved. :raises: :class:`~exceptions.ValueError` if the batch is not in progress, if entity has no key assigned, or if the key's ``project`` does not match ours. """ if self._status != self._IN_PROGRESS: raise ValueError("Batch must be in progress to put()") if entity.key is None: raise ValueError("Entity must have a key") if self.project != entity.key.project: raise ValueError("Key must be from same project as batch") if entity.key.is_partial: entity_pb = self._add_partial_key_entity_pb() self._partial_key_entities.append(entity) else: entity_pb = self._add_complete_key_entity_pb() _assign_entity_to_pb(entity_pb, entity)
[ "def", "put", "(", "self", ",", "entity", ")", ":", "if", "self", ".", "_status", "!=", "self", ".", "_IN_PROGRESS", ":", "raise", "ValueError", "(", "\"Batch must be in progress to put()\"", ")", "if", "entity", ".", "key", "is", "None", ":", "raise", "ValueError", "(", "\"Entity must have a key\"", ")", "if", "self", ".", "project", "!=", "entity", ".", "key", ".", "project", ":", "raise", "ValueError", "(", "\"Key must be from same project as batch\"", ")", "if", "entity", ".", "key", ".", "is_partial", ":", "entity_pb", "=", "self", ".", "_add_partial_key_entity_pb", "(", ")", "self", ".", "_partial_key_entities", ".", "append", "(", "entity", ")", "else", ":", "entity_pb", "=", "self", ".", "_add_complete_key_entity_pb", "(", ")", "_assign_entity_to_pb", "(", "entity_pb", ",", "entity", ")" ]
Remember an entity's state to be saved during :meth:`commit`. .. note:: Any existing properties for the entity will be replaced by those currently set on this instance. Already-stored properties which do not correspond to keys set on this instance will be removed from the datastore. .. note:: Property values which are "text" ('unicode' in Python2, 'str' in Python3) map to 'string_value' in the datastore; values which are "bytes" ('str' in Python2, 'bytes' in Python3) map to 'blob_value'. When an entity has a partial key, calling :meth:`commit` sends it as an ``insert`` mutation and the key is completed. On return, the key for the ``entity`` passed in is updated to match the key ID assigned by the server. :type entity: :class:`google.cloud.datastore.entity.Entity` :param entity: the entity to be saved. :raises: :class:`~exceptions.ValueError` if the batch is not in progress, if entity has no key assigned, or if the key's ``project`` does not match ours.
[ "Remember", "an", "entity", "s", "state", "to", "be", "saved", "during", ":", "meth", ":", "commit", "." ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/datastore/google/cloud/datastore/batch.py#L159-L200
train
googleapis/google-cloud-python
datastore/google/cloud/datastore/batch.py
Batch.delete
def delete(self, key): """Remember a key to be deleted during :meth:`commit`. :type key: :class:`google.cloud.datastore.key.Key` :param key: the key to be deleted. :raises: :class:`~exceptions.ValueError` if the batch is not in progress, if key is not complete, or if the key's ``project`` does not match ours. """ if self._status != self._IN_PROGRESS: raise ValueError("Batch must be in progress to delete()") if key.is_partial: raise ValueError("Key must be complete") if self.project != key.project: raise ValueError("Key must be from same project as batch") key_pb = key.to_protobuf() self._add_delete_key_pb().CopyFrom(key_pb)
python
def delete(self, key): """Remember a key to be deleted during :meth:`commit`. :type key: :class:`google.cloud.datastore.key.Key` :param key: the key to be deleted. :raises: :class:`~exceptions.ValueError` if the batch is not in progress, if key is not complete, or if the key's ``project`` does not match ours. """ if self._status != self._IN_PROGRESS: raise ValueError("Batch must be in progress to delete()") if key.is_partial: raise ValueError("Key must be complete") if self.project != key.project: raise ValueError("Key must be from same project as batch") key_pb = key.to_protobuf() self._add_delete_key_pb().CopyFrom(key_pb)
[ "def", "delete", "(", "self", ",", "key", ")", ":", "if", "self", ".", "_status", "!=", "self", ".", "_IN_PROGRESS", ":", "raise", "ValueError", "(", "\"Batch must be in progress to delete()\"", ")", "if", "key", ".", "is_partial", ":", "raise", "ValueError", "(", "\"Key must be complete\"", ")", "if", "self", ".", "project", "!=", "key", ".", "project", ":", "raise", "ValueError", "(", "\"Key must be from same project as batch\"", ")", "key_pb", "=", "key", ".", "to_protobuf", "(", ")", "self", ".", "_add_delete_key_pb", "(", ")", ".", "CopyFrom", "(", "key_pb", ")" ]
Remember a key to be deleted during :meth:`commit`. :type key: :class:`google.cloud.datastore.key.Key` :param key: the key to be deleted. :raises: :class:`~exceptions.ValueError` if the batch is not in progress, if key is not complete, or if the key's ``project`` does not match ours.
[ "Remember", "a", "key", "to", "be", "deleted", "during", ":", "meth", ":", "commit", "." ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/datastore/google/cloud/datastore/batch.py#L202-L222
train
googleapis/google-cloud-python
datastore/google/cloud/datastore/batch.py
Batch.begin
def begin(self): """Begins a batch. This method is called automatically when entering a with statement, however it can be called explicitly if you don't want to use a context manager. Overridden by :class:`google.cloud.datastore.transaction.Transaction`. :raises: :class:`ValueError` if the batch has already begun. """ if self._status != self._INITIAL: raise ValueError("Batch already started previously.") self._status = self._IN_PROGRESS
python
def begin(self): """Begins a batch. This method is called automatically when entering a with statement, however it can be called explicitly if you don't want to use a context manager. Overridden by :class:`google.cloud.datastore.transaction.Transaction`. :raises: :class:`ValueError` if the batch has already begun. """ if self._status != self._INITIAL: raise ValueError("Batch already started previously.") self._status = self._IN_PROGRESS
[ "def", "begin", "(", "self", ")", ":", "if", "self", ".", "_status", "!=", "self", ".", "_INITIAL", ":", "raise", "ValueError", "(", "\"Batch already started previously.\"", ")", "self", ".", "_status", "=", "self", ".", "_IN_PROGRESS" ]
Begins a batch. This method is called automatically when entering a with statement, however it can be called explicitly if you don't want to use a context manager. Overridden by :class:`google.cloud.datastore.transaction.Transaction`. :raises: :class:`ValueError` if the batch has already begun.
[ "Begins", "a", "batch", "." ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/datastore/google/cloud/datastore/batch.py#L224-L237
train
googleapis/google-cloud-python
datastore/google/cloud/datastore/batch.py
Batch._commit
def _commit(self): """Commits the batch. This is called by :meth:`commit`. """ if self._id is None: mode = _datastore_pb2.CommitRequest.NON_TRANSACTIONAL else: mode = _datastore_pb2.CommitRequest.TRANSACTIONAL commit_response_pb = self._client._datastore_api.commit( self.project, mode, self._mutations, transaction=self._id ) _, updated_keys = _parse_commit_response(commit_response_pb) # If the back-end returns without error, we are guaranteed that # ``commit`` will return keys that match (length and # order) directly ``_partial_key_entities``. for new_key_pb, entity in zip(updated_keys, self._partial_key_entities): new_id = new_key_pb.path[-1].id entity.key = entity.key.completed_key(new_id)
python
def _commit(self): """Commits the batch. This is called by :meth:`commit`. """ if self._id is None: mode = _datastore_pb2.CommitRequest.NON_TRANSACTIONAL else: mode = _datastore_pb2.CommitRequest.TRANSACTIONAL commit_response_pb = self._client._datastore_api.commit( self.project, mode, self._mutations, transaction=self._id ) _, updated_keys = _parse_commit_response(commit_response_pb) # If the back-end returns without error, we are guaranteed that # ``commit`` will return keys that match (length and # order) directly ``_partial_key_entities``. for new_key_pb, entity in zip(updated_keys, self._partial_key_entities): new_id = new_key_pb.path[-1].id entity.key = entity.key.completed_key(new_id)
[ "def", "_commit", "(", "self", ")", ":", "if", "self", ".", "_id", "is", "None", ":", "mode", "=", "_datastore_pb2", ".", "CommitRequest", ".", "NON_TRANSACTIONAL", "else", ":", "mode", "=", "_datastore_pb2", ".", "CommitRequest", ".", "TRANSACTIONAL", "commit_response_pb", "=", "self", ".", "_client", ".", "_datastore_api", ".", "commit", "(", "self", ".", "project", ",", "mode", ",", "self", ".", "_mutations", ",", "transaction", "=", "self", ".", "_id", ")", "_", ",", "updated_keys", "=", "_parse_commit_response", "(", "commit_response_pb", ")", "# If the back-end returns without error, we are guaranteed that", "# ``commit`` will return keys that match (length and", "# order) directly ``_partial_key_entities``.", "for", "new_key_pb", ",", "entity", "in", "zip", "(", "updated_keys", ",", "self", ".", "_partial_key_entities", ")", ":", "new_id", "=", "new_key_pb", ".", "path", "[", "-", "1", "]", ".", "id", "entity", ".", "key", "=", "entity", ".", "key", ".", "completed_key", "(", "new_id", ")" ]
Commits the batch. This is called by :meth:`commit`.
[ "Commits", "the", "batch", "." ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/datastore/google/cloud/datastore/batch.py#L239-L258
train
googleapis/google-cloud-python
datastore/google/cloud/datastore/batch.py
Batch.commit
def commit(self): """Commits the batch. This is called automatically upon exiting a with statement, however it can be called explicitly if you don't want to use a context manager. :raises: :class:`~exceptions.ValueError` if the batch is not in progress. """ if self._status != self._IN_PROGRESS: raise ValueError("Batch must be in progress to commit()") try: self._commit() finally: self._status = self._FINISHED
python
def commit(self): """Commits the batch. This is called automatically upon exiting a with statement, however it can be called explicitly if you don't want to use a context manager. :raises: :class:`~exceptions.ValueError` if the batch is not in progress. """ if self._status != self._IN_PROGRESS: raise ValueError("Batch must be in progress to commit()") try: self._commit() finally: self._status = self._FINISHED
[ "def", "commit", "(", "self", ")", ":", "if", "self", ".", "_status", "!=", "self", ".", "_IN_PROGRESS", ":", "raise", "ValueError", "(", "\"Batch must be in progress to commit()\"", ")", "try", ":", "self", ".", "_commit", "(", ")", "finally", ":", "self", ".", "_status", "=", "self", ".", "_FINISHED" ]
Commits the batch. This is called automatically upon exiting a with statement, however it can be called explicitly if you don't want to use a context manager. :raises: :class:`~exceptions.ValueError` if the batch is not in progress.
[ "Commits", "the", "batch", "." ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/datastore/google/cloud/datastore/batch.py#L260-L276
train
googleapis/google-cloud-python
datastore/google/cloud/datastore/batch.py
Batch.rollback
def rollback(self): """Rolls back the current batch. Marks the batch as aborted (can't be used again). Overridden by :class:`google.cloud.datastore.transaction.Transaction`. :raises: :class:`~exceptions.ValueError` if the batch is not in progress. """ if self._status != self._IN_PROGRESS: raise ValueError("Batch must be in progress to rollback()") self._status = self._ABORTED
python
def rollback(self): """Rolls back the current batch. Marks the batch as aborted (can't be used again). Overridden by :class:`google.cloud.datastore.transaction.Transaction`. :raises: :class:`~exceptions.ValueError` if the batch is not in progress. """ if self._status != self._IN_PROGRESS: raise ValueError("Batch must be in progress to rollback()") self._status = self._ABORTED
[ "def", "rollback", "(", "self", ")", ":", "if", "self", ".", "_status", "!=", "self", ".", "_IN_PROGRESS", ":", "raise", "ValueError", "(", "\"Batch must be in progress to rollback()\"", ")", "self", ".", "_status", "=", "self", ".", "_ABORTED" ]
Rolls back the current batch. Marks the batch as aborted (can't be used again). Overridden by :class:`google.cloud.datastore.transaction.Transaction`. :raises: :class:`~exceptions.ValueError` if the batch is not in progress.
[ "Rolls", "back", "the", "current", "batch", "." ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/datastore/google/cloud/datastore/batch.py#L278-L291
train
googleapis/google-cloud-python
bigtable/google/cloud/bigtable_admin_v2/gapic/bigtable_table_admin_client.py
BigtableTableAdminClient.snapshot_path
def snapshot_path(cls, project, instance, cluster, snapshot): """Return a fully-qualified snapshot string.""" return google.api_core.path_template.expand( "projects/{project}/instances/{instance}/clusters/{cluster}/snapshots/{snapshot}", project=project, instance=instance, cluster=cluster, snapshot=snapshot, )
python
def snapshot_path(cls, project, instance, cluster, snapshot): """Return a fully-qualified snapshot string.""" return google.api_core.path_template.expand( "projects/{project}/instances/{instance}/clusters/{cluster}/snapshots/{snapshot}", project=project, instance=instance, cluster=cluster, snapshot=snapshot, )
[ "def", "snapshot_path", "(", "cls", ",", "project", ",", "instance", ",", "cluster", ",", "snapshot", ")", ":", "return", "google", ".", "api_core", ".", "path_template", ".", "expand", "(", "\"projects/{project}/instances/{instance}/clusters/{cluster}/snapshots/{snapshot}\"", ",", "project", "=", "project", ",", "instance", "=", "instance", ",", "cluster", "=", "cluster", ",", "snapshot", "=", "snapshot", ",", ")" ]
Return a fully-qualified snapshot string.
[ "Return", "a", "fully", "-", "qualified", "snapshot", "string", "." ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/bigtable/google/cloud/bigtable_admin_v2/gapic/bigtable_table_admin_client.py#L112-L120
train
googleapis/google-cloud-python
bigtable/google/cloud/bigtable_admin_v2/gapic/bigtable_table_admin_client.py
BigtableTableAdminClient.create_table
def create_table( self, parent, table_id, table, initial_splits=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Creates a new table in the specified instance. The table can be created with a full set of initial column families, specified in the request. Example: >>> from google.cloud import bigtable_admin_v2 >>> >>> client = bigtable_admin_v2.BigtableTableAdminClient() >>> >>> parent = client.instance_path('[PROJECT]', '[INSTANCE]') >>> >>> # TODO: Initialize `table_id`: >>> table_id = '' >>> >>> # TODO: Initialize `table`: >>> table = {} >>> >>> response = client.create_table(parent, table_id, table) Args: parent (str): The unique name of the instance in which to create the table. Values are of the form ``projects/<project>/instances/<instance>``. table_id (str): The name by which the new table should be referred to within the parent instance, e.g., ``foobar`` rather than ``<parent>/tables/foobar``. table (Union[dict, ~google.cloud.bigtable_admin_v2.types.Table]): The Table to create. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.bigtable_admin_v2.types.Table` initial_splits (list[Union[dict, ~google.cloud.bigtable_admin_v2.types.Split]]): The optional list of row keys that will be used to initially split the table into several tablets (tablets are similar to HBase regions). Given two split keys, ``s1`` and ``s2``, three tablets will be created, spanning the key ranges: ``[, s1), [s1, s2), [s2, )``. 
Example: - Row keys := ``["a", "apple", "custom", "customer_1", "customer_2",`` ``"other", "zz"]`` - initial\_split\_keys := ``["apple", "customer_1", "customer_2", "other"]`` - Key assignment: - Tablet 1 ``[, apple) => {"a"}.`` - Tablet 2 ``[apple, customer_1) => {"apple", "custom"}.`` - Tablet 3 ``[customer_1, customer_2) => {"customer_1"}.`` - Tablet 4 ``[customer_2, other) => {"customer_2"}.`` - Tablet 5 ``[other, ) => {"other", "zz"}.`` If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.bigtable_admin_v2.types.Split` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.bigtable_admin_v2.types.Table` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. 
if "create_table" not in self._inner_api_calls: self._inner_api_calls[ "create_table" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.create_table, default_retry=self._method_configs["CreateTable"].retry, default_timeout=self._method_configs["CreateTable"].timeout, client_info=self._client_info, ) request = bigtable_table_admin_pb2.CreateTableRequest( parent=parent, table_id=table_id, table=table, initial_splits=initial_splits ) if metadata is None: metadata = [] metadata = list(metadata) try: routing_header = [("parent", parent)] except AttributeError: pass else: routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) return self._inner_api_calls["create_table"]( request, retry=retry, timeout=timeout, metadata=metadata )
python
def create_table( self, parent, table_id, table, initial_splits=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Creates a new table in the specified instance. The table can be created with a full set of initial column families, specified in the request. Example: >>> from google.cloud import bigtable_admin_v2 >>> >>> client = bigtable_admin_v2.BigtableTableAdminClient() >>> >>> parent = client.instance_path('[PROJECT]', '[INSTANCE]') >>> >>> # TODO: Initialize `table_id`: >>> table_id = '' >>> >>> # TODO: Initialize `table`: >>> table = {} >>> >>> response = client.create_table(parent, table_id, table) Args: parent (str): The unique name of the instance in which to create the table. Values are of the form ``projects/<project>/instances/<instance>``. table_id (str): The name by which the new table should be referred to within the parent instance, e.g., ``foobar`` rather than ``<parent>/tables/foobar``. table (Union[dict, ~google.cloud.bigtable_admin_v2.types.Table]): The Table to create. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.bigtable_admin_v2.types.Table` initial_splits (list[Union[dict, ~google.cloud.bigtable_admin_v2.types.Split]]): The optional list of row keys that will be used to initially split the table into several tablets (tablets are similar to HBase regions). Given two split keys, ``s1`` and ``s2``, three tablets will be created, spanning the key ranges: ``[, s1), [s1, s2), [s2, )``. 
Example: - Row keys := ``["a", "apple", "custom", "customer_1", "customer_2",`` ``"other", "zz"]`` - initial\_split\_keys := ``["apple", "customer_1", "customer_2", "other"]`` - Key assignment: - Tablet 1 ``[, apple) => {"a"}.`` - Tablet 2 ``[apple, customer_1) => {"apple", "custom"}.`` - Tablet 3 ``[customer_1, customer_2) => {"customer_1"}.`` - Tablet 4 ``[customer_2, other) => {"customer_2"}.`` - Tablet 5 ``[other, ) => {"other", "zz"}.`` If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.bigtable_admin_v2.types.Split` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.bigtable_admin_v2.types.Table` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. 
if "create_table" not in self._inner_api_calls: self._inner_api_calls[ "create_table" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.create_table, default_retry=self._method_configs["CreateTable"].retry, default_timeout=self._method_configs["CreateTable"].timeout, client_info=self._client_info, ) request = bigtable_table_admin_pb2.CreateTableRequest( parent=parent, table_id=table_id, table=table, initial_splits=initial_splits ) if metadata is None: metadata = [] metadata = list(metadata) try: routing_header = [("parent", parent)] except AttributeError: pass else: routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) return self._inner_api_calls["create_table"]( request, retry=retry, timeout=timeout, metadata=metadata )
[ "def", "create_table", "(", "self", ",", "parent", ",", "table_id", ",", "table", ",", "initial_splits", "=", "None", ",", "retry", "=", "google", ".", "api_core", ".", "gapic_v1", ".", "method", ".", "DEFAULT", ",", "timeout", "=", "google", ".", "api_core", ".", "gapic_v1", ".", "method", ".", "DEFAULT", ",", "metadata", "=", "None", ",", ")", ":", "# Wrap the transport method to add retry and timeout logic.", "if", "\"create_table\"", "not", "in", "self", ".", "_inner_api_calls", ":", "self", ".", "_inner_api_calls", "[", "\"create_table\"", "]", "=", "google", ".", "api_core", ".", "gapic_v1", ".", "method", ".", "wrap_method", "(", "self", ".", "transport", ".", "create_table", ",", "default_retry", "=", "self", ".", "_method_configs", "[", "\"CreateTable\"", "]", ".", "retry", ",", "default_timeout", "=", "self", ".", "_method_configs", "[", "\"CreateTable\"", "]", ".", "timeout", ",", "client_info", "=", "self", ".", "_client_info", ",", ")", "request", "=", "bigtable_table_admin_pb2", ".", "CreateTableRequest", "(", "parent", "=", "parent", ",", "table_id", "=", "table_id", ",", "table", "=", "table", ",", "initial_splits", "=", "initial_splits", ")", "if", "metadata", "is", "None", ":", "metadata", "=", "[", "]", "metadata", "=", "list", "(", "metadata", ")", "try", ":", "routing_header", "=", "[", "(", "\"parent\"", ",", "parent", ")", "]", "except", "AttributeError", ":", "pass", "else", ":", "routing_metadata", "=", "google", ".", "api_core", ".", "gapic_v1", ".", "routing_header", ".", "to_grpc_metadata", "(", "routing_header", ")", "metadata", ".", "append", "(", "routing_metadata", ")", "return", "self", ".", "_inner_api_calls", "[", "\"create_table\"", "]", "(", "request", ",", "retry", "=", "retry", ",", "timeout", "=", "timeout", ",", "metadata", "=", "metadata", ")" ]
Creates a new table in the specified instance. The table can be created with a full set of initial column families, specified in the request. Example: >>> from google.cloud import bigtable_admin_v2 >>> >>> client = bigtable_admin_v2.BigtableTableAdminClient() >>> >>> parent = client.instance_path('[PROJECT]', '[INSTANCE]') >>> >>> # TODO: Initialize `table_id`: >>> table_id = '' >>> >>> # TODO: Initialize `table`: >>> table = {} >>> >>> response = client.create_table(parent, table_id, table) Args: parent (str): The unique name of the instance in which to create the table. Values are of the form ``projects/<project>/instances/<instance>``. table_id (str): The name by which the new table should be referred to within the parent instance, e.g., ``foobar`` rather than ``<parent>/tables/foobar``. table (Union[dict, ~google.cloud.bigtable_admin_v2.types.Table]): The Table to create. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.bigtable_admin_v2.types.Table` initial_splits (list[Union[dict, ~google.cloud.bigtable_admin_v2.types.Split]]): The optional list of row keys that will be used to initially split the table into several tablets (tablets are similar to HBase regions). Given two split keys, ``s1`` and ``s2``, three tablets will be created, spanning the key ranges: ``[, s1), [s1, s2), [s2, )``. 
Example: - Row keys := ``["a", "apple", "custom", "customer_1", "customer_2",`` ``"other", "zz"]`` - initial\_split\_keys := ``["apple", "customer_1", "customer_2", "other"]`` - Key assignment: - Tablet 1 ``[, apple) => {"a"}.`` - Tablet 2 ``[apple, customer_1) => {"apple", "custom"}.`` - Tablet 3 ``[customer_1, customer_2) => {"customer_1"}.`` - Tablet 4 ``[customer_2, other) => {"customer_2"}.`` - Tablet 5 ``[other, ) => {"other", "zz"}.`` If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.bigtable_admin_v2.types.Split` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.bigtable_admin_v2.types.Table` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid.
[ "Creates", "a", "new", "table", "in", "the", "specified", "instance", ".", "The", "table", "can", "be", "created", "with", "a", "full", "set", "of", "initial", "column", "families", "specified", "in", "the", "request", "." ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/bigtable/google/cloud/bigtable_admin_v2/gapic/bigtable_table_admin_client.py#L231-L339
train
googleapis/google-cloud-python
bigtable/google/cloud/bigtable_admin_v2/gapic/bigtable_table_admin_client.py
BigtableTableAdminClient.create_table_from_snapshot
def create_table_from_snapshot( self, parent, table_id, source_snapshot, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Creates a new table from the specified snapshot. The target table must not exist. The snapshot and the table must be in the same instance. Note: This is a private alpha release of Cloud Bigtable snapshots. This feature is not currently available to most Cloud Bigtable customers. This feature might be changed in backward-incompatible ways and is not recommended for production use. It is not subject to any SLA or deprecation policy. Example: >>> from google.cloud import bigtable_admin_v2 >>> >>> client = bigtable_admin_v2.BigtableTableAdminClient() >>> >>> parent = client.instance_path('[PROJECT]', '[INSTANCE]') >>> >>> # TODO: Initialize `table_id`: >>> table_id = '' >>> >>> # TODO: Initialize `source_snapshot`: >>> source_snapshot = '' >>> >>> response = client.create_table_from_snapshot(parent, table_id, source_snapshot) >>> >>> def callback(operation_future): ... # Handle result. ... result = operation_future.result() >>> >>> response.add_done_callback(callback) >>> >>> # Handle metadata. >>> metadata = response.metadata() Args: parent (str): The unique name of the instance in which to create the table. Values are of the form ``projects/<project>/instances/<instance>``. table_id (str): The name by which the new table should be referred to within the parent instance, e.g., ``foobar`` rather than ``<parent>/tables/foobar``. source_snapshot (str): The unique name of the snapshot from which to restore the table. The snapshot and the table must be in the same instance. Values are of the form ``projects/<project>/instances/<instance>/clusters/<cluster>/snapshots/<snapshot>``. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. 
timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.bigtable_admin_v2.types._OperationFuture` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. if "create_table_from_snapshot" not in self._inner_api_calls: self._inner_api_calls[ "create_table_from_snapshot" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.create_table_from_snapshot, default_retry=self._method_configs["CreateTableFromSnapshot"].retry, default_timeout=self._method_configs["CreateTableFromSnapshot"].timeout, client_info=self._client_info, ) request = bigtable_table_admin_pb2.CreateTableFromSnapshotRequest( parent=parent, table_id=table_id, source_snapshot=source_snapshot ) if metadata is None: metadata = [] metadata = list(metadata) try: routing_header = [("parent", parent)] except AttributeError: pass else: routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) operation = self._inner_api_calls["create_table_from_snapshot"]( request, retry=retry, timeout=timeout, metadata=metadata ) return google.api_core.operation.from_gapic( operation, self.transport._operations_client, table_pb2.Table, metadata_type=bigtable_table_admin_pb2.CreateTableFromSnapshotMetadata, )
python
def create_table_from_snapshot( self, parent, table_id, source_snapshot, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Creates a new table from the specified snapshot. The target table must not exist. The snapshot and the table must be in the same instance. Note: This is a private alpha release of Cloud Bigtable snapshots. This feature is not currently available to most Cloud Bigtable customers. This feature might be changed in backward-incompatible ways and is not recommended for production use. It is not subject to any SLA or deprecation policy. Example: >>> from google.cloud import bigtable_admin_v2 >>> >>> client = bigtable_admin_v2.BigtableTableAdminClient() >>> >>> parent = client.instance_path('[PROJECT]', '[INSTANCE]') >>> >>> # TODO: Initialize `table_id`: >>> table_id = '' >>> >>> # TODO: Initialize `source_snapshot`: >>> source_snapshot = '' >>> >>> response = client.create_table_from_snapshot(parent, table_id, source_snapshot) >>> >>> def callback(operation_future): ... # Handle result. ... result = operation_future.result() >>> >>> response.add_done_callback(callback) >>> >>> # Handle metadata. >>> metadata = response.metadata() Args: parent (str): The unique name of the instance in which to create the table. Values are of the form ``projects/<project>/instances/<instance>``. table_id (str): The name by which the new table should be referred to within the parent instance, e.g., ``foobar`` rather than ``<parent>/tables/foobar``. source_snapshot (str): The unique name of the snapshot from which to restore the table. The snapshot and the table must be in the same instance. Values are of the form ``projects/<project>/instances/<instance>/clusters/<cluster>/snapshots/<snapshot>``. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. 
timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.bigtable_admin_v2.types._OperationFuture` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. if "create_table_from_snapshot" not in self._inner_api_calls: self._inner_api_calls[ "create_table_from_snapshot" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.create_table_from_snapshot, default_retry=self._method_configs["CreateTableFromSnapshot"].retry, default_timeout=self._method_configs["CreateTableFromSnapshot"].timeout, client_info=self._client_info, ) request = bigtable_table_admin_pb2.CreateTableFromSnapshotRequest( parent=parent, table_id=table_id, source_snapshot=source_snapshot ) if metadata is None: metadata = [] metadata = list(metadata) try: routing_header = [("parent", parent)] except AttributeError: pass else: routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) operation = self._inner_api_calls["create_table_from_snapshot"]( request, retry=retry, timeout=timeout, metadata=metadata ) return google.api_core.operation.from_gapic( operation, self.transport._operations_client, table_pb2.Table, metadata_type=bigtable_table_admin_pb2.CreateTableFromSnapshotMetadata, )
[ "def", "create_table_from_snapshot", "(", "self", ",", "parent", ",", "table_id", ",", "source_snapshot", ",", "retry", "=", "google", ".", "api_core", ".", "gapic_v1", ".", "method", ".", "DEFAULT", ",", "timeout", "=", "google", ".", "api_core", ".", "gapic_v1", ".", "method", ".", "DEFAULT", ",", "metadata", "=", "None", ",", ")", ":", "# Wrap the transport method to add retry and timeout logic.", "if", "\"create_table_from_snapshot\"", "not", "in", "self", ".", "_inner_api_calls", ":", "self", ".", "_inner_api_calls", "[", "\"create_table_from_snapshot\"", "]", "=", "google", ".", "api_core", ".", "gapic_v1", ".", "method", ".", "wrap_method", "(", "self", ".", "transport", ".", "create_table_from_snapshot", ",", "default_retry", "=", "self", ".", "_method_configs", "[", "\"CreateTableFromSnapshot\"", "]", ".", "retry", ",", "default_timeout", "=", "self", ".", "_method_configs", "[", "\"CreateTableFromSnapshot\"", "]", ".", "timeout", ",", "client_info", "=", "self", ".", "_client_info", ",", ")", "request", "=", "bigtable_table_admin_pb2", ".", "CreateTableFromSnapshotRequest", "(", "parent", "=", "parent", ",", "table_id", "=", "table_id", ",", "source_snapshot", "=", "source_snapshot", ")", "if", "metadata", "is", "None", ":", "metadata", "=", "[", "]", "metadata", "=", "list", "(", "metadata", ")", "try", ":", "routing_header", "=", "[", "(", "\"parent\"", ",", "parent", ")", "]", "except", "AttributeError", ":", "pass", "else", ":", "routing_metadata", "=", "google", ".", "api_core", ".", "gapic_v1", ".", "routing_header", ".", "to_grpc_metadata", "(", "routing_header", ")", "metadata", ".", "append", "(", "routing_metadata", ")", "operation", "=", "self", ".", "_inner_api_calls", "[", "\"create_table_from_snapshot\"", "]", "(", "request", ",", "retry", "=", "retry", ",", "timeout", "=", "timeout", ",", "metadata", "=", "metadata", ")", "return", "google", ".", "api_core", ".", "operation", ".", "from_gapic", "(", "operation", ",", "self", ".", 
"transport", ".", "_operations_client", ",", "table_pb2", ".", "Table", ",", "metadata_type", "=", "bigtable_table_admin_pb2", ".", "CreateTableFromSnapshotMetadata", ",", ")" ]
Creates a new table from the specified snapshot. The target table must not exist. The snapshot and the table must be in the same instance. Note: This is a private alpha release of Cloud Bigtable snapshots. This feature is not currently available to most Cloud Bigtable customers. This feature might be changed in backward-incompatible ways and is not recommended for production use. It is not subject to any SLA or deprecation policy. Example: >>> from google.cloud import bigtable_admin_v2 >>> >>> client = bigtable_admin_v2.BigtableTableAdminClient() >>> >>> parent = client.instance_path('[PROJECT]', '[INSTANCE]') >>> >>> # TODO: Initialize `table_id`: >>> table_id = '' >>> >>> # TODO: Initialize `source_snapshot`: >>> source_snapshot = '' >>> >>> response = client.create_table_from_snapshot(parent, table_id, source_snapshot) >>> >>> def callback(operation_future): ... # Handle result. ... result = operation_future.result() >>> >>> response.add_done_callback(callback) >>> >>> # Handle metadata. >>> metadata = response.metadata() Args: parent (str): The unique name of the instance in which to create the table. Values are of the form ``projects/<project>/instances/<instance>``. table_id (str): The name by which the new table should be referred to within the parent instance, e.g., ``foobar`` rather than ``<parent>/tables/foobar``. source_snapshot (str): The unique name of the snapshot from which to restore the table. The snapshot and the table must be in the same instance. Values are of the form ``projects/<project>/instances/<instance>/clusters/<cluster>/snapshots/<snapshot>``. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. 
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.bigtable_admin_v2.types._OperationFuture` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid.
[ "Creates", "a", "new", "table", "from", "the", "specified", "snapshot", ".", "The", "target", "table", "must", "not", "exist", ".", "The", "snapshot", "and", "the", "table", "must", "be", "in", "the", "same", "instance", "." ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/bigtable/google/cloud/bigtable_admin_v2/gapic/bigtable_table_admin_client.py#L341-L447
train
googleapis/google-cloud-python
bigtable/google/cloud/bigtable_admin_v2/gapic/bigtable_table_admin_client.py
BigtableTableAdminClient.drop_row_range
def drop_row_range( self, name, row_key_prefix=None, delete_all_data_from_table=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Permanently drop/delete a row range from a specified table. The request can specify whether to delete all rows in a table, or only those that match a particular prefix. Example: >>> from google.cloud import bigtable_admin_v2 >>> >>> client = bigtable_admin_v2.BigtableTableAdminClient() >>> >>> name = client.table_path('[PROJECT]', '[INSTANCE]', '[TABLE]') >>> >>> client.drop_row_range(name) Args: name (str): The unique name of the table on which to drop a range of rows. Values are of the form ``projects/<project>/instances/<instance>/tables/<table>``. row_key_prefix (bytes): Delete all rows that start with this row key prefix. Prefix cannot be zero length. delete_all_data_from_table (bool): Delete all rows in the table. Setting this to false is a no-op. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. 
if "drop_row_range" not in self._inner_api_calls: self._inner_api_calls[ "drop_row_range" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.drop_row_range, default_retry=self._method_configs["DropRowRange"].retry, default_timeout=self._method_configs["DropRowRange"].timeout, client_info=self._client_info, ) # Sanity check: We have some fields which are mutually exclusive; # raise ValueError if more than one is sent. google.api_core.protobuf_helpers.check_oneof( row_key_prefix=row_key_prefix, delete_all_data_from_table=delete_all_data_from_table, ) request = bigtable_table_admin_pb2.DropRowRangeRequest( name=name, row_key_prefix=row_key_prefix, delete_all_data_from_table=delete_all_data_from_table, ) if metadata is None: metadata = [] metadata = list(metadata) try: routing_header = [("name", name)] except AttributeError: pass else: routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) self._inner_api_calls["drop_row_range"]( request, retry=retry, timeout=timeout, metadata=metadata )
python
def drop_row_range( self, name, row_key_prefix=None, delete_all_data_from_table=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Permanently drop/delete a row range from a specified table. The request can specify whether to delete all rows in a table, or only those that match a particular prefix. Example: >>> from google.cloud import bigtable_admin_v2 >>> >>> client = bigtable_admin_v2.BigtableTableAdminClient() >>> >>> name = client.table_path('[PROJECT]', '[INSTANCE]', '[TABLE]') >>> >>> client.drop_row_range(name) Args: name (str): The unique name of the table on which to drop a range of rows. Values are of the form ``projects/<project>/instances/<instance>/tables/<table>``. row_key_prefix (bytes): Delete all rows that start with this row key prefix. Prefix cannot be zero length. delete_all_data_from_table (bool): Delete all rows in the table. Setting this to false is a no-op. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. 
if "drop_row_range" not in self._inner_api_calls: self._inner_api_calls[ "drop_row_range" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.drop_row_range, default_retry=self._method_configs["DropRowRange"].retry, default_timeout=self._method_configs["DropRowRange"].timeout, client_info=self._client_info, ) # Sanity check: We have some fields which are mutually exclusive; # raise ValueError if more than one is sent. google.api_core.protobuf_helpers.check_oneof( row_key_prefix=row_key_prefix, delete_all_data_from_table=delete_all_data_from_table, ) request = bigtable_table_admin_pb2.DropRowRangeRequest( name=name, row_key_prefix=row_key_prefix, delete_all_data_from_table=delete_all_data_from_table, ) if metadata is None: metadata = [] metadata = list(metadata) try: routing_header = [("name", name)] except AttributeError: pass else: routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) self._inner_api_calls["drop_row_range"]( request, retry=retry, timeout=timeout, metadata=metadata )
[ "def", "drop_row_range", "(", "self", ",", "name", ",", "row_key_prefix", "=", "None", ",", "delete_all_data_from_table", "=", "None", ",", "retry", "=", "google", ".", "api_core", ".", "gapic_v1", ".", "method", ".", "DEFAULT", ",", "timeout", "=", "google", ".", "api_core", ".", "gapic_v1", ".", "method", ".", "DEFAULT", ",", "metadata", "=", "None", ",", ")", ":", "# Wrap the transport method to add retry and timeout logic.", "if", "\"drop_row_range\"", "not", "in", "self", ".", "_inner_api_calls", ":", "self", ".", "_inner_api_calls", "[", "\"drop_row_range\"", "]", "=", "google", ".", "api_core", ".", "gapic_v1", ".", "method", ".", "wrap_method", "(", "self", ".", "transport", ".", "drop_row_range", ",", "default_retry", "=", "self", ".", "_method_configs", "[", "\"DropRowRange\"", "]", ".", "retry", ",", "default_timeout", "=", "self", ".", "_method_configs", "[", "\"DropRowRange\"", "]", ".", "timeout", ",", "client_info", "=", "self", ".", "_client_info", ",", ")", "# Sanity check: We have some fields which are mutually exclusive;", "# raise ValueError if more than one is sent.", "google", ".", "api_core", ".", "protobuf_helpers", ".", "check_oneof", "(", "row_key_prefix", "=", "row_key_prefix", ",", "delete_all_data_from_table", "=", "delete_all_data_from_table", ",", ")", "request", "=", "bigtable_table_admin_pb2", ".", "DropRowRangeRequest", "(", "name", "=", "name", ",", "row_key_prefix", "=", "row_key_prefix", ",", "delete_all_data_from_table", "=", "delete_all_data_from_table", ",", ")", "if", "metadata", "is", "None", ":", "metadata", "=", "[", "]", "metadata", "=", "list", "(", "metadata", ")", "try", ":", "routing_header", "=", "[", "(", "\"name\"", ",", "name", ")", "]", "except", "AttributeError", ":", "pass", "else", ":", "routing_metadata", "=", "google", ".", "api_core", ".", "gapic_v1", ".", "routing_header", ".", "to_grpc_metadata", "(", "routing_header", ")", "metadata", ".", "append", "(", "routing_metadata", ")", "self", ".", 
"_inner_api_calls", "[", "\"drop_row_range\"", "]", "(", "request", ",", "retry", "=", "retry", ",", "timeout", "=", "timeout", ",", "metadata", "=", "metadata", ")" ]
Permanently drop/delete a row range from a specified table. The request can specify whether to delete all rows in a table, or only those that match a particular prefix. Example: >>> from google.cloud import bigtable_admin_v2 >>> >>> client = bigtable_admin_v2.BigtableTableAdminClient() >>> >>> name = client.table_path('[PROJECT]', '[INSTANCE]', '[TABLE]') >>> >>> client.drop_row_range(name) Args: name (str): The unique name of the table on which to drop a range of rows. Values are of the form ``projects/<project>/instances/<instance>/tables/<table>``. row_key_prefix (bytes): Delete all rows that start with this row key prefix. Prefix cannot be zero length. delete_all_data_from_table (bool): Delete all rows in the table. Setting this to false is a no-op. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid.
[ "Permanently", "drop", "/", "delete", "a", "row", "range", "from", "a", "specified", "table", ".", "The", "request", "can", "specify", "whether", "to", "delete", "all", "rows", "in", "a", "table", "or", "only", "those", "that", "match", "a", "particular", "prefix", "." ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/bigtable/google/cloud/bigtable_admin_v2/gapic/bigtable_table_admin_client.py#L780-L864
train
googleapis/google-cloud-python
bigtable/google/cloud/bigtable_admin_v2/gapic/bigtable_table_admin_client.py
BigtableTableAdminClient.snapshot_table
def snapshot_table( self, name, cluster, snapshot_id, description, ttl=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Creates a new snapshot in the specified cluster from the specified source table. The cluster and the table must be in the same instance. Note: This is a private alpha release of Cloud Bigtable snapshots. This feature is not currently available to most Cloud Bigtable customers. This feature might be changed in backward-incompatible ways and is not recommended for production use. It is not subject to any SLA or deprecation policy. Example: >>> from google.cloud import bigtable_admin_v2 >>> >>> client = bigtable_admin_v2.BigtableTableAdminClient() >>> >>> name = client.table_path('[PROJECT]', '[INSTANCE]', '[TABLE]') >>> >>> # TODO: Initialize `cluster`: >>> cluster = '' >>> >>> # TODO: Initialize `snapshot_id`: >>> snapshot_id = '' >>> >>> # TODO: Initialize `description`: >>> description = '' >>> >>> response = client.snapshot_table(name, cluster, snapshot_id, description) >>> >>> def callback(operation_future): ... # Handle result. ... result = operation_future.result() >>> >>> response.add_done_callback(callback) >>> >>> # Handle metadata. >>> metadata = response.metadata() Args: name (str): The unique name of the table to have the snapshot taken. Values are of the form ``projects/<project>/instances/<instance>/tables/<table>``. cluster (str): The name of the cluster where the snapshot will be created in. Values are of the form ``projects/<project>/instances/<instance>/clusters/<cluster>``. snapshot_id (str): The ID by which the new snapshot should be referred to within the parent cluster, e.g., ``mysnapshot`` of the form: ``[_a-zA-Z0-9][-_.a-zA-Z0-9]*`` rather than ``projects/<project>/instances/<instance>/clusters/<cluster>/snapshots/mysnapshot``. description (str): Description of the snapshot. 
ttl (Union[dict, ~google.cloud.bigtable_admin_v2.types.Duration]): The amount of time that the new snapshot can stay active after it is created. Once 'ttl' expires, the snapshot will get deleted. The maximum amount of time a snapshot can stay active is 7 days. If 'ttl' is not specified, the default value of 24 hours will be used. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.bigtable_admin_v2.types.Duration` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.bigtable_admin_v2.types._OperationFuture` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. 
if "snapshot_table" not in self._inner_api_calls: self._inner_api_calls[ "snapshot_table" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.snapshot_table, default_retry=self._method_configs["SnapshotTable"].retry, default_timeout=self._method_configs["SnapshotTable"].timeout, client_info=self._client_info, ) request = bigtable_table_admin_pb2.SnapshotTableRequest( name=name, cluster=cluster, snapshot_id=snapshot_id, description=description, ttl=ttl, ) if metadata is None: metadata = [] metadata = list(metadata) try: routing_header = [("name", name)] except AttributeError: pass else: routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) operation = self._inner_api_calls["snapshot_table"]( request, retry=retry, timeout=timeout, metadata=metadata ) return google.api_core.operation.from_gapic( operation, self.transport._operations_client, table_pb2.Snapshot, metadata_type=bigtable_table_admin_pb2.SnapshotTableMetadata, )
python
def snapshot_table( self, name, cluster, snapshot_id, description, ttl=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Creates a new snapshot in the specified cluster from the specified source table. The cluster and the table must be in the same instance. Note: This is a private alpha release of Cloud Bigtable snapshots. This feature is not currently available to most Cloud Bigtable customers. This feature might be changed in backward-incompatible ways and is not recommended for production use. It is not subject to any SLA or deprecation policy. Example: >>> from google.cloud import bigtable_admin_v2 >>> >>> client = bigtable_admin_v2.BigtableTableAdminClient() >>> >>> name = client.table_path('[PROJECT]', '[INSTANCE]', '[TABLE]') >>> >>> # TODO: Initialize `cluster`: >>> cluster = '' >>> >>> # TODO: Initialize `snapshot_id`: >>> snapshot_id = '' >>> >>> # TODO: Initialize `description`: >>> description = '' >>> >>> response = client.snapshot_table(name, cluster, snapshot_id, description) >>> >>> def callback(operation_future): ... # Handle result. ... result = operation_future.result() >>> >>> response.add_done_callback(callback) >>> >>> # Handle metadata. >>> metadata = response.metadata() Args: name (str): The unique name of the table to have the snapshot taken. Values are of the form ``projects/<project>/instances/<instance>/tables/<table>``. cluster (str): The name of the cluster where the snapshot will be created in. Values are of the form ``projects/<project>/instances/<instance>/clusters/<cluster>``. snapshot_id (str): The ID by which the new snapshot should be referred to within the parent cluster, e.g., ``mysnapshot`` of the form: ``[_a-zA-Z0-9][-_.a-zA-Z0-9]*`` rather than ``projects/<project>/instances/<instance>/clusters/<cluster>/snapshots/mysnapshot``. description (str): Description of the snapshot. 
ttl (Union[dict, ~google.cloud.bigtable_admin_v2.types.Duration]): The amount of time that the new snapshot can stay active after it is created. Once 'ttl' expires, the snapshot will get deleted. The maximum amount of time a snapshot can stay active is 7 days. If 'ttl' is not specified, the default value of 24 hours will be used. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.bigtable_admin_v2.types.Duration` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.bigtable_admin_v2.types._OperationFuture` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. 
if "snapshot_table" not in self._inner_api_calls: self._inner_api_calls[ "snapshot_table" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.snapshot_table, default_retry=self._method_configs["SnapshotTable"].retry, default_timeout=self._method_configs["SnapshotTable"].timeout, client_info=self._client_info, ) request = bigtable_table_admin_pb2.SnapshotTableRequest( name=name, cluster=cluster, snapshot_id=snapshot_id, description=description, ttl=ttl, ) if metadata is None: metadata = [] metadata = list(metadata) try: routing_header = [("name", name)] except AttributeError: pass else: routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( routing_header ) metadata.append(routing_metadata) operation = self._inner_api_calls["snapshot_table"]( request, retry=retry, timeout=timeout, metadata=metadata ) return google.api_core.operation.from_gapic( operation, self.transport._operations_client, table_pb2.Snapshot, metadata_type=bigtable_table_admin_pb2.SnapshotTableMetadata, )
[ "def", "snapshot_table", "(", "self", ",", "name", ",", "cluster", ",", "snapshot_id", ",", "description", ",", "ttl", "=", "None", ",", "retry", "=", "google", ".", "api_core", ".", "gapic_v1", ".", "method", ".", "DEFAULT", ",", "timeout", "=", "google", ".", "api_core", ".", "gapic_v1", ".", "method", ".", "DEFAULT", ",", "metadata", "=", "None", ",", ")", ":", "# Wrap the transport method to add retry and timeout logic.", "if", "\"snapshot_table\"", "not", "in", "self", ".", "_inner_api_calls", ":", "self", ".", "_inner_api_calls", "[", "\"snapshot_table\"", "]", "=", "google", ".", "api_core", ".", "gapic_v1", ".", "method", ".", "wrap_method", "(", "self", ".", "transport", ".", "snapshot_table", ",", "default_retry", "=", "self", ".", "_method_configs", "[", "\"SnapshotTable\"", "]", ".", "retry", ",", "default_timeout", "=", "self", ".", "_method_configs", "[", "\"SnapshotTable\"", "]", ".", "timeout", ",", "client_info", "=", "self", ".", "_client_info", ",", ")", "request", "=", "bigtable_table_admin_pb2", ".", "SnapshotTableRequest", "(", "name", "=", "name", ",", "cluster", "=", "cluster", ",", "snapshot_id", "=", "snapshot_id", ",", "description", "=", "description", ",", "ttl", "=", "ttl", ",", ")", "if", "metadata", "is", "None", ":", "metadata", "=", "[", "]", "metadata", "=", "list", "(", "metadata", ")", "try", ":", "routing_header", "=", "[", "(", "\"name\"", ",", "name", ")", "]", "except", "AttributeError", ":", "pass", "else", ":", "routing_metadata", "=", "google", ".", "api_core", ".", "gapic_v1", ".", "routing_header", ".", "to_grpc_metadata", "(", "routing_header", ")", "metadata", ".", "append", "(", "routing_metadata", ")", "operation", "=", "self", ".", "_inner_api_calls", "[", "\"snapshot_table\"", "]", "(", "request", ",", "retry", "=", "retry", ",", "timeout", "=", "timeout", ",", "metadata", "=", "metadata", ")", "return", "google", ".", "api_core", ".", "operation", ".", "from_gapic", "(", "operation", ",", "self", ".", 
"transport", ".", "_operations_client", ",", "table_pb2", ".", "Snapshot", ",", "metadata_type", "=", "bigtable_table_admin_pb2", ".", "SnapshotTableMetadata", ",", ")" ]
Creates a new snapshot in the specified cluster from the specified source table. The cluster and the table must be in the same instance. Note: This is a private alpha release of Cloud Bigtable snapshots. This feature is not currently available to most Cloud Bigtable customers. This feature might be changed in backward-incompatible ways and is not recommended for production use. It is not subject to any SLA or deprecation policy. Example: >>> from google.cloud import bigtable_admin_v2 >>> >>> client = bigtable_admin_v2.BigtableTableAdminClient() >>> >>> name = client.table_path('[PROJECT]', '[INSTANCE]', '[TABLE]') >>> >>> # TODO: Initialize `cluster`: >>> cluster = '' >>> >>> # TODO: Initialize `snapshot_id`: >>> snapshot_id = '' >>> >>> # TODO: Initialize `description`: >>> description = '' >>> >>> response = client.snapshot_table(name, cluster, snapshot_id, description) >>> >>> def callback(operation_future): ... # Handle result. ... result = operation_future.result() >>> >>> response.add_done_callback(callback) >>> >>> # Handle metadata. >>> metadata = response.metadata() Args: name (str): The unique name of the table to have the snapshot taken. Values are of the form ``projects/<project>/instances/<instance>/tables/<table>``. cluster (str): The name of the cluster where the snapshot will be created in. Values are of the form ``projects/<project>/instances/<instance>/clusters/<cluster>``. snapshot_id (str): The ID by which the new snapshot should be referred to within the parent cluster, e.g., ``mysnapshot`` of the form: ``[_a-zA-Z0-9][-_.a-zA-Z0-9]*`` rather than ``projects/<project>/instances/<instance>/clusters/<cluster>/snapshots/mysnapshot``. description (str): Description of the snapshot. ttl (Union[dict, ~google.cloud.bigtable_admin_v2.types.Duration]): The amount of time that the new snapshot can stay active after it is created. Once 'ttl' expires, the snapshot will get deleted. The maximum amount of time a snapshot can stay active is 7 days. 
If 'ttl' is not specified, the default value of 24 hours will be used. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.bigtable_admin_v2.types.Duration` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.bigtable_admin_v2.types._OperationFuture` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid.
[ "Creates", "a", "new", "snapshot", "in", "the", "specified", "cluster", "from", "the", "specified", "source", "table", ".", "The", "cluster", "and", "the", "table", "must", "be", "in", "the", "same", "instance", "." ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/bigtable/google/cloud/bigtable_admin_v2/gapic/bigtable_table_admin_client.py#L1022-L1146
train
googleapis/google-cloud-python
talent/google/cloud/talent_v4beta1/gapic/company_service_client.py
CompanyServiceClient.company_path
def company_path(cls, project, company): """Return a fully-qualified company string.""" return google.api_core.path_template.expand( "projects/{project}/companies/{company}", project=project, company=company )
python
def company_path(cls, project, company): """Return a fully-qualified company string.""" return google.api_core.path_template.expand( "projects/{project}/companies/{company}", project=project, company=company )
[ "def", "company_path", "(", "cls", ",", "project", ",", "company", ")", ":", "return", "google", ".", "api_core", ".", "path_template", ".", "expand", "(", "\"projects/{project}/companies/{company}\"", ",", "project", "=", "project", ",", "company", "=", "company", ")" ]
Return a fully-qualified company string.
[ "Return", "a", "fully", "-", "qualified", "company", "string", "." ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/talent/google/cloud/talent_v4beta1/gapic/company_service_client.py#L81-L85
train
googleapis/google-cloud-python
talent/google/cloud/talent_v4beta1/gapic/company_service_client.py
CompanyServiceClient.create_company
def create_company( self, parent, company, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Creates a new company entity. Example: >>> from google.cloud import talent_v4beta1 >>> >>> client = talent_v4beta1.CompanyServiceClient() >>> >>> parent = client.project_path('[PROJECT]') >>> >>> # TODO: Initialize `company`: >>> company = {} >>> >>> response = client.create_company(parent, company) Args: parent (str): Required. Resource name of the project under which the company is created. The format is "projects/{project\_id}", for example, "projects/api-test-project". company (Union[dict, ~google.cloud.talent_v4beta1.types.Company]): Required. The company to be created. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.talent_v4beta1.types.Company` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.talent_v4beta1.types.Company` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. 
if "create_company" not in self._inner_api_calls: self._inner_api_calls[ "create_company" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.create_company, default_retry=self._method_configs["CreateCompany"].retry, default_timeout=self._method_configs["CreateCompany"].timeout, client_info=self._client_info, ) request = company_service_pb2.CreateCompanyRequest( parent=parent, company=company ) return self._inner_api_calls["create_company"]( request, retry=retry, timeout=timeout, metadata=metadata )
python
def create_company( self, parent, company, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Creates a new company entity. Example: >>> from google.cloud import talent_v4beta1 >>> >>> client = talent_v4beta1.CompanyServiceClient() >>> >>> parent = client.project_path('[PROJECT]') >>> >>> # TODO: Initialize `company`: >>> company = {} >>> >>> response = client.create_company(parent, company) Args: parent (str): Required. Resource name of the project under which the company is created. The format is "projects/{project\_id}", for example, "projects/api-test-project". company (Union[dict, ~google.cloud.talent_v4beta1.types.Company]): Required. The company to be created. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.talent_v4beta1.types.Company` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.talent_v4beta1.types.Company` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. 
if "create_company" not in self._inner_api_calls: self._inner_api_calls[ "create_company" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.create_company, default_retry=self._method_configs["CreateCompany"].retry, default_timeout=self._method_configs["CreateCompany"].timeout, client_info=self._client_info, ) request = company_service_pb2.CreateCompanyRequest( parent=parent, company=company ) return self._inner_api_calls["create_company"]( request, retry=retry, timeout=timeout, metadata=metadata )
[ "def", "create_company", "(", "self", ",", "parent", ",", "company", ",", "retry", "=", "google", ".", "api_core", ".", "gapic_v1", ".", "method", ".", "DEFAULT", ",", "timeout", "=", "google", ".", "api_core", ".", "gapic_v1", ".", "method", ".", "DEFAULT", ",", "metadata", "=", "None", ",", ")", ":", "# Wrap the transport method to add retry and timeout logic.", "if", "\"create_company\"", "not", "in", "self", ".", "_inner_api_calls", ":", "self", ".", "_inner_api_calls", "[", "\"create_company\"", "]", "=", "google", ".", "api_core", ".", "gapic_v1", ".", "method", ".", "wrap_method", "(", "self", ".", "transport", ".", "create_company", ",", "default_retry", "=", "self", ".", "_method_configs", "[", "\"CreateCompany\"", "]", ".", "retry", ",", "default_timeout", "=", "self", ".", "_method_configs", "[", "\"CreateCompany\"", "]", ".", "timeout", ",", "client_info", "=", "self", ".", "_client_info", ",", ")", "request", "=", "company_service_pb2", ".", "CreateCompanyRequest", "(", "parent", "=", "parent", ",", "company", "=", "company", ")", "return", "self", ".", "_inner_api_calls", "[", "\"create_company\"", "]", "(", "request", ",", "retry", "=", "retry", ",", "timeout", "=", "timeout", ",", "metadata", "=", "metadata", ")" ]
Creates a new company entity. Example: >>> from google.cloud import talent_v4beta1 >>> >>> client = talent_v4beta1.CompanyServiceClient() >>> >>> parent = client.project_path('[PROJECT]') >>> >>> # TODO: Initialize `company`: >>> company = {} >>> >>> response = client.create_company(parent, company) Args: parent (str): Required. Resource name of the project under which the company is created. The format is "projects/{project\_id}", for example, "projects/api-test-project". company (Union[dict, ~google.cloud.talent_v4beta1.types.Company]): Required. The company to be created. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.talent_v4beta1.types.Company` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.talent_v4beta1.types.Company` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid.
[ "Creates", "a", "new", "company", "entity", "." ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/talent/google/cloud/talent_v4beta1/gapic/company_service_client.py#L186-L257
train
googleapis/google-cloud-python
talent/google/cloud/talent_v4beta1/gapic/company_service_client.py
CompanyServiceClient.update_company
def update_company( self, company, update_mask=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Updates specified company. Example: >>> from google.cloud import talent_v4beta1 >>> >>> client = talent_v4beta1.CompanyServiceClient() >>> >>> # TODO: Initialize `company`: >>> company = {} >>> >>> response = client.update_company(company) Args: company (Union[dict, ~google.cloud.talent_v4beta1.types.Company]): Required. The company resource to replace the current resource in the system. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.talent_v4beta1.types.Company` update_mask (Union[dict, ~google.cloud.talent_v4beta1.types.FieldMask]): Optional but strongly recommended for the best service experience. If ``update_mask`` is provided, only the specified fields in ``company`` are updated. Otherwise all the fields are updated. A field mask to specify the company fields to be updated. Only top level fields of ``Company`` are supported. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.talent_v4beta1.types.FieldMask` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.talent_v4beta1.types.Company` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. 
""" # Wrap the transport method to add retry and timeout logic. if "update_company" not in self._inner_api_calls: self._inner_api_calls[ "update_company" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.update_company, default_retry=self._method_configs["UpdateCompany"].retry, default_timeout=self._method_configs["UpdateCompany"].timeout, client_info=self._client_info, ) request = company_service_pb2.UpdateCompanyRequest( company=company, update_mask=update_mask ) return self._inner_api_calls["update_company"]( request, retry=retry, timeout=timeout, metadata=metadata )
python
def update_company( self, company, update_mask=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Updates specified company. Example: >>> from google.cloud import talent_v4beta1 >>> >>> client = talent_v4beta1.CompanyServiceClient() >>> >>> # TODO: Initialize `company`: >>> company = {} >>> >>> response = client.update_company(company) Args: company (Union[dict, ~google.cloud.talent_v4beta1.types.Company]): Required. The company resource to replace the current resource in the system. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.talent_v4beta1.types.Company` update_mask (Union[dict, ~google.cloud.talent_v4beta1.types.FieldMask]): Optional but strongly recommended for the best service experience. If ``update_mask`` is provided, only the specified fields in ``company`` are updated. Otherwise all the fields are updated. A field mask to specify the company fields to be updated. Only top level fields of ``Company`` are supported. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.talent_v4beta1.types.FieldMask` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.talent_v4beta1.types.Company` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. 
""" # Wrap the transport method to add retry and timeout logic. if "update_company" not in self._inner_api_calls: self._inner_api_calls[ "update_company" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.update_company, default_retry=self._method_configs["UpdateCompany"].retry, default_timeout=self._method_configs["UpdateCompany"].timeout, client_info=self._client_info, ) request = company_service_pb2.UpdateCompanyRequest( company=company, update_mask=update_mask ) return self._inner_api_calls["update_company"]( request, retry=retry, timeout=timeout, metadata=metadata )
[ "def", "update_company", "(", "self", ",", "company", ",", "update_mask", "=", "None", ",", "retry", "=", "google", ".", "api_core", ".", "gapic_v1", ".", "method", ".", "DEFAULT", ",", "timeout", "=", "google", ".", "api_core", ".", "gapic_v1", ".", "method", ".", "DEFAULT", ",", "metadata", "=", "None", ",", ")", ":", "# Wrap the transport method to add retry and timeout logic.", "if", "\"update_company\"", "not", "in", "self", ".", "_inner_api_calls", ":", "self", ".", "_inner_api_calls", "[", "\"update_company\"", "]", "=", "google", ".", "api_core", ".", "gapic_v1", ".", "method", ".", "wrap_method", "(", "self", ".", "transport", ".", "update_company", ",", "default_retry", "=", "self", ".", "_method_configs", "[", "\"UpdateCompany\"", "]", ".", "retry", ",", "default_timeout", "=", "self", ".", "_method_configs", "[", "\"UpdateCompany\"", "]", ".", "timeout", ",", "client_info", "=", "self", ".", "_client_info", ",", ")", "request", "=", "company_service_pb2", ".", "UpdateCompanyRequest", "(", "company", "=", "company", ",", "update_mask", "=", "update_mask", ")", "return", "self", ".", "_inner_api_calls", "[", "\"update_company\"", "]", "(", "request", ",", "retry", "=", "retry", ",", "timeout", "=", "timeout", ",", "metadata", "=", "metadata", ")" ]
Updates specified company. Example: >>> from google.cloud import talent_v4beta1 >>> >>> client = talent_v4beta1.CompanyServiceClient() >>> >>> # TODO: Initialize `company`: >>> company = {} >>> >>> response = client.update_company(company) Args: company (Union[dict, ~google.cloud.talent_v4beta1.types.Company]): Required. The company resource to replace the current resource in the system. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.talent_v4beta1.types.Company` update_mask (Union[dict, ~google.cloud.talent_v4beta1.types.FieldMask]): Optional but strongly recommended for the best service experience. If ``update_mask`` is provided, only the specified fields in ``company`` are updated. Otherwise all the fields are updated. A field mask to specify the company fields to be updated. Only top level fields of ``Company`` are supported. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.talent_v4beta1.types.FieldMask` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.talent_v4beta1.types.Company` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid.
[ "Updates", "specified", "company", "." ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/talent/google/cloud/talent_v4beta1/gapic/company_service_client.py#L320-L393
train
googleapis/google-cloud-python
talent/google/cloud/talent_v4beta1/gapic/company_service_client.py
CompanyServiceClient.delete_company
def delete_company( self, name, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Deletes specified company. Prerequisite: The company has no jobs associated with it. Example: >>> from google.cloud import talent_v4beta1 >>> >>> client = talent_v4beta1.CompanyServiceClient() >>> >>> name = client.company_path('[PROJECT]', '[COMPANY]') >>> >>> client.delete_company(name) Args: name (str): Required. The resource name of the company to be deleted. The format is "projects/{project\_id}/companies/{company\_id}", for example, "projects/api-test-project/companies/foo". retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. if "delete_company" not in self._inner_api_calls: self._inner_api_calls[ "delete_company" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.delete_company, default_retry=self._method_configs["DeleteCompany"].retry, default_timeout=self._method_configs["DeleteCompany"].timeout, client_info=self._client_info, ) request = company_service_pb2.DeleteCompanyRequest(name=name) self._inner_api_calls["delete_company"]( request, retry=retry, timeout=timeout, metadata=metadata )
python
def delete_company( self, name, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Deletes specified company. Prerequisite: The company has no jobs associated with it. Example: >>> from google.cloud import talent_v4beta1 >>> >>> client = talent_v4beta1.CompanyServiceClient() >>> >>> name = client.company_path('[PROJECT]', '[COMPANY]') >>> >>> client.delete_company(name) Args: name (str): Required. The resource name of the company to be deleted. The format is "projects/{project\_id}/companies/{company\_id}", for example, "projects/api-test-project/companies/foo". retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. if "delete_company" not in self._inner_api_calls: self._inner_api_calls[ "delete_company" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.delete_company, default_retry=self._method_configs["DeleteCompany"].retry, default_timeout=self._method_configs["DeleteCompany"].timeout, client_info=self._client_info, ) request = company_service_pb2.DeleteCompanyRequest(name=name) self._inner_api_calls["delete_company"]( request, retry=retry, timeout=timeout, metadata=metadata )
[ "def", "delete_company", "(", "self", ",", "name", ",", "retry", "=", "google", ".", "api_core", ".", "gapic_v1", ".", "method", ".", "DEFAULT", ",", "timeout", "=", "google", ".", "api_core", ".", "gapic_v1", ".", "method", ".", "DEFAULT", ",", "metadata", "=", "None", ",", ")", ":", "# Wrap the transport method to add retry and timeout logic.", "if", "\"delete_company\"", "not", "in", "self", ".", "_inner_api_calls", ":", "self", ".", "_inner_api_calls", "[", "\"delete_company\"", "]", "=", "google", ".", "api_core", ".", "gapic_v1", ".", "method", ".", "wrap_method", "(", "self", ".", "transport", ".", "delete_company", ",", "default_retry", "=", "self", ".", "_method_configs", "[", "\"DeleteCompany\"", "]", ".", "retry", ",", "default_timeout", "=", "self", ".", "_method_configs", "[", "\"DeleteCompany\"", "]", ".", "timeout", ",", "client_info", "=", "self", ".", "_client_info", ",", ")", "request", "=", "company_service_pb2", ".", "DeleteCompanyRequest", "(", "name", "=", "name", ")", "self", ".", "_inner_api_calls", "[", "\"delete_company\"", "]", "(", "request", ",", "retry", "=", "retry", ",", "timeout", "=", "timeout", ",", "metadata", "=", "metadata", ")" ]
Deletes specified company. Prerequisite: The company has no jobs associated with it. Example: >>> from google.cloud import talent_v4beta1 >>> >>> client = talent_v4beta1.CompanyServiceClient() >>> >>> name = client.company_path('[PROJECT]', '[COMPANY]') >>> >>> client.delete_company(name) Args: name (str): Required. The resource name of the company to be deleted. The format is "projects/{project\_id}/companies/{company\_id}", for example, "projects/api-test-project/companies/foo". retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid.
[ "Deletes", "specified", "company", ".", "Prerequisite", ":", "The", "company", "has", "no", "jobs", "associated", "with", "it", "." ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/talent/google/cloud/talent_v4beta1/gapic/company_service_client.py#L395-L452
train
googleapis/google-cloud-python
dns/google/cloud/dns/client.py
Client.quotas
def quotas(self): """Return DNS quotas for the project associated with this client. See https://cloud.google.com/dns/api/v1/projects/get :rtype: mapping :returns: keys for the mapping correspond to those of the ``quota`` sub-mapping of the project resource. """ path = "/projects/%s" % (self.project,) resp = self._connection.api_request(method="GET", path=path) return { key: int(value) for key, value in resp["quota"].items() if key != "kind" }
python
def quotas(self): """Return DNS quotas for the project associated with this client. See https://cloud.google.com/dns/api/v1/projects/get :rtype: mapping :returns: keys for the mapping correspond to those of the ``quota`` sub-mapping of the project resource. """ path = "/projects/%s" % (self.project,) resp = self._connection.api_request(method="GET", path=path) return { key: int(value) for key, value in resp["quota"].items() if key != "kind" }
[ "def", "quotas", "(", "self", ")", ":", "path", "=", "\"/projects/%s\"", "%", "(", "self", ".", "project", ",", ")", "resp", "=", "self", ".", "_connection", ".", "api_request", "(", "method", "=", "\"GET\"", ",", "path", "=", "path", ")", "return", "{", "key", ":", "int", "(", "value", ")", "for", "key", ",", "value", "in", "resp", "[", "\"quota\"", "]", ".", "items", "(", ")", "if", "key", "!=", "\"kind\"", "}" ]
Return DNS quotas for the project associated with this client. See https://cloud.google.com/dns/api/v1/projects/get :rtype: mapping :returns: keys for the mapping correspond to those of the ``quota`` sub-mapping of the project resource.
[ "Return", "DNS", "quotas", "for", "the", "project", "associated", "with", "this", "client", "." ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/dns/google/cloud/dns/client.py#L57-L72
train
googleapis/google-cloud-python
dns/google/cloud/dns/client.py
Client.list_zones
def list_zones(self, max_results=None, page_token=None): """List zones for the project associated with this client. See https://cloud.google.com/dns/api/v1/managedZones/list :type max_results: int :param max_results: maximum number of zones to return, If not passed, defaults to a value set by the API. :type page_token: str :param page_token: Optional. If present, return the next batch of zones, using the value, which must correspond to the ``nextPageToken`` value returned in the previous response. Deprecated: use the ``pages`` property of the returned iterator instead of manually passing the token. :rtype: :class:`~google.api_core.page_iterator.Iterator` :returns: Iterator of :class:`~google.cloud.dns.zone.ManagedZone` belonging to this project. """ path = "/projects/%s/managedZones" % (self.project,) return page_iterator.HTTPIterator( client=self, api_request=self._connection.api_request, path=path, item_to_value=_item_to_zone, items_key="managedZones", page_token=page_token, max_results=max_results, )
python
def list_zones(self, max_results=None, page_token=None): """List zones for the project associated with this client. See https://cloud.google.com/dns/api/v1/managedZones/list :type max_results: int :param max_results: maximum number of zones to return, If not passed, defaults to a value set by the API. :type page_token: str :param page_token: Optional. If present, return the next batch of zones, using the value, which must correspond to the ``nextPageToken`` value returned in the previous response. Deprecated: use the ``pages`` property of the returned iterator instead of manually passing the token. :rtype: :class:`~google.api_core.page_iterator.Iterator` :returns: Iterator of :class:`~google.cloud.dns.zone.ManagedZone` belonging to this project. """ path = "/projects/%s/managedZones" % (self.project,) return page_iterator.HTTPIterator( client=self, api_request=self._connection.api_request, path=path, item_to_value=_item_to_zone, items_key="managedZones", page_token=page_token, max_results=max_results, )
[ "def", "list_zones", "(", "self", ",", "max_results", "=", "None", ",", "page_token", "=", "None", ")", ":", "path", "=", "\"/projects/%s/managedZones\"", "%", "(", "self", ".", "project", ",", ")", "return", "page_iterator", ".", "HTTPIterator", "(", "client", "=", "self", ",", "api_request", "=", "self", ".", "_connection", ".", "api_request", ",", "path", "=", "path", ",", "item_to_value", "=", "_item_to_zone", ",", "items_key", "=", "\"managedZones\"", ",", "page_token", "=", "page_token", ",", "max_results", "=", "max_results", ",", ")" ]
List zones for the project associated with this client. See https://cloud.google.com/dns/api/v1/managedZones/list :type max_results: int :param max_results: maximum number of zones to return, If not passed, defaults to a value set by the API. :type page_token: str :param page_token: Optional. If present, return the next batch of zones, using the value, which must correspond to the ``nextPageToken`` value returned in the previous response. Deprecated: use the ``pages`` property of the returned iterator instead of manually passing the token. :rtype: :class:`~google.api_core.page_iterator.Iterator` :returns: Iterator of :class:`~google.cloud.dns.zone.ManagedZone` belonging to this project.
[ "List", "zones", "for", "the", "project", "associated", "with", "this", "client", "." ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/dns/google/cloud/dns/client.py#L74-L104
train
googleapis/google-cloud-python
dns/google/cloud/dns/client.py
Client.zone
def zone(self, name, dns_name=None, description=None): """Construct a zone bound to this client. :type name: str :param name: Name of the zone. :type dns_name: str :param dns_name: (Optional) DNS name of the zone. If not passed, then calls to :meth:`zone.create` will fail. :type description: str :param description: (Optional) the description for the zone. If not passed, defaults to the value of 'dns_name'. :rtype: :class:`google.cloud.dns.zone.ManagedZone` :returns: a new ``ManagedZone`` instance. """ return ManagedZone(name, dns_name, client=self, description=description)
python
def zone(self, name, dns_name=None, description=None): """Construct a zone bound to this client. :type name: str :param name: Name of the zone. :type dns_name: str :param dns_name: (Optional) DNS name of the zone. If not passed, then calls to :meth:`zone.create` will fail. :type description: str :param description: (Optional) the description for the zone. If not passed, defaults to the value of 'dns_name'. :rtype: :class:`google.cloud.dns.zone.ManagedZone` :returns: a new ``ManagedZone`` instance. """ return ManagedZone(name, dns_name, client=self, description=description)
[ "def", "zone", "(", "self", ",", "name", ",", "dns_name", "=", "None", ",", "description", "=", "None", ")", ":", "return", "ManagedZone", "(", "name", ",", "dns_name", ",", "client", "=", "self", ",", "description", "=", "description", ")" ]
Construct a zone bound to this client. :type name: str :param name: Name of the zone. :type dns_name: str :param dns_name: (Optional) DNS name of the zone. If not passed, then calls to :meth:`zone.create` will fail. :type description: str :param description: (Optional) the description for the zone. If not passed, defaults to the value of 'dns_name'. :rtype: :class:`google.cloud.dns.zone.ManagedZone` :returns: a new ``ManagedZone`` instance.
[ "Construct", "a", "zone", "bound", "to", "this", "client", "." ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/dns/google/cloud/dns/client.py#L106-L125
train
googleapis/google-cloud-python
trace/google/cloud/trace/v1/client.py
Client.patch_traces
def patch_traces(self, traces, project_id=None): """Sends new traces to Stackdriver Trace or updates existing traces. Args: traces (dict): Required. The traces to be patched in the API call. project_id (Optional[str]): ID of the Cloud project where the trace data is stored. """ if project_id is None: project_id = self.project self.trace_api.patch_traces(project_id=project_id, traces=traces)
python
def patch_traces(self, traces, project_id=None): """Sends new traces to Stackdriver Trace or updates existing traces. Args: traces (dict): Required. The traces to be patched in the API call. project_id (Optional[str]): ID of the Cloud project where the trace data is stored. """ if project_id is None: project_id = self.project self.trace_api.patch_traces(project_id=project_id, traces=traces)
[ "def", "patch_traces", "(", "self", ",", "traces", ",", "project_id", "=", "None", ")", ":", "if", "project_id", "is", "None", ":", "project_id", "=", "self", ".", "project", "self", ".", "trace_api", ".", "patch_traces", "(", "project_id", "=", "project_id", ",", "traces", "=", "traces", ")" ]
Sends new traces to Stackdriver Trace or updates existing traces. Args: traces (dict): Required. The traces to be patched in the API call. project_id (Optional[str]): ID of the Cloud project where the trace data is stored.
[ "Sends", "new", "traces", "to", "Stackdriver", "Trace", "or", "updates", "existing", "traces", "." ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/trace/google/cloud/trace/v1/client.py#L60-L72
train
googleapis/google-cloud-python
trace/google/cloud/trace/v1/client.py
Client.get_trace
def get_trace(self, trace_id, project_id=None): """ Gets a single trace by its ID. Args: trace_id (str): ID of the trace to return. project_id (str): Required. ID of the Cloud project where the trace data is stored. Returns: A Trace dict. """ if project_id is None: project_id = self.project return self.trace_api.get_trace(project_id=project_id, trace_id=trace_id)
python
def get_trace(self, trace_id, project_id=None): """ Gets a single trace by its ID. Args: trace_id (str): ID of the trace to return. project_id (str): Required. ID of the Cloud project where the trace data is stored. Returns: A Trace dict. """ if project_id is None: project_id = self.project return self.trace_api.get_trace(project_id=project_id, trace_id=trace_id)
[ "def", "get_trace", "(", "self", ",", "trace_id", ",", "project_id", "=", "None", ")", ":", "if", "project_id", "is", "None", ":", "project_id", "=", "self", ".", "project", "return", "self", ".", "trace_api", ".", "get_trace", "(", "project_id", "=", "project_id", ",", "trace_id", "=", "trace_id", ")" ]
Gets a single trace by its ID. Args: trace_id (str): ID of the trace to return. project_id (str): Required. ID of the Cloud project where the trace data is stored. Returns: A Trace dict.
[ "Gets", "a", "single", "trace", "by", "its", "ID", "." ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/trace/google/cloud/trace/v1/client.py#L74-L90
train
googleapis/google-cloud-python
trace/google/cloud/trace/v1/client.py
Client.list_traces
def list_traces( self, project_id=None, view=None, page_size=None, start_time=None, end_time=None, filter_=None, order_by=None, page_token=None, ): """ Returns of a list of traces that match the filter conditions. Args: project_id (Optional[str]): ID of the Cloud project where the trace data is stored. view (Optional[~google.cloud.trace_v1.gapic.enums. ListTracesRequest.ViewType]): Type of data returned for traces in the list. Default is ``MINIMAL``. page_size (Optional[int]): Maximum number of traces to return. If not specified or <= 0, the implementation selects a reasonable value. The implementation may return fewer traces than the requested page size. start_time (Optional[~datetime.datetime]): Start of the time interval (inclusive) during which the trace data was collected from the application. end_time (Optional[~datetime.datetime]): End of the time interval (inclusive) during which the trace data was collected from the application. filter_ (Optional[str]): An optional filter for the request. order_by (Optional[str]): Field used to sort the returned traces. page_token (Optional[str]): opaque marker for the next "page" of entries. If not passed, the API will return the first page of entries. Returns: A :class:`~google.api_core.page_iterator.Iterator` of traces that match the specified filter conditions. """ if project_id is None: project_id = self.project if start_time is not None: start_time = _datetime_to_pb_timestamp(start_time) if end_time is not None: end_time = _datetime_to_pb_timestamp(end_time) return self.trace_api.list_traces( project_id=project_id, view=view, page_size=page_size, start_time=start_time, end_time=end_time, filter_=filter_, order_by=order_by, page_token=page_token, )
python
def list_traces( self, project_id=None, view=None, page_size=None, start_time=None, end_time=None, filter_=None, order_by=None, page_token=None, ): """ Returns of a list of traces that match the filter conditions. Args: project_id (Optional[str]): ID of the Cloud project where the trace data is stored. view (Optional[~google.cloud.trace_v1.gapic.enums. ListTracesRequest.ViewType]): Type of data returned for traces in the list. Default is ``MINIMAL``. page_size (Optional[int]): Maximum number of traces to return. If not specified or <= 0, the implementation selects a reasonable value. The implementation may return fewer traces than the requested page size. start_time (Optional[~datetime.datetime]): Start of the time interval (inclusive) during which the trace data was collected from the application. end_time (Optional[~datetime.datetime]): End of the time interval (inclusive) during which the trace data was collected from the application. filter_ (Optional[str]): An optional filter for the request. order_by (Optional[str]): Field used to sort the returned traces. page_token (Optional[str]): opaque marker for the next "page" of entries. If not passed, the API will return the first page of entries. Returns: A :class:`~google.api_core.page_iterator.Iterator` of traces that match the specified filter conditions. """ if project_id is None: project_id = self.project if start_time is not None: start_time = _datetime_to_pb_timestamp(start_time) if end_time is not None: end_time = _datetime_to_pb_timestamp(end_time) return self.trace_api.list_traces( project_id=project_id, view=view, page_size=page_size, start_time=start_time, end_time=end_time, filter_=filter_, order_by=order_by, page_token=page_token, )
[ "def", "list_traces", "(", "self", ",", "project_id", "=", "None", ",", "view", "=", "None", ",", "page_size", "=", "None", ",", "start_time", "=", "None", ",", "end_time", "=", "None", ",", "filter_", "=", "None", ",", "order_by", "=", "None", ",", "page_token", "=", "None", ",", ")", ":", "if", "project_id", "is", "None", ":", "project_id", "=", "self", ".", "project", "if", "start_time", "is", "not", "None", ":", "start_time", "=", "_datetime_to_pb_timestamp", "(", "start_time", ")", "if", "end_time", "is", "not", "None", ":", "end_time", "=", "_datetime_to_pb_timestamp", "(", "end_time", ")", "return", "self", ".", "trace_api", ".", "list_traces", "(", "project_id", "=", "project_id", ",", "view", "=", "view", ",", "page_size", "=", "page_size", ",", "start_time", "=", "start_time", ",", "end_time", "=", "end_time", ",", "filter_", "=", "filter_", ",", "order_by", "=", "order_by", ",", "page_token", "=", "page_token", ",", ")" ]
Returns of a list of traces that match the filter conditions. Args: project_id (Optional[str]): ID of the Cloud project where the trace data is stored. view (Optional[~google.cloud.trace_v1.gapic.enums. ListTracesRequest.ViewType]): Type of data returned for traces in the list. Default is ``MINIMAL``. page_size (Optional[int]): Maximum number of traces to return. If not specified or <= 0, the implementation selects a reasonable value. The implementation may return fewer traces than the requested page size. start_time (Optional[~datetime.datetime]): Start of the time interval (inclusive) during which the trace data was collected from the application. end_time (Optional[~datetime.datetime]): End of the time interval (inclusive) during which the trace data was collected from the application. filter_ (Optional[str]): An optional filter for the request. order_by (Optional[str]): Field used to sort the returned traces. page_token (Optional[str]): opaque marker for the next "page" of entries. If not passed, the API will return the first page of entries. Returns: A :class:`~google.api_core.page_iterator.Iterator` of traces that match the specified filter conditions.
[ "Returns", "of", "a", "list", "of", "traces", "that", "match", "the", "filter", "conditions", "." ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/trace/google/cloud/trace/v1/client.py#L92-L157
train
googleapis/google-cloud-python
talent/google/cloud/talent_v4beta1/gapic/resume_service_client.py
ResumeServiceClient.parse_resume
def parse_resume( self, parent, resume, region_code=None, language_code=None, options_=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ Parses a resume into a ``Profile``. The API attempts to fill out the following profile fields if present within the resume: - personNames - addresses - emailAddress - phoneNumbers - personalUris - employmentRecords - educationRecords - skills Note that some attributes in these fields may not be populated if they're not present within the resume or unrecognizable by the resume parser. This API does not save the resume or profile. To create a profile from this resume, clients need to call the CreateProfile method again with the profile returned. The following list of formats are supported: - PDF - TXT - DOC - RTF - DOCX - PNG (only when ``ParseResumeRequest.enable_ocr`` is set to ``true``, otherwise an error is thrown) Example: >>> from google.cloud import talent_v4beta1 >>> >>> client = talent_v4beta1.ResumeServiceClient() >>> >>> parent = client.project_path('[PROJECT]') >>> >>> # TODO: Initialize `resume`: >>> resume = b'' >>> >>> response = client.parse_resume(parent, resume) Args: parent (str): Required. The resource name of the project. The format is "projects/{project\_id}", for example, "projects/api-test-project". resume (bytes): Required. The bytes of the resume file in common format, for example, PDF, TXT. UTF-8 encoding is required if the resume is text-based, otherwise an error is thrown. region_code (str): Optional. The region code indicating where the resume is from. Values are as per the ISO-3166-2 format. For example, US, FR, DE. This value is optional, but providing this value improves the resume parsing quality and performance. An error is thrown if the regionCode is invalid. language_code (str): Optional. The language code of contents in the resume. Language codes must be in BCP-47 format, such as "en-US" or "sr-Latn". 
For more information, see `Tags for Identifying Languages <https://tools.ietf.org/html/bcp47>`__\ {: class="external" target="\_blank" }. options_ (Union[dict, ~google.cloud.talent_v4beta1.types.ParseResumeOptions]): Optional. Options that change how the resume parse is performed. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.talent_v4beta1.types.ParseResumeOptions` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.talent_v4beta1.types.ParseResumeResponse` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. if "parse_resume" not in self._inner_api_calls: self._inner_api_calls[ "parse_resume" ] = google.api_core.gapic_v1.method.wrap_method( self.transport.parse_resume, default_retry=self._method_configs["ParseResume"].retry, default_timeout=self._method_configs["ParseResume"].timeout, client_info=self._client_info, ) request = resume_service_pb2.ParseResumeRequest( parent=parent, resume=resume, region_code=region_code, language_code=language_code, options=options_, ) return self._inner_api_calls["parse_resume"]( request, retry=retry, timeout=timeout, metadata=metadata )
python
def parse_resume(
    self,
    parent,
    resume,
    region_code=None,
    language_code=None,
    options_=None,
    retry=google.api_core.gapic_v1.method.DEFAULT,
    timeout=google.api_core.gapic_v1.method.DEFAULT,
    metadata=None,
):
    """
    Parses a resume into a ``Profile``.

    Profile fields present in the resume (personNames, addresses,
    emailAddress, phoneNumbers, personalUris, employmentRecords,
    educationRecords, skills) are filled in when the parser recognizes
    them; others remain unset.  This call stores nothing — call
    ``CreateProfile`` with the returned profile to persist it.

    Accepted formats: PDF, TXT, DOC, RTF, DOCX, and PNG (PNG requires
    ``ParseResumeRequest.enable_ocr`` to be ``true``, otherwise an error
    is thrown).

    Example:
        >>> from google.cloud import talent_v4beta1
        >>>
        >>> client = talent_v4beta1.ResumeServiceClient()
        >>>
        >>> parent = client.project_path('[PROJECT]')
        >>>
        >>> # TODO: Initialize `resume`:
        >>> resume = b''
        >>>
        >>> response = client.parse_resume(parent, resume)

    Args:
        parent (str): Required. Project resource name in the form
            "projects/{project_id}", e.g. "projects/api-test-project".
        resume (bytes): Required. The resume file bytes (PDF, TXT, ...).
            Text resumes must be UTF-8 encoded or an error is thrown.
        region_code (str): Optional. ISO-3166-2 region code (US, FR, DE,
            ...) indicating the resume's origin; improves parsing.
        language_code (str): Optional. BCP-47 code of the resume's
            language, e.g. "en-US" or "sr-Latn".
        options_ (Union[dict, ~google.cloud.talent_v4beta1.types.ParseResumeOptions]):
            Optional. Parse-behavior options; a dict must mirror the
            :class:`~google.cloud.talent_v4beta1.types.ParseResumeOptions`
            message.
        retry (Optional[google.api_core.retry.Retry]): Retry object for
            the request; ``None`` means no retries.
        timeout (Optional[float]): Seconds to wait for completion
            (per attempt when ``retry`` is specified).
        metadata (Optional[Sequence[Tuple[str, str]]]): Additional
            metadata provided to the method.

    Returns:
        A :class:`~google.cloud.talent_v4beta1.types.ParseResumeResponse` instance.

    Raises:
        google.api_core.exceptions.GoogleAPICallError: If the request
            failed for any reason.
        google.api_core.exceptions.RetryError: If the request failed due
            to a retryable error and retry attempts failed.
        ValueError: If the parameters are invalid.
    """
    # Wrap the transport method with retry/timeout handling once and
    # cache it; subsequent calls reuse the wrapped callable.
    if "parse_resume" not in self._inner_api_calls:
        rpc_config = self._method_configs["ParseResume"]
        wrapped = google.api_core.gapic_v1.method.wrap_method(
            self.transport.parse_resume,
            default_retry=rpc_config.retry,
            default_timeout=rpc_config.timeout,
            client_info=self._client_info,
        )
        self._inner_api_calls["parse_resume"] = wrapped

    # `options_` carries a trailing underscore locally; the request
    # field is plain `options`.
    request = resume_service_pb2.ParseResumeRequest(
        parent=parent,
        resume=resume,
        region_code=region_code,
        language_code=language_code,
        options=options_,
    )
    api_call = self._inner_api_calls["parse_resume"]
    return api_call(request, retry=retry, timeout=timeout, metadata=metadata)
[ "def", "parse_resume", "(", "self", ",", "parent", ",", "resume", ",", "region_code", "=", "None", ",", "language_code", "=", "None", ",", "options_", "=", "None", ",", "retry", "=", "google", ".", "api_core", ".", "gapic_v1", ".", "method", ".", "DEFAULT", ",", "timeout", "=", "google", ".", "api_core", ".", "gapic_v1", ".", "method", ".", "DEFAULT", ",", "metadata", "=", "None", ",", ")", ":", "# Wrap the transport method to add retry and timeout logic.", "if", "\"parse_resume\"", "not", "in", "self", ".", "_inner_api_calls", ":", "self", ".", "_inner_api_calls", "[", "\"parse_resume\"", "]", "=", "google", ".", "api_core", ".", "gapic_v1", ".", "method", ".", "wrap_method", "(", "self", ".", "transport", ".", "parse_resume", ",", "default_retry", "=", "self", ".", "_method_configs", "[", "\"ParseResume\"", "]", ".", "retry", ",", "default_timeout", "=", "self", ".", "_method_configs", "[", "\"ParseResume\"", "]", ".", "timeout", ",", "client_info", "=", "self", ".", "_client_info", ",", ")", "request", "=", "resume_service_pb2", ".", "ParseResumeRequest", "(", "parent", "=", "parent", ",", "resume", "=", "resume", ",", "region_code", "=", "region_code", ",", "language_code", "=", "language_code", ",", "options", "=", "options_", ",", ")", "return", "self", ".", "_inner_api_calls", "[", "\"parse_resume\"", "]", "(", "request", ",", "retry", "=", "retry", ",", "timeout", "=", "timeout", ",", "metadata", "=", "metadata", ")" ]
Parses a resume into a ``Profile``. The API attempts to fill out the following profile fields if present within the resume: - personNames - addresses - emailAddress - phoneNumbers - personalUris - employmentRecords - educationRecords - skills Note that some attributes in these fields may not be populated if they're not present within the resume or unrecognizable by the resume parser. This API does not save the resume or profile. To create a profile from this resume, clients need to call the CreateProfile method again with the profile returned. The following list of formats are supported: - PDF - TXT - DOC - RTF - DOCX - PNG (only when ``ParseResumeRequest.enable_ocr`` is set to ``true``, otherwise an error is thrown) Example: >>> from google.cloud import talent_v4beta1 >>> >>> client = talent_v4beta1.ResumeServiceClient() >>> >>> parent = client.project_path('[PROJECT]') >>> >>> # TODO: Initialize `resume`: >>> resume = b'' >>> >>> response = client.parse_resume(parent, resume) Args: parent (str): Required. The resource name of the project. The format is "projects/{project\_id}", for example, "projects/api-test-project". resume (bytes): Required. The bytes of the resume file in common format, for example, PDF, TXT. UTF-8 encoding is required if the resume is text-based, otherwise an error is thrown. region_code (str): Optional. The region code indicating where the resume is from. Values are as per the ISO-3166-2 format. For example, US, FR, DE. This value is optional, but providing this value improves the resume parsing quality and performance. An error is thrown if the regionCode is invalid. language_code (str): Optional. The language code of contents in the resume. Language codes must be in BCP-47 format, such as "en-US" or "sr-Latn". For more information, see `Tags for Identifying Languages <https://tools.ietf.org/html/bcp47>`__\ {: class="external" target="\_blank" }. options_ (Union[dict, ~google.cloud.talent_v4beta1.types.ParseResumeOptions]): Optional. 
Options that change how the resume parse is performed. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.talent_v4beta1.types.ParseResumeOptions` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.talent_v4beta1.types.ParseResumeResponse` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid.
[ "Parses", "a", "resume", "into", "a", "Profile", ".", "The", "API", "attempts", "to", "fill", "out", "the", "following", "profile", "fields", "if", "present", "within", "the", "resume", ":" ]
85e80125a59cb10f8cb105f25ecc099e4b940b50
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/talent/google/cloud/talent_v4beta1/gapic/resume_service_client.py#L193-L321
train