repo
stringlengths 7
55
| path
stringlengths 4
223
| func_name
stringlengths 1
134
| original_string
stringlengths 75
104k
| language
stringclasses 1
value | code
stringlengths 75
104k
| code_tokens
listlengths 19
28.4k
| docstring
stringlengths 1
46.9k
| docstring_tokens
listlengths 1
1.97k
| sha
stringlengths 40
40
| url
stringlengths 87
315
| partition
stringclasses 1
value |
|---|---|---|---|---|---|---|---|---|---|---|---|
googleapis/google-cloud-python
|
api_core/google/api_core/bidi.py
|
BackgroundConsumer.start
|
def start(self):
"""Start the background thread and begin consuming the thread."""
with self._operational_lock:
ready = threading.Event()
thread = threading.Thread(
name=_BIDIRECTIONAL_CONSUMER_NAME,
target=self._thread_main,
args=(ready,)
)
thread.daemon = True
thread.start()
# Other parts of the code rely on `thread.is_alive` which
# isn't sufficient to know if a thread is active, just that it may
# soon be active. This can cause races. Further protect
# against races by using a ready event and wait on it to be set.
ready.wait()
self._thread = thread
_LOGGER.debug("Started helper thread %s", thread.name)
|
python
|
def start(self):
"""Start the background thread and begin consuming the thread."""
with self._operational_lock:
ready = threading.Event()
thread = threading.Thread(
name=_BIDIRECTIONAL_CONSUMER_NAME,
target=self._thread_main,
args=(ready,)
)
thread.daemon = True
thread.start()
# Other parts of the code rely on `thread.is_alive` which
# isn't sufficient to know if a thread is active, just that it may
# soon be active. This can cause races. Further protect
# against races by using a ready event and wait on it to be set.
ready.wait()
self._thread = thread
_LOGGER.debug("Started helper thread %s", thread.name)
|
[
"def",
"start",
"(",
"self",
")",
":",
"with",
"self",
".",
"_operational_lock",
":",
"ready",
"=",
"threading",
".",
"Event",
"(",
")",
"thread",
"=",
"threading",
".",
"Thread",
"(",
"name",
"=",
"_BIDIRECTIONAL_CONSUMER_NAME",
",",
"target",
"=",
"self",
".",
"_thread_main",
",",
"args",
"=",
"(",
"ready",
",",
")",
")",
"thread",
".",
"daemon",
"=",
"True",
"thread",
".",
"start",
"(",
")",
"# Other parts of the code rely on `thread.is_alive` which",
"# isn't sufficient to know if a thread is active, just that it may",
"# soon be active. This can cause races. Further protect",
"# against races by using a ready event and wait on it to be set.",
"ready",
".",
"wait",
"(",
")",
"self",
".",
"_thread",
"=",
"thread",
"_LOGGER",
".",
"debug",
"(",
"\"Started helper thread %s\"",
",",
"thread",
".",
"name",
")"
] |
Start the background thread and begin consuming the thread.
|
[
"Start",
"the",
"background",
"thread",
"and",
"begin",
"consuming",
"the",
"thread",
"."
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/api_core/google/api_core/bidi.py#L569-L586
|
train
|
googleapis/google-cloud-python
|
api_core/google/api_core/bidi.py
|
BackgroundConsumer.stop
|
def stop(self):
"""Stop consuming the stream and shutdown the background thread."""
with self._operational_lock:
self._bidi_rpc.close()
if self._thread is not None:
# Resume the thread to wake it up in case it is sleeping.
self.resume()
self._thread.join()
self._thread = None
|
python
|
def stop(self):
"""Stop consuming the stream and shutdown the background thread."""
with self._operational_lock:
self._bidi_rpc.close()
if self._thread is not None:
# Resume the thread to wake it up in case it is sleeping.
self.resume()
self._thread.join()
self._thread = None
|
[
"def",
"stop",
"(",
"self",
")",
":",
"with",
"self",
".",
"_operational_lock",
":",
"self",
".",
"_bidi_rpc",
".",
"close",
"(",
")",
"if",
"self",
".",
"_thread",
"is",
"not",
"None",
":",
"# Resume the thread to wake it up in case it is sleeping.",
"self",
".",
"resume",
"(",
")",
"self",
".",
"_thread",
".",
"join",
"(",
")",
"self",
".",
"_thread",
"=",
"None"
] |
Stop consuming the stream and shutdown the background thread.
|
[
"Stop",
"consuming",
"the",
"stream",
"and",
"shutdown",
"the",
"background",
"thread",
"."
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/api_core/google/api_core/bidi.py#L588-L598
|
train
|
googleapis/google-cloud-python
|
api_core/google/api_core/bidi.py
|
BackgroundConsumer.resume
|
def resume(self):
"""Resumes the response stream."""
with self._wake:
self._paused = False
self._wake.notifyAll()
|
python
|
def resume(self):
"""Resumes the response stream."""
with self._wake:
self._paused = False
self._wake.notifyAll()
|
[
"def",
"resume",
"(",
"self",
")",
":",
"with",
"self",
".",
"_wake",
":",
"self",
".",
"_paused",
"=",
"False",
"self",
".",
"_wake",
".",
"notifyAll",
"(",
")"
] |
Resumes the response stream.
|
[
"Resumes",
"the",
"response",
"stream",
"."
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/api_core/google/api_core/bidi.py#L613-L617
|
train
|
googleapis/google-cloud-python
|
oslogin/google/cloud/oslogin_v1/gapic/os_login_service_client.py
|
OsLoginServiceClient.project_path
|
def project_path(cls, user, project):
"""Return a fully-qualified project string."""
return google.api_core.path_template.expand(
"users/{user}/projects/{project}", user=user, project=project
)
|
python
|
def project_path(cls, user, project):
"""Return a fully-qualified project string."""
return google.api_core.path_template.expand(
"users/{user}/projects/{project}", user=user, project=project
)
|
[
"def",
"project_path",
"(",
"cls",
",",
"user",
",",
"project",
")",
":",
"return",
"google",
".",
"api_core",
".",
"path_template",
".",
"expand",
"(",
"\"users/{user}/projects/{project}\"",
",",
"user",
"=",
"user",
",",
"project",
"=",
"project",
")"
] |
Return a fully-qualified project string.
|
[
"Return",
"a",
"fully",
"-",
"qualified",
"project",
"string",
"."
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/oslogin/google/cloud/oslogin_v1/gapic/os_login_service_client.py#L81-L85
|
train
|
googleapis/google-cloud-python
|
oslogin/google/cloud/oslogin_v1/gapic/os_login_service_client.py
|
OsLoginServiceClient.fingerprint_path
|
def fingerprint_path(cls, user, fingerprint):
"""Return a fully-qualified fingerprint string."""
return google.api_core.path_template.expand(
"users/{user}/sshPublicKeys/{fingerprint}",
user=user,
fingerprint=fingerprint,
)
|
python
|
def fingerprint_path(cls, user, fingerprint):
"""Return a fully-qualified fingerprint string."""
return google.api_core.path_template.expand(
"users/{user}/sshPublicKeys/{fingerprint}",
user=user,
fingerprint=fingerprint,
)
|
[
"def",
"fingerprint_path",
"(",
"cls",
",",
"user",
",",
"fingerprint",
")",
":",
"return",
"google",
".",
"api_core",
".",
"path_template",
".",
"expand",
"(",
"\"users/{user}/sshPublicKeys/{fingerprint}\"",
",",
"user",
"=",
"user",
",",
"fingerprint",
"=",
"fingerprint",
",",
")"
] |
Return a fully-qualified fingerprint string.
|
[
"Return",
"a",
"fully",
"-",
"qualified",
"fingerprint",
"string",
"."
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/oslogin/google/cloud/oslogin_v1/gapic/os_login_service_client.py#L88-L94
|
train
|
googleapis/google-cloud-python
|
oslogin/google/cloud/oslogin_v1/gapic/os_login_service_client.py
|
OsLoginServiceClient.delete_posix_account
|
def delete_posix_account(
self,
name,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Deletes a POSIX account.
Example:
>>> from google.cloud import oslogin_v1
>>>
>>> client = oslogin_v1.OsLoginServiceClient()
>>>
>>> name = client.project_path('[USER]', '[PROJECT]')
>>>
>>> client.delete_posix_account(name)
Args:
name (str): A reference to the POSIX account to update. POSIX accounts are
identified by the project ID they are associated with. A reference to
the POSIX account is in format ``users/{user}/projects/{project}``.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "delete_posix_account" not in self._inner_api_calls:
self._inner_api_calls[
"delete_posix_account"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.delete_posix_account,
default_retry=self._method_configs["DeletePosixAccount"].retry,
default_timeout=self._method_configs["DeletePosixAccount"].timeout,
client_info=self._client_info,
)
request = oslogin_pb2.DeletePosixAccountRequest(name=name)
self._inner_api_calls["delete_posix_account"](
request, retry=retry, timeout=timeout, metadata=metadata
)
|
python
|
def delete_posix_account(
self,
name,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Deletes a POSIX account.
Example:
>>> from google.cloud import oslogin_v1
>>>
>>> client = oslogin_v1.OsLoginServiceClient()
>>>
>>> name = client.project_path('[USER]', '[PROJECT]')
>>>
>>> client.delete_posix_account(name)
Args:
name (str): A reference to the POSIX account to update. POSIX accounts are
identified by the project ID they are associated with. A reference to
the POSIX account is in format ``users/{user}/projects/{project}``.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "delete_posix_account" not in self._inner_api_calls:
self._inner_api_calls[
"delete_posix_account"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.delete_posix_account,
default_retry=self._method_configs["DeletePosixAccount"].retry,
default_timeout=self._method_configs["DeletePosixAccount"].timeout,
client_info=self._client_info,
)
request = oslogin_pb2.DeletePosixAccountRequest(name=name)
self._inner_api_calls["delete_posix_account"](
request, retry=retry, timeout=timeout, metadata=metadata
)
|
[
"def",
"delete_posix_account",
"(",
"self",
",",
"name",
",",
"retry",
"=",
"google",
".",
"api_core",
".",
"gapic_v1",
".",
"method",
".",
"DEFAULT",
",",
"timeout",
"=",
"google",
".",
"api_core",
".",
"gapic_v1",
".",
"method",
".",
"DEFAULT",
",",
"metadata",
"=",
"None",
",",
")",
":",
"# Wrap the transport method to add retry and timeout logic.",
"if",
"\"delete_posix_account\"",
"not",
"in",
"self",
".",
"_inner_api_calls",
":",
"self",
".",
"_inner_api_calls",
"[",
"\"delete_posix_account\"",
"]",
"=",
"google",
".",
"api_core",
".",
"gapic_v1",
".",
"method",
".",
"wrap_method",
"(",
"self",
".",
"transport",
".",
"delete_posix_account",
",",
"default_retry",
"=",
"self",
".",
"_method_configs",
"[",
"\"DeletePosixAccount\"",
"]",
".",
"retry",
",",
"default_timeout",
"=",
"self",
".",
"_method_configs",
"[",
"\"DeletePosixAccount\"",
"]",
".",
"timeout",
",",
"client_info",
"=",
"self",
".",
"_client_info",
",",
")",
"request",
"=",
"oslogin_pb2",
".",
"DeletePosixAccountRequest",
"(",
"name",
"=",
"name",
")",
"self",
".",
"_inner_api_calls",
"[",
"\"delete_posix_account\"",
"]",
"(",
"request",
",",
"retry",
"=",
"retry",
",",
"timeout",
"=",
"timeout",
",",
"metadata",
"=",
"metadata",
")"
] |
Deletes a POSIX account.
Example:
>>> from google.cloud import oslogin_v1
>>>
>>> client = oslogin_v1.OsLoginServiceClient()
>>>
>>> name = client.project_path('[USER]', '[PROJECT]')
>>>
>>> client.delete_posix_account(name)
Args:
name (str): A reference to the POSIX account to update. POSIX accounts are
identified by the project ID they are associated with. A reference to
the POSIX account is in format ``users/{user}/projects/{project}``.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
|
[
"Deletes",
"a",
"POSIX",
"account",
"."
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/oslogin/google/cloud/oslogin_v1/gapic/os_login_service_client.py#L195-L248
|
train
|
googleapis/google-cloud-python
|
oslogin/google/cloud/oslogin_v1/gapic/os_login_service_client.py
|
OsLoginServiceClient.import_ssh_public_key
|
def import_ssh_public_key(
self,
parent,
ssh_public_key,
project_id=None,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Adds an SSH public key and returns the profile information. Default POSIX
account information is set when no username and UID exist as part of the
login profile.
Example:
>>> from google.cloud import oslogin_v1
>>>
>>> client = oslogin_v1.OsLoginServiceClient()
>>>
>>> parent = client.user_path('[USER]')
>>>
>>> # TODO: Initialize `ssh_public_key`:
>>> ssh_public_key = {}
>>>
>>> response = client.import_ssh_public_key(parent, ssh_public_key)
Args:
parent (str): The unique ID for the user in format ``users/{user}``.
ssh_public_key (Union[dict, ~google.cloud.oslogin_v1.types.SshPublicKey]): The SSH public key and expiration time.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.oslogin_v1.types.SshPublicKey`
project_id (str): The project ID of the Google Cloud Platform project.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.oslogin_v1.types.ImportSshPublicKeyResponse` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "import_ssh_public_key" not in self._inner_api_calls:
self._inner_api_calls[
"import_ssh_public_key"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.import_ssh_public_key,
default_retry=self._method_configs["ImportSshPublicKey"].retry,
default_timeout=self._method_configs["ImportSshPublicKey"].timeout,
client_info=self._client_info,
)
request = oslogin_pb2.ImportSshPublicKeyRequest(
parent=parent, ssh_public_key=ssh_public_key, project_id=project_id
)
return self._inner_api_calls["import_ssh_public_key"](
request, retry=retry, timeout=timeout, metadata=metadata
)
|
python
|
def import_ssh_public_key(
self,
parent,
ssh_public_key,
project_id=None,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Adds an SSH public key and returns the profile information. Default POSIX
account information is set when no username and UID exist as part of the
login profile.
Example:
>>> from google.cloud import oslogin_v1
>>>
>>> client = oslogin_v1.OsLoginServiceClient()
>>>
>>> parent = client.user_path('[USER]')
>>>
>>> # TODO: Initialize `ssh_public_key`:
>>> ssh_public_key = {}
>>>
>>> response = client.import_ssh_public_key(parent, ssh_public_key)
Args:
parent (str): The unique ID for the user in format ``users/{user}``.
ssh_public_key (Union[dict, ~google.cloud.oslogin_v1.types.SshPublicKey]): The SSH public key and expiration time.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.oslogin_v1.types.SshPublicKey`
project_id (str): The project ID of the Google Cloud Platform project.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.oslogin_v1.types.ImportSshPublicKeyResponse` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "import_ssh_public_key" not in self._inner_api_calls:
self._inner_api_calls[
"import_ssh_public_key"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.import_ssh_public_key,
default_retry=self._method_configs["ImportSshPublicKey"].retry,
default_timeout=self._method_configs["ImportSshPublicKey"].timeout,
client_info=self._client_info,
)
request = oslogin_pb2.ImportSshPublicKeyRequest(
parent=parent, ssh_public_key=ssh_public_key, project_id=project_id
)
return self._inner_api_calls["import_ssh_public_key"](
request, retry=retry, timeout=timeout, metadata=metadata
)
|
[
"def",
"import_ssh_public_key",
"(",
"self",
",",
"parent",
",",
"ssh_public_key",
",",
"project_id",
"=",
"None",
",",
"retry",
"=",
"google",
".",
"api_core",
".",
"gapic_v1",
".",
"method",
".",
"DEFAULT",
",",
"timeout",
"=",
"google",
".",
"api_core",
".",
"gapic_v1",
".",
"method",
".",
"DEFAULT",
",",
"metadata",
"=",
"None",
",",
")",
":",
"# Wrap the transport method to add retry and timeout logic.",
"if",
"\"import_ssh_public_key\"",
"not",
"in",
"self",
".",
"_inner_api_calls",
":",
"self",
".",
"_inner_api_calls",
"[",
"\"import_ssh_public_key\"",
"]",
"=",
"google",
".",
"api_core",
".",
"gapic_v1",
".",
"method",
".",
"wrap_method",
"(",
"self",
".",
"transport",
".",
"import_ssh_public_key",
",",
"default_retry",
"=",
"self",
".",
"_method_configs",
"[",
"\"ImportSshPublicKey\"",
"]",
".",
"retry",
",",
"default_timeout",
"=",
"self",
".",
"_method_configs",
"[",
"\"ImportSshPublicKey\"",
"]",
".",
"timeout",
",",
"client_info",
"=",
"self",
".",
"_client_info",
",",
")",
"request",
"=",
"oslogin_pb2",
".",
"ImportSshPublicKeyRequest",
"(",
"parent",
"=",
"parent",
",",
"ssh_public_key",
"=",
"ssh_public_key",
",",
"project_id",
"=",
"project_id",
")",
"return",
"self",
".",
"_inner_api_calls",
"[",
"\"import_ssh_public_key\"",
"]",
"(",
"request",
",",
"retry",
"=",
"retry",
",",
"timeout",
"=",
"timeout",
",",
"metadata",
"=",
"metadata",
")"
] |
Adds an SSH public key and returns the profile information. Default POSIX
account information is set when no username and UID exist as part of the
login profile.
Example:
>>> from google.cloud import oslogin_v1
>>>
>>> client = oslogin_v1.OsLoginServiceClient()
>>>
>>> parent = client.user_path('[USER]')
>>>
>>> # TODO: Initialize `ssh_public_key`:
>>> ssh_public_key = {}
>>>
>>> response = client.import_ssh_public_key(parent, ssh_public_key)
Args:
parent (str): The unique ID for the user in format ``users/{user}``.
ssh_public_key (Union[dict, ~google.cloud.oslogin_v1.types.SshPublicKey]): The SSH public key and expiration time.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.oslogin_v1.types.SshPublicKey`
project_id (str): The project ID of the Google Cloud Platform project.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.oslogin_v1.types.ImportSshPublicKeyResponse` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
|
[
"Adds",
"an",
"SSH",
"public",
"key",
"and",
"returns",
"the",
"profile",
"information",
".",
"Default",
"POSIX",
"account",
"information",
"is",
"set",
"when",
"no",
"username",
"and",
"UID",
"exist",
"as",
"part",
"of",
"the",
"login",
"profile",
"."
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/oslogin/google/cloud/oslogin_v1/gapic/os_login_service_client.py#L420-L488
|
train
|
googleapis/google-cloud-python
|
oslogin/google/cloud/oslogin_v1/gapic/os_login_service_client.py
|
OsLoginServiceClient.update_ssh_public_key
|
def update_ssh_public_key(
self,
name,
ssh_public_key,
update_mask=None,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Updates an SSH public key and returns the profile information. This method
supports patch semantics.
Example:
>>> from google.cloud import oslogin_v1
>>>
>>> client = oslogin_v1.OsLoginServiceClient()
>>>
>>> name = client.fingerprint_path('[USER]', '[FINGERPRINT]')
>>>
>>> # TODO: Initialize `ssh_public_key`:
>>> ssh_public_key = {}
>>>
>>> response = client.update_ssh_public_key(name, ssh_public_key)
Args:
name (str): The fingerprint of the public key to update. Public keys are identified
by their SHA-256 fingerprint. The fingerprint of the public key is in
format ``users/{user}/sshPublicKeys/{fingerprint}``.
ssh_public_key (Union[dict, ~google.cloud.oslogin_v1.types.SshPublicKey]): The SSH public key and expiration time.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.oslogin_v1.types.SshPublicKey`
update_mask (Union[dict, ~google.cloud.oslogin_v1.types.FieldMask]): Mask to control which fields get updated. Updates all if not present.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.oslogin_v1.types.FieldMask`
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.oslogin_v1.types.SshPublicKey` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "update_ssh_public_key" not in self._inner_api_calls:
self._inner_api_calls[
"update_ssh_public_key"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.update_ssh_public_key,
default_retry=self._method_configs["UpdateSshPublicKey"].retry,
default_timeout=self._method_configs["UpdateSshPublicKey"].timeout,
client_info=self._client_info,
)
request = oslogin_pb2.UpdateSshPublicKeyRequest(
name=name, ssh_public_key=ssh_public_key, update_mask=update_mask
)
return self._inner_api_calls["update_ssh_public_key"](
request, retry=retry, timeout=timeout, metadata=metadata
)
|
python
|
def update_ssh_public_key(
self,
name,
ssh_public_key,
update_mask=None,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Updates an SSH public key and returns the profile information. This method
supports patch semantics.
Example:
>>> from google.cloud import oslogin_v1
>>>
>>> client = oslogin_v1.OsLoginServiceClient()
>>>
>>> name = client.fingerprint_path('[USER]', '[FINGERPRINT]')
>>>
>>> # TODO: Initialize `ssh_public_key`:
>>> ssh_public_key = {}
>>>
>>> response = client.update_ssh_public_key(name, ssh_public_key)
Args:
name (str): The fingerprint of the public key to update. Public keys are identified
by their SHA-256 fingerprint. The fingerprint of the public key is in
format ``users/{user}/sshPublicKeys/{fingerprint}``.
ssh_public_key (Union[dict, ~google.cloud.oslogin_v1.types.SshPublicKey]): The SSH public key and expiration time.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.oslogin_v1.types.SshPublicKey`
update_mask (Union[dict, ~google.cloud.oslogin_v1.types.FieldMask]): Mask to control which fields get updated. Updates all if not present.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.oslogin_v1.types.FieldMask`
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.oslogin_v1.types.SshPublicKey` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "update_ssh_public_key" not in self._inner_api_calls:
self._inner_api_calls[
"update_ssh_public_key"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.update_ssh_public_key,
default_retry=self._method_configs["UpdateSshPublicKey"].retry,
default_timeout=self._method_configs["UpdateSshPublicKey"].timeout,
client_info=self._client_info,
)
request = oslogin_pb2.UpdateSshPublicKeyRequest(
name=name, ssh_public_key=ssh_public_key, update_mask=update_mask
)
return self._inner_api_calls["update_ssh_public_key"](
request, retry=retry, timeout=timeout, metadata=metadata
)
|
[
"def",
"update_ssh_public_key",
"(",
"self",
",",
"name",
",",
"ssh_public_key",
",",
"update_mask",
"=",
"None",
",",
"retry",
"=",
"google",
".",
"api_core",
".",
"gapic_v1",
".",
"method",
".",
"DEFAULT",
",",
"timeout",
"=",
"google",
".",
"api_core",
".",
"gapic_v1",
".",
"method",
".",
"DEFAULT",
",",
"metadata",
"=",
"None",
",",
")",
":",
"# Wrap the transport method to add retry and timeout logic.",
"if",
"\"update_ssh_public_key\"",
"not",
"in",
"self",
".",
"_inner_api_calls",
":",
"self",
".",
"_inner_api_calls",
"[",
"\"update_ssh_public_key\"",
"]",
"=",
"google",
".",
"api_core",
".",
"gapic_v1",
".",
"method",
".",
"wrap_method",
"(",
"self",
".",
"transport",
".",
"update_ssh_public_key",
",",
"default_retry",
"=",
"self",
".",
"_method_configs",
"[",
"\"UpdateSshPublicKey\"",
"]",
".",
"retry",
",",
"default_timeout",
"=",
"self",
".",
"_method_configs",
"[",
"\"UpdateSshPublicKey\"",
"]",
".",
"timeout",
",",
"client_info",
"=",
"self",
".",
"_client_info",
",",
")",
"request",
"=",
"oslogin_pb2",
".",
"UpdateSshPublicKeyRequest",
"(",
"name",
"=",
"name",
",",
"ssh_public_key",
"=",
"ssh_public_key",
",",
"update_mask",
"=",
"update_mask",
")",
"return",
"self",
".",
"_inner_api_calls",
"[",
"\"update_ssh_public_key\"",
"]",
"(",
"request",
",",
"retry",
"=",
"retry",
",",
"timeout",
"=",
"timeout",
",",
"metadata",
"=",
"metadata",
")"
] |
Updates an SSH public key and returns the profile information. This method
supports patch semantics.
Example:
>>> from google.cloud import oslogin_v1
>>>
>>> client = oslogin_v1.OsLoginServiceClient()
>>>
>>> name = client.fingerprint_path('[USER]', '[FINGERPRINT]')
>>>
>>> # TODO: Initialize `ssh_public_key`:
>>> ssh_public_key = {}
>>>
>>> response = client.update_ssh_public_key(name, ssh_public_key)
Args:
name (str): The fingerprint of the public key to update. Public keys are identified
by their SHA-256 fingerprint. The fingerprint of the public key is in
format ``users/{user}/sshPublicKeys/{fingerprint}``.
ssh_public_key (Union[dict, ~google.cloud.oslogin_v1.types.SshPublicKey]): The SSH public key and expiration time.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.oslogin_v1.types.SshPublicKey`
update_mask (Union[dict, ~google.cloud.oslogin_v1.types.FieldMask]): Mask to control which fields get updated. Updates all if not present.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.oslogin_v1.types.FieldMask`
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.oslogin_v1.types.SshPublicKey` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
|
[
"Updates",
"an",
"SSH",
"public",
"key",
"and",
"returns",
"the",
"profile",
"information",
".",
"This",
"method",
"supports",
"patch",
"semantics",
"."
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/oslogin/google/cloud/oslogin_v1/gapic/os_login_service_client.py#L490-L562
|
train
|
googleapis/google-cloud-python
|
bigtable/google/cloud/bigtable/column_family.py
|
_gc_rule_from_pb
|
def _gc_rule_from_pb(gc_rule_pb):
"""Convert a protobuf GC rule to a native object.
:type gc_rule_pb: :class:`.table_v2_pb2.GcRule`
:param gc_rule_pb: The GC rule to convert.
:rtype: :class:`GarbageCollectionRule` or :data:`NoneType <types.NoneType>`
:returns: An instance of one of the native rules defined
in :module:`column_family` or :data:`None` if no values were
set on the protobuf passed in.
:raises: :class:`ValueError <exceptions.ValueError>` if the rule name
is unexpected.
"""
rule_name = gc_rule_pb.WhichOneof("rule")
if rule_name is None:
return None
if rule_name == "max_num_versions":
return MaxVersionsGCRule(gc_rule_pb.max_num_versions)
elif rule_name == "max_age":
max_age = _helpers._duration_pb_to_timedelta(gc_rule_pb.max_age)
return MaxAgeGCRule(max_age)
elif rule_name == "union":
return GCRuleUnion([_gc_rule_from_pb(rule) for rule in gc_rule_pb.union.rules])
elif rule_name == "intersection":
rules = [_gc_rule_from_pb(rule) for rule in gc_rule_pb.intersection.rules]
return GCRuleIntersection(rules)
else:
raise ValueError("Unexpected rule name", rule_name)
|
python
|
def _gc_rule_from_pb(gc_rule_pb):
"""Convert a protobuf GC rule to a native object.
:type gc_rule_pb: :class:`.table_v2_pb2.GcRule`
:param gc_rule_pb: The GC rule to convert.
:rtype: :class:`GarbageCollectionRule` or :data:`NoneType <types.NoneType>`
:returns: An instance of one of the native rules defined
in :module:`column_family` or :data:`None` if no values were
set on the protobuf passed in.
:raises: :class:`ValueError <exceptions.ValueError>` if the rule name
is unexpected.
"""
rule_name = gc_rule_pb.WhichOneof("rule")
if rule_name is None:
return None
if rule_name == "max_num_versions":
return MaxVersionsGCRule(gc_rule_pb.max_num_versions)
elif rule_name == "max_age":
max_age = _helpers._duration_pb_to_timedelta(gc_rule_pb.max_age)
return MaxAgeGCRule(max_age)
elif rule_name == "union":
return GCRuleUnion([_gc_rule_from_pb(rule) for rule in gc_rule_pb.union.rules])
elif rule_name == "intersection":
rules = [_gc_rule_from_pb(rule) for rule in gc_rule_pb.intersection.rules]
return GCRuleIntersection(rules)
else:
raise ValueError("Unexpected rule name", rule_name)
|
[
"def",
"_gc_rule_from_pb",
"(",
"gc_rule_pb",
")",
":",
"rule_name",
"=",
"gc_rule_pb",
".",
"WhichOneof",
"(",
"\"rule\"",
")",
"if",
"rule_name",
"is",
"None",
":",
"return",
"None",
"if",
"rule_name",
"==",
"\"max_num_versions\"",
":",
"return",
"MaxVersionsGCRule",
"(",
"gc_rule_pb",
".",
"max_num_versions",
")",
"elif",
"rule_name",
"==",
"\"max_age\"",
":",
"max_age",
"=",
"_helpers",
".",
"_duration_pb_to_timedelta",
"(",
"gc_rule_pb",
".",
"max_age",
")",
"return",
"MaxAgeGCRule",
"(",
"max_age",
")",
"elif",
"rule_name",
"==",
"\"union\"",
":",
"return",
"GCRuleUnion",
"(",
"[",
"_gc_rule_from_pb",
"(",
"rule",
")",
"for",
"rule",
"in",
"gc_rule_pb",
".",
"union",
".",
"rules",
"]",
")",
"elif",
"rule_name",
"==",
"\"intersection\"",
":",
"rules",
"=",
"[",
"_gc_rule_from_pb",
"(",
"rule",
")",
"for",
"rule",
"in",
"gc_rule_pb",
".",
"intersection",
".",
"rules",
"]",
"return",
"GCRuleIntersection",
"(",
"rules",
")",
"else",
":",
"raise",
"ValueError",
"(",
"\"Unexpected rule name\"",
",",
"rule_name",
")"
] |
Convert a protobuf GC rule to a native object.
:type gc_rule_pb: :class:`.table_v2_pb2.GcRule`
:param gc_rule_pb: The GC rule to convert.
:rtype: :class:`GarbageCollectionRule` or :data:`NoneType <types.NoneType>`
:returns: An instance of one of the native rules defined
in :module:`column_family` or :data:`None` if no values were
set on the protobuf passed in.
:raises: :class:`ValueError <exceptions.ValueError>` if the rule name
is unexpected.
|
[
"Convert",
"a",
"protobuf",
"GC",
"rule",
"to",
"a",
"native",
"object",
"."
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/bigtable/google/cloud/bigtable/column_family.py#L323-L351
|
train
|
googleapis/google-cloud-python
|
bigtable/google/cloud/bigtable/column_family.py
|
MaxAgeGCRule.to_pb
|
def to_pb(self):
"""Converts the garbage collection rule to a protobuf.
:rtype: :class:`.table_v2_pb2.GcRule`
:returns: The converted current object.
"""
max_age = _helpers._timedelta_to_duration_pb(self.max_age)
return table_v2_pb2.GcRule(max_age=max_age)
|
python
|
def to_pb(self):
"""Converts the garbage collection rule to a protobuf.
:rtype: :class:`.table_v2_pb2.GcRule`
:returns: The converted current object.
"""
max_age = _helpers._timedelta_to_duration_pb(self.max_age)
return table_v2_pb2.GcRule(max_age=max_age)
|
[
"def",
"to_pb",
"(",
"self",
")",
":",
"max_age",
"=",
"_helpers",
".",
"_timedelta_to_duration_pb",
"(",
"self",
".",
"max_age",
")",
"return",
"table_v2_pb2",
".",
"GcRule",
"(",
"max_age",
"=",
"max_age",
")"
] |
Converts the garbage collection rule to a protobuf.
:rtype: :class:`.table_v2_pb2.GcRule`
:returns: The converted current object.
|
[
"Converts",
"the",
"garbage",
"collection",
"rule",
"to",
"a",
"protobuf",
"."
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/bigtable/google/cloud/bigtable/column_family.py#L100-L107
|
train
|
googleapis/google-cloud-python
|
bigtable/google/cloud/bigtable/column_family.py
|
GCRuleUnion.to_pb
|
def to_pb(self):
"""Converts the union into a single GC rule as a protobuf.
:rtype: :class:`.table_v2_pb2.GcRule`
:returns: The converted current object.
"""
union = table_v2_pb2.GcRule.Union(rules=[rule.to_pb() for rule in self.rules])
return table_v2_pb2.GcRule(union=union)
|
python
|
def to_pb(self):
"""Converts the union into a single GC rule as a protobuf.
:rtype: :class:`.table_v2_pb2.GcRule`
:returns: The converted current object.
"""
union = table_v2_pb2.GcRule.Union(rules=[rule.to_pb() for rule in self.rules])
return table_v2_pb2.GcRule(union=union)
|
[
"def",
"to_pb",
"(",
"self",
")",
":",
"union",
"=",
"table_v2_pb2",
".",
"GcRule",
".",
"Union",
"(",
"rules",
"=",
"[",
"rule",
".",
"to_pb",
"(",
")",
"for",
"rule",
"in",
"self",
".",
"rules",
"]",
")",
"return",
"table_v2_pb2",
".",
"GcRule",
"(",
"union",
"=",
"union",
")"
] |
Converts the union into a single GC rule as a protobuf.
:rtype: :class:`.table_v2_pb2.GcRule`
:returns: The converted current object.
|
[
"Converts",
"the",
"union",
"into",
"a",
"single",
"GC",
"rule",
"as",
"a",
"protobuf",
"."
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/bigtable/google/cloud/bigtable/column_family.py#L134-L141
|
train
|
googleapis/google-cloud-python
|
bigtable/google/cloud/bigtable/column_family.py
|
GCRuleIntersection.to_pb
|
def to_pb(self):
"""Converts the intersection into a single GC rule as a protobuf.
:rtype: :class:`.table_v2_pb2.GcRule`
:returns: The converted current object.
"""
intersection = table_v2_pb2.GcRule.Intersection(
rules=[rule.to_pb() for rule in self.rules]
)
return table_v2_pb2.GcRule(intersection=intersection)
|
python
|
def to_pb(self):
"""Converts the intersection into a single GC rule as a protobuf.
:rtype: :class:`.table_v2_pb2.GcRule`
:returns: The converted current object.
"""
intersection = table_v2_pb2.GcRule.Intersection(
rules=[rule.to_pb() for rule in self.rules]
)
return table_v2_pb2.GcRule(intersection=intersection)
|
[
"def",
"to_pb",
"(",
"self",
")",
":",
"intersection",
"=",
"table_v2_pb2",
".",
"GcRule",
".",
"Intersection",
"(",
"rules",
"=",
"[",
"rule",
".",
"to_pb",
"(",
")",
"for",
"rule",
"in",
"self",
".",
"rules",
"]",
")",
"return",
"table_v2_pb2",
".",
"GcRule",
"(",
"intersection",
"=",
"intersection",
")"
] |
Converts the intersection into a single GC rule as a protobuf.
:rtype: :class:`.table_v2_pb2.GcRule`
:returns: The converted current object.
|
[
"Converts",
"the",
"intersection",
"into",
"a",
"single",
"GC",
"rule",
"as",
"a",
"protobuf",
"."
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/bigtable/google/cloud/bigtable/column_family.py#L168-L177
|
train
|
googleapis/google-cloud-python
|
bigtable/google/cloud/bigtable/column_family.py
|
ColumnFamily.to_pb
|
def to_pb(self):
"""Converts the column family to a protobuf.
:rtype: :class:`.table_v2_pb2.ColumnFamily`
:returns: The converted current object.
"""
if self.gc_rule is None:
return table_v2_pb2.ColumnFamily()
else:
return table_v2_pb2.ColumnFamily(gc_rule=self.gc_rule.to_pb())
|
python
|
def to_pb(self):
"""Converts the column family to a protobuf.
:rtype: :class:`.table_v2_pb2.ColumnFamily`
:returns: The converted current object.
"""
if self.gc_rule is None:
return table_v2_pb2.ColumnFamily()
else:
return table_v2_pb2.ColumnFamily(gc_rule=self.gc_rule.to_pb())
|
[
"def",
"to_pb",
"(",
"self",
")",
":",
"if",
"self",
".",
"gc_rule",
"is",
"None",
":",
"return",
"table_v2_pb2",
".",
"ColumnFamily",
"(",
")",
"else",
":",
"return",
"table_v2_pb2",
".",
"ColumnFamily",
"(",
"gc_rule",
"=",
"self",
".",
"gc_rule",
".",
"to_pb",
"(",
")",
")"
] |
Converts the column family to a protobuf.
:rtype: :class:`.table_v2_pb2.ColumnFamily`
:returns: The converted current object.
|
[
"Converts",
"the",
"column",
"family",
"to",
"a",
"protobuf",
"."
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/bigtable/google/cloud/bigtable/column_family.py#L242-L251
|
train
|
googleapis/google-cloud-python
|
bigtable/google/cloud/bigtable/column_family.py
|
ColumnFamily.create
|
def create(self):
"""Create this column family.
For example:
.. literalinclude:: snippets_table.py
:start-after: [START bigtable_create_column_family]
:end-before: [END bigtable_create_column_family]
"""
column_family = self.to_pb()
modification = table_admin_v2_pb2.ModifyColumnFamiliesRequest.Modification(
id=self.column_family_id, create=column_family
)
client = self._table._instance._client
# data it contains are the GC rule and the column family ID already
# stored on this instance.
client.table_admin_client.modify_column_families(
self._table.name, [modification]
)
|
python
|
def create(self):
"""Create this column family.
For example:
.. literalinclude:: snippets_table.py
:start-after: [START bigtable_create_column_family]
:end-before: [END bigtable_create_column_family]
"""
column_family = self.to_pb()
modification = table_admin_v2_pb2.ModifyColumnFamiliesRequest.Modification(
id=self.column_family_id, create=column_family
)
client = self._table._instance._client
# data it contains are the GC rule and the column family ID already
# stored on this instance.
client.table_admin_client.modify_column_families(
self._table.name, [modification]
)
|
[
"def",
"create",
"(",
"self",
")",
":",
"column_family",
"=",
"self",
".",
"to_pb",
"(",
")",
"modification",
"=",
"table_admin_v2_pb2",
".",
"ModifyColumnFamiliesRequest",
".",
"Modification",
"(",
"id",
"=",
"self",
".",
"column_family_id",
",",
"create",
"=",
"column_family",
")",
"client",
"=",
"self",
".",
"_table",
".",
"_instance",
".",
"_client",
"# data it contains are the GC rule and the column family ID already",
"# stored on this instance.",
"client",
".",
"table_admin_client",
".",
"modify_column_families",
"(",
"self",
".",
"_table",
".",
"name",
",",
"[",
"modification",
"]",
")"
] |
Create this column family.
For example:
.. literalinclude:: snippets_table.py
:start-after: [START bigtable_create_column_family]
:end-before: [END bigtable_create_column_family]
|
[
"Create",
"this",
"column",
"family",
"."
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/bigtable/google/cloud/bigtable/column_family.py#L253-L273
|
train
|
googleapis/google-cloud-python
|
bigtable/google/cloud/bigtable/column_family.py
|
ColumnFamily.delete
|
def delete(self):
"""Delete this column family.
For example:
.. literalinclude:: snippets_table.py
:start-after: [START bigtable_delete_column_family]
:end-before: [END bigtable_delete_column_family]
"""
modification = table_admin_v2_pb2.ModifyColumnFamiliesRequest.Modification(
id=self.column_family_id, drop=True
)
client = self._table._instance._client
# data it contains are the GC rule and the column family ID already
# stored on this instance.
client.table_admin_client.modify_column_families(
self._table.name, [modification]
)
|
python
|
def delete(self):
"""Delete this column family.
For example:
.. literalinclude:: snippets_table.py
:start-after: [START bigtable_delete_column_family]
:end-before: [END bigtable_delete_column_family]
"""
modification = table_admin_v2_pb2.ModifyColumnFamiliesRequest.Modification(
id=self.column_family_id, drop=True
)
client = self._table._instance._client
# data it contains are the GC rule and the column family ID already
# stored on this instance.
client.table_admin_client.modify_column_families(
self._table.name, [modification]
)
|
[
"def",
"delete",
"(",
"self",
")",
":",
"modification",
"=",
"table_admin_v2_pb2",
".",
"ModifyColumnFamiliesRequest",
".",
"Modification",
"(",
"id",
"=",
"self",
".",
"column_family_id",
",",
"drop",
"=",
"True",
")",
"client",
"=",
"self",
".",
"_table",
".",
"_instance",
".",
"_client",
"# data it contains are the GC rule and the column family ID already",
"# stored on this instance.",
"client",
".",
"table_admin_client",
".",
"modify_column_families",
"(",
"self",
".",
"_table",
".",
"name",
",",
"[",
"modification",
"]",
")"
] |
Delete this column family.
For example:
.. literalinclude:: snippets_table.py
:start-after: [START bigtable_delete_column_family]
:end-before: [END bigtable_delete_column_family]
|
[
"Delete",
"this",
"column",
"family",
"."
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/bigtable/google/cloud/bigtable/column_family.py#L301-L320
|
train
|
googleapis/google-cloud-python
|
firestore/google/cloud/firestore_v1beta1/watch.py
|
_maybe_wrap_exception
|
def _maybe_wrap_exception(exception):
"""Wraps a gRPC exception class, if needed."""
if isinstance(exception, grpc.RpcError):
return exceptions.from_grpc_error(exception)
return exception
|
python
|
def _maybe_wrap_exception(exception):
"""Wraps a gRPC exception class, if needed."""
if isinstance(exception, grpc.RpcError):
return exceptions.from_grpc_error(exception)
return exception
|
[
"def",
"_maybe_wrap_exception",
"(",
"exception",
")",
":",
"if",
"isinstance",
"(",
"exception",
",",
"grpc",
".",
"RpcError",
")",
":",
"return",
"exceptions",
".",
"from_grpc_error",
"(",
"exception",
")",
"return",
"exception"
] |
Wraps a gRPC exception class, if needed.
|
[
"Wraps",
"a",
"gRPC",
"exception",
"class",
"if",
"needed",
"."
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/firestore/google/cloud/firestore_v1beta1/watch.py#L144-L148
|
train
|
googleapis/google-cloud-python
|
firestore/google/cloud/firestore_v1beta1/watch.py
|
Watch.close
|
def close(self, reason=None):
"""Stop consuming messages and shutdown all helper threads.
This method is idempotent. Additional calls will have no effect.
Args:
reason (Any): The reason to close this. If None, this is considered
an "intentional" shutdown.
"""
with self._closing:
if self._closed:
return
# Stop consuming messages.
if self.is_active:
_LOGGER.debug("Stopping consumer.")
self._consumer.stop()
self._consumer = None
self._rpc.close()
self._rpc = None
self._closed = True
_LOGGER.debug("Finished stopping manager.")
if reason:
# Raise an exception if a reason is provided
_LOGGER.debug("reason for closing: %s" % reason)
if isinstance(reason, Exception):
raise reason
raise RuntimeError(reason)
|
python
|
def close(self, reason=None):
"""Stop consuming messages and shutdown all helper threads.
This method is idempotent. Additional calls will have no effect.
Args:
reason (Any): The reason to close this. If None, this is considered
an "intentional" shutdown.
"""
with self._closing:
if self._closed:
return
# Stop consuming messages.
if self.is_active:
_LOGGER.debug("Stopping consumer.")
self._consumer.stop()
self._consumer = None
self._rpc.close()
self._rpc = None
self._closed = True
_LOGGER.debug("Finished stopping manager.")
if reason:
# Raise an exception if a reason is provided
_LOGGER.debug("reason for closing: %s" % reason)
if isinstance(reason, Exception):
raise reason
raise RuntimeError(reason)
|
[
"def",
"close",
"(",
"self",
",",
"reason",
"=",
"None",
")",
":",
"with",
"self",
".",
"_closing",
":",
"if",
"self",
".",
"_closed",
":",
"return",
"# Stop consuming messages.",
"if",
"self",
".",
"is_active",
":",
"_LOGGER",
".",
"debug",
"(",
"\"Stopping consumer.\"",
")",
"self",
".",
"_consumer",
".",
"stop",
"(",
")",
"self",
".",
"_consumer",
"=",
"None",
"self",
".",
"_rpc",
".",
"close",
"(",
")",
"self",
".",
"_rpc",
"=",
"None",
"self",
".",
"_closed",
"=",
"True",
"_LOGGER",
".",
"debug",
"(",
"\"Finished stopping manager.\"",
")",
"if",
"reason",
":",
"# Raise an exception if a reason is provided",
"_LOGGER",
".",
"debug",
"(",
"\"reason for closing: %s\"",
"%",
"reason",
")",
"if",
"isinstance",
"(",
"reason",
",",
"Exception",
")",
":",
"raise",
"reason",
"raise",
"RuntimeError",
"(",
"reason",
")"
] |
Stop consuming messages and shutdown all helper threads.
This method is idempotent. Additional calls will have no effect.
Args:
reason (Any): The reason to close this. If None, this is considered
an "intentional" shutdown.
|
[
"Stop",
"consuming",
"messages",
"and",
"shutdown",
"all",
"helper",
"threads",
"."
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/firestore/google/cloud/firestore_v1beta1/watch.py#L262-L291
|
train
|
googleapis/google-cloud-python
|
firestore/google/cloud/firestore_v1beta1/watch.py
|
Watch._on_rpc_done
|
def _on_rpc_done(self, future):
"""Triggered whenever the underlying RPC terminates without recovery.
This is typically triggered from one of two threads: the background
consumer thread (when calling ``recv()`` produces a non-recoverable
error) or the grpc management thread (when cancelling the RPC).
This method is *non-blocking*. It will start another thread to deal
with shutting everything down. This is to prevent blocking in the
background consumer and preventing it from being ``joined()``.
"""
_LOGGER.info("RPC termination has signaled manager shutdown.")
future = _maybe_wrap_exception(future)
thread = threading.Thread(
name=_RPC_ERROR_THREAD_NAME, target=self.close, kwargs={"reason": future}
)
thread.daemon = True
thread.start()
|
python
|
def _on_rpc_done(self, future):
"""Triggered whenever the underlying RPC terminates without recovery.
This is typically triggered from one of two threads: the background
consumer thread (when calling ``recv()`` produces a non-recoverable
error) or the grpc management thread (when cancelling the RPC).
This method is *non-blocking*. It will start another thread to deal
with shutting everything down. This is to prevent blocking in the
background consumer and preventing it from being ``joined()``.
"""
_LOGGER.info("RPC termination has signaled manager shutdown.")
future = _maybe_wrap_exception(future)
thread = threading.Thread(
name=_RPC_ERROR_THREAD_NAME, target=self.close, kwargs={"reason": future}
)
thread.daemon = True
thread.start()
|
[
"def",
"_on_rpc_done",
"(",
"self",
",",
"future",
")",
":",
"_LOGGER",
".",
"info",
"(",
"\"RPC termination has signaled manager shutdown.\"",
")",
"future",
"=",
"_maybe_wrap_exception",
"(",
"future",
")",
"thread",
"=",
"threading",
".",
"Thread",
"(",
"name",
"=",
"_RPC_ERROR_THREAD_NAME",
",",
"target",
"=",
"self",
".",
"close",
",",
"kwargs",
"=",
"{",
"\"reason\"",
":",
"future",
"}",
")",
"thread",
".",
"daemon",
"=",
"True",
"thread",
".",
"start",
"(",
")"
] |
Triggered whenever the underlying RPC terminates without recovery.
This is typically triggered from one of two threads: the background
consumer thread (when calling ``recv()`` produces a non-recoverable
error) or the grpc management thread (when cancelling the RPC).
This method is *non-blocking*. It will start another thread to deal
with shutting everything down. This is to prevent blocking in the
background consumer and preventing it from being ``joined()``.
|
[
"Triggered",
"whenever",
"the",
"underlying",
"RPC",
"terminates",
"without",
"recovery",
"."
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/firestore/google/cloud/firestore_v1beta1/watch.py#L293-L310
|
train
|
googleapis/google-cloud-python
|
firestore/google/cloud/firestore_v1beta1/watch.py
|
Watch.for_document
|
def for_document(
cls,
document_ref,
snapshot_callback,
snapshot_class_instance,
reference_class_instance,
):
"""
Creates a watch snapshot listener for a document. snapshot_callback
receives a DocumentChange object, but may also start to get
targetChange and such soon
Args:
document_ref: Reference to Document
snapshot_callback: callback to be called on snapshot
snapshot_class_instance: instance of DocumentSnapshot to make
snapshots with to pass to snapshot_callback
reference_class_instance: instance of DocumentReference to make
references
"""
return cls(
document_ref,
document_ref._client,
{
"documents": {"documents": [document_ref._document_path]},
"target_id": WATCH_TARGET_ID,
},
document_watch_comparator,
snapshot_callback,
snapshot_class_instance,
reference_class_instance,
)
|
python
|
def for_document(
cls,
document_ref,
snapshot_callback,
snapshot_class_instance,
reference_class_instance,
):
"""
Creates a watch snapshot listener for a document. snapshot_callback
receives a DocumentChange object, but may also start to get
targetChange and such soon
Args:
document_ref: Reference to Document
snapshot_callback: callback to be called on snapshot
snapshot_class_instance: instance of DocumentSnapshot to make
snapshots with to pass to snapshot_callback
reference_class_instance: instance of DocumentReference to make
references
"""
return cls(
document_ref,
document_ref._client,
{
"documents": {"documents": [document_ref._document_path]},
"target_id": WATCH_TARGET_ID,
},
document_watch_comparator,
snapshot_callback,
snapshot_class_instance,
reference_class_instance,
)
|
[
"def",
"for_document",
"(",
"cls",
",",
"document_ref",
",",
"snapshot_callback",
",",
"snapshot_class_instance",
",",
"reference_class_instance",
",",
")",
":",
"return",
"cls",
"(",
"document_ref",
",",
"document_ref",
".",
"_client",
",",
"{",
"\"documents\"",
":",
"{",
"\"documents\"",
":",
"[",
"document_ref",
".",
"_document_path",
"]",
"}",
",",
"\"target_id\"",
":",
"WATCH_TARGET_ID",
",",
"}",
",",
"document_watch_comparator",
",",
"snapshot_callback",
",",
"snapshot_class_instance",
",",
"reference_class_instance",
",",
")"
] |
Creates a watch snapshot listener for a document. snapshot_callback
receives a DocumentChange object, but may also start to get
targetChange and such soon
Args:
document_ref: Reference to Document
snapshot_callback: callback to be called on snapshot
snapshot_class_instance: instance of DocumentSnapshot to make
snapshots with to pass to snapshot_callback
reference_class_instance: instance of DocumentReference to make
references
|
[
"Creates",
"a",
"watch",
"snapshot",
"listener",
"for",
"a",
"document",
".",
"snapshot_callback",
"receives",
"a",
"DocumentChange",
"object",
"but",
"may",
"also",
"start",
"to",
"get",
"targetChange",
"and",
"such",
"soon"
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/firestore/google/cloud/firestore_v1beta1/watch.py#L316-L348
|
train
|
googleapis/google-cloud-python
|
firestore/google/cloud/firestore_v1beta1/watch.py
|
Watch.on_snapshot
|
def on_snapshot(self, proto):
"""
Called everytime there is a response from listen. Collect changes
and 'push' the changes in a batch to the customer when we receive
'current' from the listen response.
Args:
listen_response(`google.cloud.firestore_v1beta1.types.ListenResponse`):
Callback method that receives a object to
"""
TargetChange = firestore_pb2.TargetChange
target_changetype_dispatch = {
TargetChange.NO_CHANGE: self._on_snapshot_target_change_no_change,
TargetChange.ADD: self._on_snapshot_target_change_add,
TargetChange.REMOVE: self._on_snapshot_target_change_remove,
TargetChange.RESET: self._on_snapshot_target_change_reset,
TargetChange.CURRENT: self._on_snapshot_target_change_current,
}
target_change = proto.target_change
if str(target_change):
target_change_type = target_change.target_change_type
_LOGGER.debug("on_snapshot: target change: " + str(target_change_type))
meth = target_changetype_dispatch.get(target_change_type)
if meth is None:
_LOGGER.info(
"on_snapshot: Unknown target change " + str(target_change_type)
)
self.close(
reason="Unknown target change type: %s " % str(target_change_type)
)
else:
try:
meth(proto)
except Exception as exc2:
_LOGGER.debug("meth(proto) exc: " + str(exc2))
raise
# NOTE:
# in other implementations, such as node, the backoff is reset here
# in this version bidi rpc is just used and will control this.
elif str(proto.document_change):
_LOGGER.debug("on_snapshot: document change")
# No other target_ids can show up here, but we still need to see
# if the targetId was in the added list or removed list.
target_ids = proto.document_change.target_ids or []
removed_target_ids = proto.document_change.removed_target_ids or []
changed = False
removed = False
if WATCH_TARGET_ID in target_ids:
changed = True
if WATCH_TARGET_ID in removed_target_ids:
removed = True
if changed:
_LOGGER.debug("on_snapshot: document change: CHANGED")
# google.cloud.firestore_v1beta1.types.DocumentChange
document_change = proto.document_change
# google.cloud.firestore_v1beta1.types.Document
document = document_change.document
data = _helpers.decode_dict(document.fields, self._firestore)
# Create a snapshot. As Document and Query objects can be
# passed we need to get a Document Reference in a more manual
# fashion than self._document_reference
document_name = document.name
db_str = self._firestore._database_string
db_str_documents = db_str + "/documents/"
if document_name.startswith(db_str_documents):
document_name = document_name[len(db_str_documents) :]
document_ref = self._firestore.document(document_name)
snapshot = self.DocumentSnapshot(
reference=document_ref,
data=data,
exists=True,
read_time=None,
create_time=document.create_time,
update_time=document.update_time,
)
self.change_map[document.name] = snapshot
elif removed:
_LOGGER.debug("on_snapshot: document change: REMOVED")
document = proto.document_change.document
self.change_map[document.name] = ChangeType.REMOVED
# NB: document_delete and document_remove (as far as we, the client,
# are concerned) are functionally equivalent
elif str(proto.document_delete):
_LOGGER.debug("on_snapshot: document change: DELETE")
name = proto.document_delete.document
self.change_map[name] = ChangeType.REMOVED
elif str(proto.document_remove):
_LOGGER.debug("on_snapshot: document change: REMOVE")
name = proto.document_remove.document
self.change_map[name] = ChangeType.REMOVED
elif proto.filter:
_LOGGER.debug("on_snapshot: filter update")
if proto.filter.count != self._current_size():
# We need to remove all the current results.
self._reset_docs()
# The filter didn't match, so re-issue the query.
# TODO: reset stream method?
# self._reset_stream();
else:
_LOGGER.debug("UNKNOWN TYPE. UHOH")
self.close(reason=ValueError("Unknown listen response type: %s" % proto))
|
python
|
def on_snapshot(self, proto):
"""
Called everytime there is a response from listen. Collect changes
and 'push' the changes in a batch to the customer when we receive
'current' from the listen response.
Args:
listen_response(`google.cloud.firestore_v1beta1.types.ListenResponse`):
Callback method that receives a object to
"""
TargetChange = firestore_pb2.TargetChange
target_changetype_dispatch = {
TargetChange.NO_CHANGE: self._on_snapshot_target_change_no_change,
TargetChange.ADD: self._on_snapshot_target_change_add,
TargetChange.REMOVE: self._on_snapshot_target_change_remove,
TargetChange.RESET: self._on_snapshot_target_change_reset,
TargetChange.CURRENT: self._on_snapshot_target_change_current,
}
target_change = proto.target_change
if str(target_change):
target_change_type = target_change.target_change_type
_LOGGER.debug("on_snapshot: target change: " + str(target_change_type))
meth = target_changetype_dispatch.get(target_change_type)
if meth is None:
_LOGGER.info(
"on_snapshot: Unknown target change " + str(target_change_type)
)
self.close(
reason="Unknown target change type: %s " % str(target_change_type)
)
else:
try:
meth(proto)
except Exception as exc2:
_LOGGER.debug("meth(proto) exc: " + str(exc2))
raise
# NOTE:
# in other implementations, such as node, the backoff is reset here
# in this version bidi rpc is just used and will control this.
elif str(proto.document_change):
_LOGGER.debug("on_snapshot: document change")
# No other target_ids can show up here, but we still need to see
# if the targetId was in the added list or removed list.
target_ids = proto.document_change.target_ids or []
removed_target_ids = proto.document_change.removed_target_ids or []
changed = False
removed = False
if WATCH_TARGET_ID in target_ids:
changed = True
if WATCH_TARGET_ID in removed_target_ids:
removed = True
if changed:
_LOGGER.debug("on_snapshot: document change: CHANGED")
# google.cloud.firestore_v1beta1.types.DocumentChange
document_change = proto.document_change
# google.cloud.firestore_v1beta1.types.Document
document = document_change.document
data = _helpers.decode_dict(document.fields, self._firestore)
# Create a snapshot. As Document and Query objects can be
# passed we need to get a Document Reference in a more manual
# fashion than self._document_reference
document_name = document.name
db_str = self._firestore._database_string
db_str_documents = db_str + "/documents/"
if document_name.startswith(db_str_documents):
document_name = document_name[len(db_str_documents) :]
document_ref = self._firestore.document(document_name)
snapshot = self.DocumentSnapshot(
reference=document_ref,
data=data,
exists=True,
read_time=None,
create_time=document.create_time,
update_time=document.update_time,
)
self.change_map[document.name] = snapshot
elif removed:
_LOGGER.debug("on_snapshot: document change: REMOVED")
document = proto.document_change.document
self.change_map[document.name] = ChangeType.REMOVED
# NB: document_delete and document_remove (as far as we, the client,
# are concerned) are functionally equivalent
elif str(proto.document_delete):
_LOGGER.debug("on_snapshot: document change: DELETE")
name = proto.document_delete.document
self.change_map[name] = ChangeType.REMOVED
elif str(proto.document_remove):
_LOGGER.debug("on_snapshot: document change: REMOVE")
name = proto.document_remove.document
self.change_map[name] = ChangeType.REMOVED
elif proto.filter:
_LOGGER.debug("on_snapshot: filter update")
if proto.filter.count != self._current_size():
# We need to remove all the current results.
self._reset_docs()
# The filter didn't match, so re-issue the query.
# TODO: reset stream method?
# self._reset_stream();
else:
_LOGGER.debug("UNKNOWN TYPE. UHOH")
self.close(reason=ValueError("Unknown listen response type: %s" % proto))
|
[
"def",
"on_snapshot",
"(",
"self",
",",
"proto",
")",
":",
"TargetChange",
"=",
"firestore_pb2",
".",
"TargetChange",
"target_changetype_dispatch",
"=",
"{",
"TargetChange",
".",
"NO_CHANGE",
":",
"self",
".",
"_on_snapshot_target_change_no_change",
",",
"TargetChange",
".",
"ADD",
":",
"self",
".",
"_on_snapshot_target_change_add",
",",
"TargetChange",
".",
"REMOVE",
":",
"self",
".",
"_on_snapshot_target_change_remove",
",",
"TargetChange",
".",
"RESET",
":",
"self",
".",
"_on_snapshot_target_change_reset",
",",
"TargetChange",
".",
"CURRENT",
":",
"self",
".",
"_on_snapshot_target_change_current",
",",
"}",
"target_change",
"=",
"proto",
".",
"target_change",
"if",
"str",
"(",
"target_change",
")",
":",
"target_change_type",
"=",
"target_change",
".",
"target_change_type",
"_LOGGER",
".",
"debug",
"(",
"\"on_snapshot: target change: \"",
"+",
"str",
"(",
"target_change_type",
")",
")",
"meth",
"=",
"target_changetype_dispatch",
".",
"get",
"(",
"target_change_type",
")",
"if",
"meth",
"is",
"None",
":",
"_LOGGER",
".",
"info",
"(",
"\"on_snapshot: Unknown target change \"",
"+",
"str",
"(",
"target_change_type",
")",
")",
"self",
".",
"close",
"(",
"reason",
"=",
"\"Unknown target change type: %s \"",
"%",
"str",
"(",
"target_change_type",
")",
")",
"else",
":",
"try",
":",
"meth",
"(",
"proto",
")",
"except",
"Exception",
"as",
"exc2",
":",
"_LOGGER",
".",
"debug",
"(",
"\"meth(proto) exc: \"",
"+",
"str",
"(",
"exc2",
")",
")",
"raise",
"# NOTE:",
"# in other implementations, such as node, the backoff is reset here",
"# in this version bidi rpc is just used and will control this.",
"elif",
"str",
"(",
"proto",
".",
"document_change",
")",
":",
"_LOGGER",
".",
"debug",
"(",
"\"on_snapshot: document change\"",
")",
"# No other target_ids can show up here, but we still need to see",
"# if the targetId was in the added list or removed list.",
"target_ids",
"=",
"proto",
".",
"document_change",
".",
"target_ids",
"or",
"[",
"]",
"removed_target_ids",
"=",
"proto",
".",
"document_change",
".",
"removed_target_ids",
"or",
"[",
"]",
"changed",
"=",
"False",
"removed",
"=",
"False",
"if",
"WATCH_TARGET_ID",
"in",
"target_ids",
":",
"changed",
"=",
"True",
"if",
"WATCH_TARGET_ID",
"in",
"removed_target_ids",
":",
"removed",
"=",
"True",
"if",
"changed",
":",
"_LOGGER",
".",
"debug",
"(",
"\"on_snapshot: document change: CHANGED\"",
")",
"# google.cloud.firestore_v1beta1.types.DocumentChange",
"document_change",
"=",
"proto",
".",
"document_change",
"# google.cloud.firestore_v1beta1.types.Document",
"document",
"=",
"document_change",
".",
"document",
"data",
"=",
"_helpers",
".",
"decode_dict",
"(",
"document",
".",
"fields",
",",
"self",
".",
"_firestore",
")",
"# Create a snapshot. As Document and Query objects can be",
"# passed we need to get a Document Reference in a more manual",
"# fashion than self._document_reference",
"document_name",
"=",
"document",
".",
"name",
"db_str",
"=",
"self",
".",
"_firestore",
".",
"_database_string",
"db_str_documents",
"=",
"db_str",
"+",
"\"/documents/\"",
"if",
"document_name",
".",
"startswith",
"(",
"db_str_documents",
")",
":",
"document_name",
"=",
"document_name",
"[",
"len",
"(",
"db_str_documents",
")",
":",
"]",
"document_ref",
"=",
"self",
".",
"_firestore",
".",
"document",
"(",
"document_name",
")",
"snapshot",
"=",
"self",
".",
"DocumentSnapshot",
"(",
"reference",
"=",
"document_ref",
",",
"data",
"=",
"data",
",",
"exists",
"=",
"True",
",",
"read_time",
"=",
"None",
",",
"create_time",
"=",
"document",
".",
"create_time",
",",
"update_time",
"=",
"document",
".",
"update_time",
",",
")",
"self",
".",
"change_map",
"[",
"document",
".",
"name",
"]",
"=",
"snapshot",
"elif",
"removed",
":",
"_LOGGER",
".",
"debug",
"(",
"\"on_snapshot: document change: REMOVED\"",
")",
"document",
"=",
"proto",
".",
"document_change",
".",
"document",
"self",
".",
"change_map",
"[",
"document",
".",
"name",
"]",
"=",
"ChangeType",
".",
"REMOVED",
"# NB: document_delete and document_remove (as far as we, the client,",
"# are concerned) are functionally equivalent",
"elif",
"str",
"(",
"proto",
".",
"document_delete",
")",
":",
"_LOGGER",
".",
"debug",
"(",
"\"on_snapshot: document change: DELETE\"",
")",
"name",
"=",
"proto",
".",
"document_delete",
".",
"document",
"self",
".",
"change_map",
"[",
"name",
"]",
"=",
"ChangeType",
".",
"REMOVED",
"elif",
"str",
"(",
"proto",
".",
"document_remove",
")",
":",
"_LOGGER",
".",
"debug",
"(",
"\"on_snapshot: document change: REMOVE\"",
")",
"name",
"=",
"proto",
".",
"document_remove",
".",
"document",
"self",
".",
"change_map",
"[",
"name",
"]",
"=",
"ChangeType",
".",
"REMOVED",
"elif",
"proto",
".",
"filter",
":",
"_LOGGER",
".",
"debug",
"(",
"\"on_snapshot: filter update\"",
")",
"if",
"proto",
".",
"filter",
".",
"count",
"!=",
"self",
".",
"_current_size",
"(",
")",
":",
"# We need to remove all the current results.",
"self",
".",
"_reset_docs",
"(",
")",
"# The filter didn't match, so re-issue the query.",
"# TODO: reset stream method?",
"# self._reset_stream();",
"else",
":",
"_LOGGER",
".",
"debug",
"(",
"\"UNKNOWN TYPE. UHOH\"",
")",
"self",
".",
"close",
"(",
"reason",
"=",
"ValueError",
"(",
"\"Unknown listen response type: %s\"",
"%",
"proto",
")",
")"
] |
Called everytime there is a response from listen. Collect changes
and 'push' the changes in a batch to the customer when we receive
'current' from the listen response.
Args:
listen_response(`google.cloud.firestore_v1beta1.types.ListenResponse`):
Callback method that receives a object to
|
[
"Called",
"everytime",
"there",
"is",
"a",
"response",
"from",
"listen",
".",
"Collect",
"changes",
"and",
"push",
"the",
"changes",
"in",
"a",
"batch",
"to",
"the",
"customer",
"when",
"we",
"receive",
"current",
"from",
"the",
"listen",
"response",
"."
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/firestore/google/cloud/firestore_v1beta1/watch.py#L408-L527
|
train
|
googleapis/google-cloud-python
|
firestore/google/cloud/firestore_v1beta1/watch.py
|
Watch.push
|
def push(self, read_time, next_resume_token):
"""
Assembles a new snapshot from the current set of changes and invokes
the user's callback. Clears the current changes on completion.
"""
deletes, adds, updates = Watch._extract_changes(
self.doc_map, self.change_map, read_time
)
updated_tree, updated_map, appliedChanges = self._compute_snapshot(
self.doc_tree, self.doc_map, deletes, adds, updates
)
if not self.has_pushed or len(appliedChanges):
# TODO: It is possible in the future we will have the tree order
# on insert. For now, we sort here.
key = functools.cmp_to_key(self._comparator)
keys = sorted(updated_tree.keys(), key=key)
self._snapshot_callback(
keys,
appliedChanges,
datetime.datetime.fromtimestamp(read_time.seconds, pytz.utc),
)
self.has_pushed = True
self.doc_tree = updated_tree
self.doc_map = updated_map
self.change_map.clear()
self.resume_token = next_resume_token
|
python
|
def push(self, read_time, next_resume_token):
"""
Assembles a new snapshot from the current set of changes and invokes
the user's callback. Clears the current changes on completion.
"""
deletes, adds, updates = Watch._extract_changes(
self.doc_map, self.change_map, read_time
)
updated_tree, updated_map, appliedChanges = self._compute_snapshot(
self.doc_tree, self.doc_map, deletes, adds, updates
)
if not self.has_pushed or len(appliedChanges):
# TODO: It is possible in the future we will have the tree order
# on insert. For now, we sort here.
key = functools.cmp_to_key(self._comparator)
keys = sorted(updated_tree.keys(), key=key)
self._snapshot_callback(
keys,
appliedChanges,
datetime.datetime.fromtimestamp(read_time.seconds, pytz.utc),
)
self.has_pushed = True
self.doc_tree = updated_tree
self.doc_map = updated_map
self.change_map.clear()
self.resume_token = next_resume_token
|
[
"def",
"push",
"(",
"self",
",",
"read_time",
",",
"next_resume_token",
")",
":",
"deletes",
",",
"adds",
",",
"updates",
"=",
"Watch",
".",
"_extract_changes",
"(",
"self",
".",
"doc_map",
",",
"self",
".",
"change_map",
",",
"read_time",
")",
"updated_tree",
",",
"updated_map",
",",
"appliedChanges",
"=",
"self",
".",
"_compute_snapshot",
"(",
"self",
".",
"doc_tree",
",",
"self",
".",
"doc_map",
",",
"deletes",
",",
"adds",
",",
"updates",
")",
"if",
"not",
"self",
".",
"has_pushed",
"or",
"len",
"(",
"appliedChanges",
")",
":",
"# TODO: It is possible in the future we will have the tree order",
"# on insert. For now, we sort here.",
"key",
"=",
"functools",
".",
"cmp_to_key",
"(",
"self",
".",
"_comparator",
")",
"keys",
"=",
"sorted",
"(",
"updated_tree",
".",
"keys",
"(",
")",
",",
"key",
"=",
"key",
")",
"self",
".",
"_snapshot_callback",
"(",
"keys",
",",
"appliedChanges",
",",
"datetime",
".",
"datetime",
".",
"fromtimestamp",
"(",
"read_time",
".",
"seconds",
",",
"pytz",
".",
"utc",
")",
",",
")",
"self",
".",
"has_pushed",
"=",
"True",
"self",
".",
"doc_tree",
"=",
"updated_tree",
"self",
".",
"doc_map",
"=",
"updated_map",
"self",
".",
"change_map",
".",
"clear",
"(",
")",
"self",
".",
"resume_token",
"=",
"next_resume_token"
] |
Assembles a new snapshot from the current set of changes and invokes
the user's callback. Clears the current changes on completion.
|
[
"Assembles",
"a",
"new",
"snapshot",
"from",
"the",
"current",
"set",
"of",
"changes",
"and",
"invokes",
"the",
"user",
"s",
"callback",
".",
"Clears",
"the",
"current",
"changes",
"on",
"completion",
"."
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/firestore/google/cloud/firestore_v1beta1/watch.py#L529-L558
|
train
|
googleapis/google-cloud-python
|
firestore/google/cloud/firestore_v1beta1/watch.py
|
Watch._current_size
|
def _current_size(self):
"""
Returns the current count of all documents, including the changes from
the current changeMap.
"""
deletes, adds, _ = Watch._extract_changes(self.doc_map, self.change_map, None)
return len(self.doc_map) + len(adds) - len(deletes)
|
python
|
def _current_size(self):
"""
Returns the current count of all documents, including the changes from
the current changeMap.
"""
deletes, adds, _ = Watch._extract_changes(self.doc_map, self.change_map, None)
return len(self.doc_map) + len(adds) - len(deletes)
|
[
"def",
"_current_size",
"(",
"self",
")",
":",
"deletes",
",",
"adds",
",",
"_",
"=",
"Watch",
".",
"_extract_changes",
"(",
"self",
".",
"doc_map",
",",
"self",
".",
"change_map",
",",
"None",
")",
"return",
"len",
"(",
"self",
".",
"doc_map",
")",
"+",
"len",
"(",
"adds",
")",
"-",
"len",
"(",
"deletes",
")"
] |
Returns the current count of all documents, including the changes from
the current changeMap.
|
[
"Returns",
"the",
"current",
"count",
"of",
"all",
"documents",
"including",
"the",
"changes",
"from",
"the",
"current",
"changeMap",
"."
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/firestore/google/cloud/firestore_v1beta1/watch.py#L700-L706
|
train
|
googleapis/google-cloud-python
|
firestore/google/cloud/firestore_v1beta1/watch.py
|
Watch._reset_docs
|
def _reset_docs(self):
"""
Helper to clear the docs on RESET or filter mismatch.
"""
_LOGGER.debug("resetting documents")
self.change_map.clear()
self.resume_token = None
# Mark each document as deleted. If documents are not deleted
# they will be sent again by the server.
for snapshot in self.doc_tree.keys():
name = snapshot.reference._document_path
self.change_map[name] = ChangeType.REMOVED
self.current = False
|
python
|
def _reset_docs(self):
"""
Helper to clear the docs on RESET or filter mismatch.
"""
_LOGGER.debug("resetting documents")
self.change_map.clear()
self.resume_token = None
# Mark each document as deleted. If documents are not deleted
# they will be sent again by the server.
for snapshot in self.doc_tree.keys():
name = snapshot.reference._document_path
self.change_map[name] = ChangeType.REMOVED
self.current = False
|
[
"def",
"_reset_docs",
"(",
"self",
")",
":",
"_LOGGER",
".",
"debug",
"(",
"\"resetting documents\"",
")",
"self",
".",
"change_map",
".",
"clear",
"(",
")",
"self",
".",
"resume_token",
"=",
"None",
"# Mark each document as deleted. If documents are not deleted",
"# they will be sent again by the server.",
"for",
"snapshot",
"in",
"self",
".",
"doc_tree",
".",
"keys",
"(",
")",
":",
"name",
"=",
"snapshot",
".",
"reference",
".",
"_document_path",
"self",
".",
"change_map",
"[",
"name",
"]",
"=",
"ChangeType",
".",
"REMOVED",
"self",
".",
"current",
"=",
"False"
] |
Helper to clear the docs on RESET or filter mismatch.
|
[
"Helper",
"to",
"clear",
"the",
"docs",
"on",
"RESET",
"or",
"filter",
"mismatch",
"."
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/firestore/google/cloud/firestore_v1beta1/watch.py#L708-L722
|
train
|
googleapis/google-cloud-python
|
core/google/cloud/_http.py
|
JSONConnection.build_api_url
|
def build_api_url(
cls, path, query_params=None, api_base_url=None, api_version=None
):
"""Construct an API url given a few components, some optional.
Typically, you shouldn't need to use this method.
:type path: str
:param path: The path to the resource (ie, ``'/b/bucket-name'``).
:type query_params: dict or list
:param query_params: A dictionary of keys and values (or list of
key-value pairs) to insert into the query
string of the URL.
:type api_base_url: str
:param api_base_url: The base URL for the API endpoint.
Typically you won't have to provide this.
:type api_version: str
:param api_version: The version of the API to call.
Typically you shouldn't provide this and instead
use the default for the library.
:rtype: str
:returns: The URL assembled from the pieces provided.
"""
url = cls.API_URL_TEMPLATE.format(
api_base_url=(api_base_url or cls.API_BASE_URL),
api_version=(api_version or cls.API_VERSION),
path=path,
)
query_params = query_params or {}
if query_params:
url += "?" + urlencode(query_params, doseq=True)
return url
|
python
|
def build_api_url(
cls, path, query_params=None, api_base_url=None, api_version=None
):
"""Construct an API url given a few components, some optional.
Typically, you shouldn't need to use this method.
:type path: str
:param path: The path to the resource (ie, ``'/b/bucket-name'``).
:type query_params: dict or list
:param query_params: A dictionary of keys and values (or list of
key-value pairs) to insert into the query
string of the URL.
:type api_base_url: str
:param api_base_url: The base URL for the API endpoint.
Typically you won't have to provide this.
:type api_version: str
:param api_version: The version of the API to call.
Typically you shouldn't provide this and instead
use the default for the library.
:rtype: str
:returns: The URL assembled from the pieces provided.
"""
url = cls.API_URL_TEMPLATE.format(
api_base_url=(api_base_url or cls.API_BASE_URL),
api_version=(api_version or cls.API_VERSION),
path=path,
)
query_params = query_params or {}
if query_params:
url += "?" + urlencode(query_params, doseq=True)
return url
|
[
"def",
"build_api_url",
"(",
"cls",
",",
"path",
",",
"query_params",
"=",
"None",
",",
"api_base_url",
"=",
"None",
",",
"api_version",
"=",
"None",
")",
":",
"url",
"=",
"cls",
".",
"API_URL_TEMPLATE",
".",
"format",
"(",
"api_base_url",
"=",
"(",
"api_base_url",
"or",
"cls",
".",
"API_BASE_URL",
")",
",",
"api_version",
"=",
"(",
"api_version",
"or",
"cls",
".",
"API_VERSION",
")",
",",
"path",
"=",
"path",
",",
")",
"query_params",
"=",
"query_params",
"or",
"{",
"}",
"if",
"query_params",
":",
"url",
"+=",
"\"?\"",
"+",
"urlencode",
"(",
"query_params",
",",
"doseq",
"=",
"True",
")",
"return",
"url"
] |
Construct an API url given a few components, some optional.
Typically, you shouldn't need to use this method.
:type path: str
:param path: The path to the resource (ie, ``'/b/bucket-name'``).
:type query_params: dict or list
:param query_params: A dictionary of keys and values (or list of
key-value pairs) to insert into the query
string of the URL.
:type api_base_url: str
:param api_base_url: The base URL for the API endpoint.
Typically you won't have to provide this.
:type api_version: str
:param api_version: The version of the API to call.
Typically you shouldn't provide this and instead
use the default for the library.
:rtype: str
:returns: The URL assembled from the pieces provided.
|
[
"Construct",
"an",
"API",
"url",
"given",
"a",
"few",
"components",
"some",
"optional",
"."
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/core/google/cloud/_http.py#L105-L142
|
train
|
googleapis/google-cloud-python
|
core/google/cloud/_http.py
|
JSONConnection._make_request
|
def _make_request(
self,
method,
url,
data=None,
content_type=None,
headers=None,
target_object=None,
):
"""A low level method to send a request to the API.
Typically, you shouldn't need to use this method.
:type method: str
:param method: The HTTP method to use in the request.
:type url: str
:param url: The URL to send the request to.
:type data: str
:param data: The data to send as the body of the request.
:type content_type: str
:param content_type: The proper MIME type of the data provided.
:type headers: dict
:param headers: (Optional) A dictionary of HTTP headers to send with
the request. If passed, will be modified directly
here with added headers.
:type target_object: object
:param target_object:
(Optional) Argument to be used by library callers. This can allow
custom behavior, for example, to defer an HTTP request and complete
initialization of the object at a later time.
:rtype: :class:`requests.Response`
:returns: The HTTP response.
"""
headers = headers or {}
headers.update(self._EXTRA_HEADERS)
headers["Accept-Encoding"] = "gzip"
if content_type:
headers["Content-Type"] = content_type
headers["User-Agent"] = self.USER_AGENT
return self._do_request(method, url, headers, data, target_object)
|
python
|
def _make_request(
self,
method,
url,
data=None,
content_type=None,
headers=None,
target_object=None,
):
"""A low level method to send a request to the API.
Typically, you shouldn't need to use this method.
:type method: str
:param method: The HTTP method to use in the request.
:type url: str
:param url: The URL to send the request to.
:type data: str
:param data: The data to send as the body of the request.
:type content_type: str
:param content_type: The proper MIME type of the data provided.
:type headers: dict
:param headers: (Optional) A dictionary of HTTP headers to send with
the request. If passed, will be modified directly
here with added headers.
:type target_object: object
:param target_object:
(Optional) Argument to be used by library callers. This can allow
custom behavior, for example, to defer an HTTP request and complete
initialization of the object at a later time.
:rtype: :class:`requests.Response`
:returns: The HTTP response.
"""
headers = headers or {}
headers.update(self._EXTRA_HEADERS)
headers["Accept-Encoding"] = "gzip"
if content_type:
headers["Content-Type"] = content_type
headers["User-Agent"] = self.USER_AGENT
return self._do_request(method, url, headers, data, target_object)
|
[
"def",
"_make_request",
"(",
"self",
",",
"method",
",",
"url",
",",
"data",
"=",
"None",
",",
"content_type",
"=",
"None",
",",
"headers",
"=",
"None",
",",
"target_object",
"=",
"None",
",",
")",
":",
"headers",
"=",
"headers",
"or",
"{",
"}",
"headers",
".",
"update",
"(",
"self",
".",
"_EXTRA_HEADERS",
")",
"headers",
"[",
"\"Accept-Encoding\"",
"]",
"=",
"\"gzip\"",
"if",
"content_type",
":",
"headers",
"[",
"\"Content-Type\"",
"]",
"=",
"content_type",
"headers",
"[",
"\"User-Agent\"",
"]",
"=",
"self",
".",
"USER_AGENT",
"return",
"self",
".",
"_do_request",
"(",
"method",
",",
"url",
",",
"headers",
",",
"data",
",",
"target_object",
")"
] |
A low level method to send a request to the API.
Typically, you shouldn't need to use this method.
:type method: str
:param method: The HTTP method to use in the request.
:type url: str
:param url: The URL to send the request to.
:type data: str
:param data: The data to send as the body of the request.
:type content_type: str
:param content_type: The proper MIME type of the data provided.
:type headers: dict
:param headers: (Optional) A dictionary of HTTP headers to send with
the request. If passed, will be modified directly
here with added headers.
:type target_object: object
:param target_object:
(Optional) Argument to be used by library callers. This can allow
custom behavior, for example, to defer an HTTP request and complete
initialization of the object at a later time.
:rtype: :class:`requests.Response`
:returns: The HTTP response.
|
[
"A",
"low",
"level",
"method",
"to",
"send",
"a",
"request",
"to",
"the",
"API",
"."
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/core/google/cloud/_http.py#L144-L192
|
train
|
googleapis/google-cloud-python
|
core/google/cloud/_http.py
|
JSONConnection._do_request
|
def _do_request(
self, method, url, headers, data, target_object
): # pylint: disable=unused-argument
"""Low-level helper: perform the actual API request over HTTP.
Allows batch context managers to override and defer a request.
:type method: str
:param method: The HTTP method to use in the request.
:type url: str
:param url: The URL to send the request to.
:type headers: dict
:param headers: A dictionary of HTTP headers to send with the request.
:type data: str
:param data: The data to send as the body of the request.
:type target_object: object
:param target_object:
(Optional) Unused ``target_object`` here but may be used by a
superclass.
:rtype: :class:`requests.Response`
:returns: The HTTP response.
"""
return self.http.request(url=url, method=method, headers=headers, data=data)
|
python
|
def _do_request(
self, method, url, headers, data, target_object
): # pylint: disable=unused-argument
"""Low-level helper: perform the actual API request over HTTP.
Allows batch context managers to override and defer a request.
:type method: str
:param method: The HTTP method to use in the request.
:type url: str
:param url: The URL to send the request to.
:type headers: dict
:param headers: A dictionary of HTTP headers to send with the request.
:type data: str
:param data: The data to send as the body of the request.
:type target_object: object
:param target_object:
(Optional) Unused ``target_object`` here but may be used by a
superclass.
:rtype: :class:`requests.Response`
:returns: The HTTP response.
"""
return self.http.request(url=url, method=method, headers=headers, data=data)
|
[
"def",
"_do_request",
"(",
"self",
",",
"method",
",",
"url",
",",
"headers",
",",
"data",
",",
"target_object",
")",
":",
"# pylint: disable=unused-argument",
"return",
"self",
".",
"http",
".",
"request",
"(",
"url",
"=",
"url",
",",
"method",
"=",
"method",
",",
"headers",
"=",
"headers",
",",
"data",
"=",
"data",
")"
] |
Low-level helper: perform the actual API request over HTTP.
Allows batch context managers to override and defer a request.
:type method: str
:param method: The HTTP method to use in the request.
:type url: str
:param url: The URL to send the request to.
:type headers: dict
:param headers: A dictionary of HTTP headers to send with the request.
:type data: str
:param data: The data to send as the body of the request.
:type target_object: object
:param target_object:
(Optional) Unused ``target_object`` here but may be used by a
superclass.
:rtype: :class:`requests.Response`
:returns: The HTTP response.
|
[
"Low",
"-",
"level",
"helper",
":",
"perform",
"the",
"actual",
"API",
"request",
"over",
"HTTP",
"."
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/core/google/cloud/_http.py#L194-L221
|
train
|
googleapis/google-cloud-python
|
core/google/cloud/_http.py
|
JSONConnection.api_request
|
def api_request(
self,
method,
path,
query_params=None,
data=None,
content_type=None,
headers=None,
api_base_url=None,
api_version=None,
expect_json=True,
_target_object=None,
):
"""Make a request over the HTTP transport to the API.
You shouldn't need to use this method, but if you plan to
interact with the API using these primitives, this is the
correct one to use.
:type method: str
:param method: The HTTP method name (ie, ``GET``, ``POST``, etc).
Required.
:type path: str
:param path: The path to the resource (ie, ``'/b/bucket-name'``).
Required.
:type query_params: dict or list
:param query_params: A dictionary of keys and values (or list of
key-value pairs) to insert into the query
string of the URL.
:type data: str
:param data: The data to send as the body of the request. Default is
the empty string.
:type content_type: str
:param content_type: The proper MIME type of the data provided. Default
is None.
:type headers: dict
:param headers: extra HTTP headers to be sent with the request.
:type api_base_url: str
:param api_base_url: The base URL for the API endpoint.
Typically you won't have to provide this.
Default is the standard API base URL.
:type api_version: str
:param api_version: The version of the API to call. Typically
you shouldn't provide this and instead use
the default for the library. Default is the
latest API version supported by
google-cloud-python.
:type expect_json: bool
:param expect_json: If True, this method will try to parse the
response as JSON and raise an exception if
that cannot be done. Default is True.
:type _target_object: :class:`object`
:param _target_object:
(Optional) Protected argument to be used by library callers. This
can allow custom behavior, for example, to defer an HTTP request
and complete initialization of the object at a later time.
:raises ~google.cloud.exceptions.GoogleCloudError: if the response code
is not 200 OK.
:raises ValueError: if the response content type is not JSON.
:rtype: dict or str
:returns: The API response payload, either as a raw string or
a dictionary if the response is valid JSON.
"""
url = self.build_api_url(
path=path,
query_params=query_params,
api_base_url=api_base_url,
api_version=api_version,
)
# Making the executive decision that any dictionary
# data will be sent properly as JSON.
if data and isinstance(data, dict):
data = json.dumps(data)
content_type = "application/json"
response = self._make_request(
method=method,
url=url,
data=data,
content_type=content_type,
headers=headers,
target_object=_target_object,
)
if not 200 <= response.status_code < 300:
raise exceptions.from_http_response(response)
if expect_json and response.content:
return response.json()
else:
return response.content
|
python
|
def api_request(
self,
method,
path,
query_params=None,
data=None,
content_type=None,
headers=None,
api_base_url=None,
api_version=None,
expect_json=True,
_target_object=None,
):
"""Make a request over the HTTP transport to the API.
You shouldn't need to use this method, but if you plan to
interact with the API using these primitives, this is the
correct one to use.
:type method: str
:param method: The HTTP method name (ie, ``GET``, ``POST``, etc).
Required.
:type path: str
:param path: The path to the resource (ie, ``'/b/bucket-name'``).
Required.
:type query_params: dict or list
:param query_params: A dictionary of keys and values (or list of
key-value pairs) to insert into the query
string of the URL.
:type data: str
:param data: The data to send as the body of the request. Default is
the empty string.
:type content_type: str
:param content_type: The proper MIME type of the data provided. Default
is None.
:type headers: dict
:param headers: extra HTTP headers to be sent with the request.
:type api_base_url: str
:param api_base_url: The base URL for the API endpoint.
Typically you won't have to provide this.
Default is the standard API base URL.
:type api_version: str
:param api_version: The version of the API to call. Typically
you shouldn't provide this and instead use
the default for the library. Default is the
latest API version supported by
google-cloud-python.
:type expect_json: bool
:param expect_json: If True, this method will try to parse the
response as JSON and raise an exception if
that cannot be done. Default is True.
:type _target_object: :class:`object`
:param _target_object:
(Optional) Protected argument to be used by library callers. This
can allow custom behavior, for example, to defer an HTTP request
and complete initialization of the object at a later time.
:raises ~google.cloud.exceptions.GoogleCloudError: if the response code
is not 200 OK.
:raises ValueError: if the response content type is not JSON.
:rtype: dict or str
:returns: The API response payload, either as a raw string or
a dictionary if the response is valid JSON.
"""
url = self.build_api_url(
path=path,
query_params=query_params,
api_base_url=api_base_url,
api_version=api_version,
)
# Making the executive decision that any dictionary
# data will be sent properly as JSON.
if data and isinstance(data, dict):
data = json.dumps(data)
content_type = "application/json"
response = self._make_request(
method=method,
url=url,
data=data,
content_type=content_type,
headers=headers,
target_object=_target_object,
)
if not 200 <= response.status_code < 300:
raise exceptions.from_http_response(response)
if expect_json and response.content:
return response.json()
else:
return response.content
|
[
"def",
"api_request",
"(",
"self",
",",
"method",
",",
"path",
",",
"query_params",
"=",
"None",
",",
"data",
"=",
"None",
",",
"content_type",
"=",
"None",
",",
"headers",
"=",
"None",
",",
"api_base_url",
"=",
"None",
",",
"api_version",
"=",
"None",
",",
"expect_json",
"=",
"True",
",",
"_target_object",
"=",
"None",
",",
")",
":",
"url",
"=",
"self",
".",
"build_api_url",
"(",
"path",
"=",
"path",
",",
"query_params",
"=",
"query_params",
",",
"api_base_url",
"=",
"api_base_url",
",",
"api_version",
"=",
"api_version",
",",
")",
"# Making the executive decision that any dictionary",
"# data will be sent properly as JSON.",
"if",
"data",
"and",
"isinstance",
"(",
"data",
",",
"dict",
")",
":",
"data",
"=",
"json",
".",
"dumps",
"(",
"data",
")",
"content_type",
"=",
"\"application/json\"",
"response",
"=",
"self",
".",
"_make_request",
"(",
"method",
"=",
"method",
",",
"url",
"=",
"url",
",",
"data",
"=",
"data",
",",
"content_type",
"=",
"content_type",
",",
"headers",
"=",
"headers",
",",
"target_object",
"=",
"_target_object",
",",
")",
"if",
"not",
"200",
"<=",
"response",
".",
"status_code",
"<",
"300",
":",
"raise",
"exceptions",
".",
"from_http_response",
"(",
"response",
")",
"if",
"expect_json",
"and",
"response",
".",
"content",
":",
"return",
"response",
".",
"json",
"(",
")",
"else",
":",
"return",
"response",
".",
"content"
] |
Make a request over the HTTP transport to the API.
You shouldn't need to use this method, but if you plan to
interact with the API using these primitives, this is the
correct one to use.
:type method: str
:param method: The HTTP method name (ie, ``GET``, ``POST``, etc).
Required.
:type path: str
:param path: The path to the resource (ie, ``'/b/bucket-name'``).
Required.
:type query_params: dict or list
:param query_params: A dictionary of keys and values (or list of
key-value pairs) to insert into the query
string of the URL.
:type data: str
:param data: The data to send as the body of the request. Default is
the empty string.
:type content_type: str
:param content_type: The proper MIME type of the data provided. Default
is None.
:type headers: dict
:param headers: extra HTTP headers to be sent with the request.
:type api_base_url: str
:param api_base_url: The base URL for the API endpoint.
Typically you won't have to provide this.
Default is the standard API base URL.
:type api_version: str
:param api_version: The version of the API to call. Typically
you shouldn't provide this and instead use
the default for the library. Default is the
latest API version supported by
google-cloud-python.
:type expect_json: bool
:param expect_json: If True, this method will try to parse the
response as JSON and raise an exception if
that cannot be done. Default is True.
:type _target_object: :class:`object`
:param _target_object:
(Optional) Protected argument to be used by library callers. This
can allow custom behavior, for example, to defer an HTTP request
and complete initialization of the object at a later time.
:raises ~google.cloud.exceptions.GoogleCloudError: if the response code
is not 200 OK.
:raises ValueError: if the response content type is not JSON.
:rtype: dict or str
:returns: The API response payload, either as a raw string or
a dictionary if the response is valid JSON.
|
[
"Make",
"a",
"request",
"over",
"the",
"HTTP",
"transport",
"to",
"the",
"API",
"."
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/core/google/cloud/_http.py#L223-L324
|
train
|
googleapis/google-cloud-python
|
monitoring/google/cloud/monitoring_v3/query.py
|
_build_label_filter
|
def _build_label_filter(category, *args, **kwargs):
"""Construct a filter string to filter on metric or resource labels."""
terms = list(args)
for key, value in six.iteritems(kwargs):
if value is None:
continue
suffix = None
if key.endswith(
("_prefix", "_suffix", "_greater", "_greaterequal", "_less", "_lessequal")
):
key, suffix = key.rsplit("_", 1)
if category == "resource" and key == "resource_type":
key = "resource.type"
else:
key = ".".join((category, "label", key))
if suffix == "prefix":
term = '{key} = starts_with("{value}")'
elif suffix == "suffix":
term = '{key} = ends_with("{value}")'
elif suffix == "greater":
term = "{key} > {value}"
elif suffix == "greaterequal":
term = "{key} >= {value}"
elif suffix == "less":
term = "{key} < {value}"
elif suffix == "lessequal":
term = "{key} <= {value}"
else:
term = '{key} = "{value}"'
terms.append(term.format(key=key, value=value))
return " AND ".join(sorted(terms))
|
python
|
def _build_label_filter(category, *args, **kwargs):
"""Construct a filter string to filter on metric or resource labels."""
terms = list(args)
for key, value in six.iteritems(kwargs):
if value is None:
continue
suffix = None
if key.endswith(
("_prefix", "_suffix", "_greater", "_greaterequal", "_less", "_lessequal")
):
key, suffix = key.rsplit("_", 1)
if category == "resource" and key == "resource_type":
key = "resource.type"
else:
key = ".".join((category, "label", key))
if suffix == "prefix":
term = '{key} = starts_with("{value}")'
elif suffix == "suffix":
term = '{key} = ends_with("{value}")'
elif suffix == "greater":
term = "{key} > {value}"
elif suffix == "greaterequal":
term = "{key} >= {value}"
elif suffix == "less":
term = "{key} < {value}"
elif suffix == "lessequal":
term = "{key} <= {value}"
else:
term = '{key} = "{value}"'
terms.append(term.format(key=key, value=value))
return " AND ".join(sorted(terms))
|
[
"def",
"_build_label_filter",
"(",
"category",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"terms",
"=",
"list",
"(",
"args",
")",
"for",
"key",
",",
"value",
"in",
"six",
".",
"iteritems",
"(",
"kwargs",
")",
":",
"if",
"value",
"is",
"None",
":",
"continue",
"suffix",
"=",
"None",
"if",
"key",
".",
"endswith",
"(",
"(",
"\"_prefix\"",
",",
"\"_suffix\"",
",",
"\"_greater\"",
",",
"\"_greaterequal\"",
",",
"\"_less\"",
",",
"\"_lessequal\"",
")",
")",
":",
"key",
",",
"suffix",
"=",
"key",
".",
"rsplit",
"(",
"\"_\"",
",",
"1",
")",
"if",
"category",
"==",
"\"resource\"",
"and",
"key",
"==",
"\"resource_type\"",
":",
"key",
"=",
"\"resource.type\"",
"else",
":",
"key",
"=",
"\".\"",
".",
"join",
"(",
"(",
"category",
",",
"\"label\"",
",",
"key",
")",
")",
"if",
"suffix",
"==",
"\"prefix\"",
":",
"term",
"=",
"'{key} = starts_with(\"{value}\")'",
"elif",
"suffix",
"==",
"\"suffix\"",
":",
"term",
"=",
"'{key} = ends_with(\"{value}\")'",
"elif",
"suffix",
"==",
"\"greater\"",
":",
"term",
"=",
"\"{key} > {value}\"",
"elif",
"suffix",
"==",
"\"greaterequal\"",
":",
"term",
"=",
"\"{key} >= {value}\"",
"elif",
"suffix",
"==",
"\"less\"",
":",
"term",
"=",
"\"{key} < {value}\"",
"elif",
"suffix",
"==",
"\"lessequal\"",
":",
"term",
"=",
"\"{key} <= {value}\"",
"else",
":",
"term",
"=",
"'{key} = \"{value}\"'",
"terms",
".",
"append",
"(",
"term",
".",
"format",
"(",
"key",
"=",
"key",
",",
"value",
"=",
"value",
")",
")",
"return",
"\" AND \"",
".",
"join",
"(",
"sorted",
"(",
"terms",
")",
")"
] |
Construct a filter string to filter on metric or resource labels.
|
[
"Construct",
"a",
"filter",
"string",
"to",
"filter",
"on",
"metric",
"or",
"resource",
"labels",
"."
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/monitoring/google/cloud/monitoring_v3/query.py#L592-L627
|
train
|
googleapis/google-cloud-python
|
monitoring/google/cloud/monitoring_v3/query.py
|
Query.select_interval
|
def select_interval(self, end_time, start_time=None):
"""Copy the query and set the query time interval.
Example::
import datetime
now = datetime.datetime.utcnow()
query = query.select_interval(
end_time=now,
start_time=now - datetime.timedelta(minutes=5))
As a convenience, you can alternatively specify the end time and
an interval duration when you create the query initially.
:type end_time: :class:`datetime.datetime`
:param end_time: The end time (inclusive) of the time interval
for which results should be returned, as a datetime object.
:type start_time: :class:`datetime.datetime`
:param start_time:
(Optional) The start time (exclusive) of the time interval
for which results should be returned, as a datetime object.
If not specified, the interval is a point in time.
:rtype: :class:`Query`
:returns: The new query object.
"""
new_query = copy.deepcopy(self)
new_query._end_time = end_time
new_query._start_time = start_time
return new_query
|
python
|
def select_interval(self, end_time, start_time=None):
"""Copy the query and set the query time interval.
Example::
import datetime
now = datetime.datetime.utcnow()
query = query.select_interval(
end_time=now,
start_time=now - datetime.timedelta(minutes=5))
As a convenience, you can alternatively specify the end time and
an interval duration when you create the query initially.
:type end_time: :class:`datetime.datetime`
:param end_time: The end time (inclusive) of the time interval
for which results should be returned, as a datetime object.
:type start_time: :class:`datetime.datetime`
:param start_time:
(Optional) The start time (exclusive) of the time interval
for which results should be returned, as a datetime object.
If not specified, the interval is a point in time.
:rtype: :class:`Query`
:returns: The new query object.
"""
new_query = copy.deepcopy(self)
new_query._end_time = end_time
new_query._start_time = start_time
return new_query
|
[
"def",
"select_interval",
"(",
"self",
",",
"end_time",
",",
"start_time",
"=",
"None",
")",
":",
"new_query",
"=",
"copy",
".",
"deepcopy",
"(",
"self",
")",
"new_query",
".",
"_end_time",
"=",
"end_time",
"new_query",
".",
"_start_time",
"=",
"start_time",
"return",
"new_query"
] |
Copy the query and set the query time interval.
Example::
import datetime
now = datetime.datetime.utcnow()
query = query.select_interval(
end_time=now,
start_time=now - datetime.timedelta(minutes=5))
As a convenience, you can alternatively specify the end time and
an interval duration when you create the query initially.
:type end_time: :class:`datetime.datetime`
:param end_time: The end time (inclusive) of the time interval
for which results should be returned, as a datetime object.
:type start_time: :class:`datetime.datetime`
:param start_time:
(Optional) The start time (exclusive) of the time interval
for which results should be returned, as a datetime object.
If not specified, the interval is a point in time.
:rtype: :class:`Query`
:returns: The new query object.
|
[
"Copy",
"the",
"query",
"and",
"set",
"the",
"query",
"time",
"interval",
"."
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/monitoring/google/cloud/monitoring_v3/query.py#L134-L165
|
train
|
googleapis/google-cloud-python
|
monitoring/google/cloud/monitoring_v3/query.py
|
Query.select_group
|
def select_group(self, group_id):
"""Copy the query and add filtering by group.
Example::
query = query.select_group('1234567')
:type group_id: str
:param group_id: The ID of a group to filter by.
:rtype: :class:`Query`
:returns: The new query object.
"""
new_query = copy.deepcopy(self)
new_query._filter.group_id = group_id
return new_query
|
python
|
def select_group(self, group_id):
"""Copy the query and add filtering by group.
Example::
query = query.select_group('1234567')
:type group_id: str
:param group_id: The ID of a group to filter by.
:rtype: :class:`Query`
:returns: The new query object.
"""
new_query = copy.deepcopy(self)
new_query._filter.group_id = group_id
return new_query
|
[
"def",
"select_group",
"(",
"self",
",",
"group_id",
")",
":",
"new_query",
"=",
"copy",
".",
"deepcopy",
"(",
"self",
")",
"new_query",
".",
"_filter",
".",
"group_id",
"=",
"group_id",
"return",
"new_query"
] |
Copy the query and add filtering by group.
Example::
query = query.select_group('1234567')
:type group_id: str
:param group_id: The ID of a group to filter by.
:rtype: :class:`Query`
:returns: The new query object.
|
[
"Copy",
"the",
"query",
"and",
"add",
"filtering",
"by",
"group",
"."
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/monitoring/google/cloud/monitoring_v3/query.py#L167-L182
|
train
|
googleapis/google-cloud-python
|
monitoring/google/cloud/monitoring_v3/query.py
|
Query.select_projects
|
def select_projects(self, *args):
"""Copy the query and add filtering by monitored projects.
This is only useful if the target project represents a Stackdriver
account containing the specified monitored projects.
Examples::
query = query.select_projects('project-1')
query = query.select_projects('project-1', 'project-2')
:type args: tuple
:param args: Project IDs limiting the resources to be included
in the query.
:rtype: :class:`Query`
:returns: The new query object.
"""
new_query = copy.deepcopy(self)
new_query._filter.projects = args
return new_query
|
python
|
def select_projects(self, *args):
"""Copy the query and add filtering by monitored projects.
This is only useful if the target project represents a Stackdriver
account containing the specified monitored projects.
Examples::
query = query.select_projects('project-1')
query = query.select_projects('project-1', 'project-2')
:type args: tuple
:param args: Project IDs limiting the resources to be included
in the query.
:rtype: :class:`Query`
:returns: The new query object.
"""
new_query = copy.deepcopy(self)
new_query._filter.projects = args
return new_query
|
[
"def",
"select_projects",
"(",
"self",
",",
"*",
"args",
")",
":",
"new_query",
"=",
"copy",
".",
"deepcopy",
"(",
"self",
")",
"new_query",
".",
"_filter",
".",
"projects",
"=",
"args",
"return",
"new_query"
] |
Copy the query and add filtering by monitored projects.
This is only useful if the target project represents a Stackdriver
account containing the specified monitored projects.
Examples::
query = query.select_projects('project-1')
query = query.select_projects('project-1', 'project-2')
:type args: tuple
:param args: Project IDs limiting the resources to be included
in the query.
:rtype: :class:`Query`
:returns: The new query object.
|
[
"Copy",
"the",
"query",
"and",
"add",
"filtering",
"by",
"monitored",
"projects",
"."
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/monitoring/google/cloud/monitoring_v3/query.py#L184-L204
|
train
|
googleapis/google-cloud-python
|
monitoring/google/cloud/monitoring_v3/query.py
|
Query.select_resources
|
def select_resources(self, *args, **kwargs):
"""Copy the query and add filtering by resource labels.
Examples::
query = query.select_resources(zone='us-central1-a')
query = query.select_resources(zone_prefix='europe-')
query = query.select_resources(resource_type='gce_instance')
A keyword argument ``<label>=<value>`` ordinarily generates a filter
expression of the form::
resource.label.<label> = "<value>"
However, by adding ``"_prefix"`` or ``"_suffix"`` to the keyword,
you can specify a partial match.
``<label>_prefix=<value>`` generates::
resource.label.<label> = starts_with("<value>")
``<label>_suffix=<value>`` generates::
resource.label.<label> = ends_with("<value>")
As a special case, ``"resource_type"`` is treated as a special
pseudo-label corresponding to the filter object ``resource.type``.
For example, ``resource_type=<value>`` generates::
resource.type = "<value>"
See the `defined resource types`_.
.. note::
The label ``"instance_name"`` is a metric label,
not a resource label. You would filter on it using
``select_metrics(instance_name=...)``.
:type args: tuple
:param args: Raw filter expression strings to include in the
conjunction. If just one is provided and no keyword arguments
are provided, it can be a disjunction.
:type kwargs: dict
:param kwargs: Label filters to include in the conjunction as
described above.
:rtype: :class:`Query`
:returns: The new query object.
.. _defined resource types:
https://cloud.google.com/monitoring/api/v3/monitored-resources
"""
new_query = copy.deepcopy(self)
new_query._filter.select_resources(*args, **kwargs)
return new_query
|
python
|
def select_resources(self, *args, **kwargs):
"""Copy the query and add filtering by resource labels.
Examples::
query = query.select_resources(zone='us-central1-a')
query = query.select_resources(zone_prefix='europe-')
query = query.select_resources(resource_type='gce_instance')
A keyword argument ``<label>=<value>`` ordinarily generates a filter
expression of the form::
resource.label.<label> = "<value>"
However, by adding ``"_prefix"`` or ``"_suffix"`` to the keyword,
you can specify a partial match.
``<label>_prefix=<value>`` generates::
resource.label.<label> = starts_with("<value>")
``<label>_suffix=<value>`` generates::
resource.label.<label> = ends_with("<value>")
As a special case, ``"resource_type"`` is treated as a special
pseudo-label corresponding to the filter object ``resource.type``.
For example, ``resource_type=<value>`` generates::
resource.type = "<value>"
See the `defined resource types`_.
.. note::
The label ``"instance_name"`` is a metric label,
not a resource label. You would filter on it using
``select_metrics(instance_name=...)``.
:type args: tuple
:param args: Raw filter expression strings to include in the
conjunction. If just one is provided and no keyword arguments
are provided, it can be a disjunction.
:type kwargs: dict
:param kwargs: Label filters to include in the conjunction as
described above.
:rtype: :class:`Query`
:returns: The new query object.
.. _defined resource types:
https://cloud.google.com/monitoring/api/v3/monitored-resources
"""
new_query = copy.deepcopy(self)
new_query._filter.select_resources(*args, **kwargs)
return new_query
|
[
"def",
"select_resources",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"new_query",
"=",
"copy",
".",
"deepcopy",
"(",
"self",
")",
"new_query",
".",
"_filter",
".",
"select_resources",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"return",
"new_query"
] |
Copy the query and add filtering by resource labels.
Examples::
query = query.select_resources(zone='us-central1-a')
query = query.select_resources(zone_prefix='europe-')
query = query.select_resources(resource_type='gce_instance')
A keyword argument ``<label>=<value>`` ordinarily generates a filter
expression of the form::
resource.label.<label> = "<value>"
However, by adding ``"_prefix"`` or ``"_suffix"`` to the keyword,
you can specify a partial match.
``<label>_prefix=<value>`` generates::
resource.label.<label> = starts_with("<value>")
``<label>_suffix=<value>`` generates::
resource.label.<label> = ends_with("<value>")
As a special case, ``"resource_type"`` is treated as a special
pseudo-label corresponding to the filter object ``resource.type``.
For example, ``resource_type=<value>`` generates::
resource.type = "<value>"
See the `defined resource types`_.
.. note::
The label ``"instance_name"`` is a metric label,
not a resource label. You would filter on it using
``select_metrics(instance_name=...)``.
:type args: tuple
:param args: Raw filter expression strings to include in the
conjunction. If just one is provided and no keyword arguments
are provided, it can be a disjunction.
:type kwargs: dict
:param kwargs: Label filters to include in the conjunction as
described above.
:rtype: :class:`Query`
:returns: The new query object.
.. _defined resource types:
https://cloud.google.com/monitoring/api/v3/monitored-resources
|
[
"Copy",
"the",
"query",
"and",
"add",
"filtering",
"by",
"resource",
"labels",
"."
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/monitoring/google/cloud/monitoring_v3/query.py#L206-L262
|
train
|
googleapis/google-cloud-python
|
monitoring/google/cloud/monitoring_v3/query.py
|
Query.select_metrics
|
def select_metrics(self, *args, **kwargs):
"""Copy the query and add filtering by metric labels.
Examples::
query = query.select_metrics(instance_name='myinstance')
query = query.select_metrics(instance_name_prefix='mycluster-')
A keyword argument ``<label>=<value>`` ordinarily generates a filter
expression of the form::
metric.label.<label> = "<value>"
However, by adding ``"_prefix"`` or ``"_suffix"`` to the keyword,
you can specify a partial match.
``<label>_prefix=<value>`` generates::
metric.label.<label> = starts_with("<value>")
``<label>_suffix=<value>`` generates::
metric.label.<label> = ends_with("<value>")
If the label's value type is ``INT64``, a similar notation can be
used to express inequalities:
``<label>_less=<value>`` generates::
metric.label.<label> < <value>
``<label>_lessequal=<value>`` generates::
metric.label.<label> <= <value>
``<label>_greater=<value>`` generates::
metric.label.<label> > <value>
``<label>_greaterequal=<value>`` generates::
metric.label.<label> >= <value>
:type args: tuple
:param args: Raw filter expression strings to include in the
conjunction. If just one is provided and no keyword arguments
are provided, it can be a disjunction.
:type kwargs: dict
:param kwargs: Label filters to include in the conjunction as
described above.
:rtype: :class:`Query`
:returns: The new query object.
"""
new_query = copy.deepcopy(self)
new_query._filter.select_metrics(*args, **kwargs)
return new_query
|
python
|
def select_metrics(self, *args, **kwargs):
"""Copy the query and add filtering by metric labels.
Examples::
query = query.select_metrics(instance_name='myinstance')
query = query.select_metrics(instance_name_prefix='mycluster-')
A keyword argument ``<label>=<value>`` ordinarily generates a filter
expression of the form::
metric.label.<label> = "<value>"
However, by adding ``"_prefix"`` or ``"_suffix"`` to the keyword,
you can specify a partial match.
``<label>_prefix=<value>`` generates::
metric.label.<label> = starts_with("<value>")
``<label>_suffix=<value>`` generates::
metric.label.<label> = ends_with("<value>")
If the label's value type is ``INT64``, a similar notation can be
used to express inequalities:
``<label>_less=<value>`` generates::
metric.label.<label> < <value>
``<label>_lessequal=<value>`` generates::
metric.label.<label> <= <value>
``<label>_greater=<value>`` generates::
metric.label.<label> > <value>
``<label>_greaterequal=<value>`` generates::
metric.label.<label> >= <value>
:type args: tuple
:param args: Raw filter expression strings to include in the
conjunction. If just one is provided and no keyword arguments
are provided, it can be a disjunction.
:type kwargs: dict
:param kwargs: Label filters to include in the conjunction as
described above.
:rtype: :class:`Query`
:returns: The new query object.
"""
new_query = copy.deepcopy(self)
new_query._filter.select_metrics(*args, **kwargs)
return new_query
|
[
"def",
"select_metrics",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"new_query",
"=",
"copy",
".",
"deepcopy",
"(",
"self",
")",
"new_query",
".",
"_filter",
".",
"select_metrics",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"return",
"new_query"
] |
Copy the query and add filtering by metric labels.
Examples::
query = query.select_metrics(instance_name='myinstance')
query = query.select_metrics(instance_name_prefix='mycluster-')
A keyword argument ``<label>=<value>`` ordinarily generates a filter
expression of the form::
metric.label.<label> = "<value>"
However, by adding ``"_prefix"`` or ``"_suffix"`` to the keyword,
you can specify a partial match.
``<label>_prefix=<value>`` generates::
metric.label.<label> = starts_with("<value>")
``<label>_suffix=<value>`` generates::
metric.label.<label> = ends_with("<value>")
If the label's value type is ``INT64``, a similar notation can be
used to express inequalities:
``<label>_less=<value>`` generates::
metric.label.<label> < <value>
``<label>_lessequal=<value>`` generates::
metric.label.<label> <= <value>
``<label>_greater=<value>`` generates::
metric.label.<label> > <value>
``<label>_greaterequal=<value>`` generates::
metric.label.<label> >= <value>
:type args: tuple
:param args: Raw filter expression strings to include in the
conjunction. If just one is provided and no keyword arguments
are provided, it can be a disjunction.
:type kwargs: dict
:param kwargs: Label filters to include in the conjunction as
described above.
:rtype: :class:`Query`
:returns: The new query object.
|
[
"Copy",
"the",
"query",
"and",
"add",
"filtering",
"by",
"metric",
"labels",
"."
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/monitoring/google/cloud/monitoring_v3/query.py#L264-L321
|
train
|
googleapis/google-cloud-python
|
monitoring/google/cloud/monitoring_v3/query.py
|
Query.align
|
def align(self, per_series_aligner, seconds=0, minutes=0, hours=0):
"""Copy the query and add temporal alignment.
If ``per_series_aligner`` is not :data:`Aligner.ALIGN_NONE`, each time
series will contain data points only on the period boundaries.
Example::
from google.cloud.monitoring import enums
query = query.align(
enums.Aggregation.Aligner.ALIGN_MEAN, minutes=5)
It is also possible to specify the aligner as a literal string::
query = query.align('ALIGN_MEAN', minutes=5)
:type per_series_aligner: str or
:class:`~google.cloud.monitoring_v3.gapic.enums.Aggregation.Aligner`
:param per_series_aligner: The approach to be used to align
individual time series. For example: :data:`Aligner.ALIGN_MEAN`.
See
:class:`~google.cloud.monitoring_v3.gapic.enums.Aggregation.Aligner`
and the descriptions of the `supported aligners`_.
:type seconds: int
:param seconds: The number of seconds in the alignment period.
:type minutes: int
:param minutes: The number of minutes in the alignment period.
:type hours: int
:param hours: The number of hours in the alignment period.
:rtype: :class:`Query`
:returns: The new query object.
.. _supported aligners:
https://cloud.google.com/monitoring/api/ref_v3/rest/v3/\
projects.timeSeries/list#Aligner
"""
new_query = copy.deepcopy(self)
new_query._per_series_aligner = per_series_aligner
new_query._alignment_period_seconds = seconds + 60 * (minutes + 60 * hours)
return new_query
|
python
|
def align(self, per_series_aligner, seconds=0, minutes=0, hours=0):
"""Copy the query and add temporal alignment.
If ``per_series_aligner`` is not :data:`Aligner.ALIGN_NONE`, each time
series will contain data points only on the period boundaries.
Example::
from google.cloud.monitoring import enums
query = query.align(
enums.Aggregation.Aligner.ALIGN_MEAN, minutes=5)
It is also possible to specify the aligner as a literal string::
query = query.align('ALIGN_MEAN', minutes=5)
:type per_series_aligner: str or
:class:`~google.cloud.monitoring_v3.gapic.enums.Aggregation.Aligner`
:param per_series_aligner: The approach to be used to align
individual time series. For example: :data:`Aligner.ALIGN_MEAN`.
See
:class:`~google.cloud.monitoring_v3.gapic.enums.Aggregation.Aligner`
and the descriptions of the `supported aligners`_.
:type seconds: int
:param seconds: The number of seconds in the alignment period.
:type minutes: int
:param minutes: The number of minutes in the alignment period.
:type hours: int
:param hours: The number of hours in the alignment period.
:rtype: :class:`Query`
:returns: The new query object.
.. _supported aligners:
https://cloud.google.com/monitoring/api/ref_v3/rest/v3/\
projects.timeSeries/list#Aligner
"""
new_query = copy.deepcopy(self)
new_query._per_series_aligner = per_series_aligner
new_query._alignment_period_seconds = seconds + 60 * (minutes + 60 * hours)
return new_query
|
[
"def",
"align",
"(",
"self",
",",
"per_series_aligner",
",",
"seconds",
"=",
"0",
",",
"minutes",
"=",
"0",
",",
"hours",
"=",
"0",
")",
":",
"new_query",
"=",
"copy",
".",
"deepcopy",
"(",
"self",
")",
"new_query",
".",
"_per_series_aligner",
"=",
"per_series_aligner",
"new_query",
".",
"_alignment_period_seconds",
"=",
"seconds",
"+",
"60",
"*",
"(",
"minutes",
"+",
"60",
"*",
"hours",
")",
"return",
"new_query"
] |
Copy the query and add temporal alignment.
If ``per_series_aligner`` is not :data:`Aligner.ALIGN_NONE`, each time
series will contain data points only on the period boundaries.
Example::
from google.cloud.monitoring import enums
query = query.align(
enums.Aggregation.Aligner.ALIGN_MEAN, minutes=5)
It is also possible to specify the aligner as a literal string::
query = query.align('ALIGN_MEAN', minutes=5)
:type per_series_aligner: str or
:class:`~google.cloud.monitoring_v3.gapic.enums.Aggregation.Aligner`
:param per_series_aligner: The approach to be used to align
individual time series. For example: :data:`Aligner.ALIGN_MEAN`.
See
:class:`~google.cloud.monitoring_v3.gapic.enums.Aggregation.Aligner`
and the descriptions of the `supported aligners`_.
:type seconds: int
:param seconds: The number of seconds in the alignment period.
:type minutes: int
:param minutes: The number of minutes in the alignment period.
:type hours: int
:param hours: The number of hours in the alignment period.
:rtype: :class:`Query`
:returns: The new query object.
.. _supported aligners:
https://cloud.google.com/monitoring/api/ref_v3/rest/v3/\
projects.timeSeries/list#Aligner
|
[
"Copy",
"the",
"query",
"and",
"add",
"temporal",
"alignment",
"."
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/monitoring/google/cloud/monitoring_v3/query.py#L323-L366
|
train
|
googleapis/google-cloud-python
|
monitoring/google/cloud/monitoring_v3/query.py
|
Query.reduce
|
def reduce(self, cross_series_reducer, *group_by_fields):
"""Copy the query and add cross-series reduction.
Cross-series reduction combines time series by aggregating their
data points.
For example, you could request an aggregated time series for each
combination of project and zone as follows::
from google.cloud.monitoring import enums
query = query.reduce(enums.Aggregation.Reducer.REDUCE_MEAN,
'resource.project_id', 'resource.zone')
:type cross_series_reducer: str or
:class:`~google.cloud.monitoring_v3.gapic.enums.Aggregation.Reducer`
:param cross_series_reducer:
The approach to be used to combine time series. For example:
:data:`Reducer.REDUCE_MEAN`. See
:class:`~google.cloud.monitoring_v3.gapic.enums.Aggregation.Reducer`
and the descriptions of the `supported reducers`_.
:type group_by_fields: strs
:param group_by_fields:
Fields to be preserved by the reduction. For example, specifying
just ``"resource.zone"`` will result in one time series per zone.
The default is to aggregate all of the time series into just one.
:rtype: :class:`Query`
:returns: The new query object.
.. _supported reducers:
https://cloud.google.com/monitoring/api/ref_v3/rest/v3/\
projects.timeSeries/list#Reducer
"""
new_query = copy.deepcopy(self)
new_query._cross_series_reducer = cross_series_reducer
new_query._group_by_fields = group_by_fields
return new_query
|
python
|
def reduce(self, cross_series_reducer, *group_by_fields):
"""Copy the query and add cross-series reduction.
Cross-series reduction combines time series by aggregating their
data points.
For example, you could request an aggregated time series for each
combination of project and zone as follows::
from google.cloud.monitoring import enums
query = query.reduce(enums.Aggregation.Reducer.REDUCE_MEAN,
'resource.project_id', 'resource.zone')
:type cross_series_reducer: str or
:class:`~google.cloud.monitoring_v3.gapic.enums.Aggregation.Reducer`
:param cross_series_reducer:
The approach to be used to combine time series. For example:
:data:`Reducer.REDUCE_MEAN`. See
:class:`~google.cloud.monitoring_v3.gapic.enums.Aggregation.Reducer`
and the descriptions of the `supported reducers`_.
:type group_by_fields: strs
:param group_by_fields:
Fields to be preserved by the reduction. For example, specifying
just ``"resource.zone"`` will result in one time series per zone.
The default is to aggregate all of the time series into just one.
:rtype: :class:`Query`
:returns: The new query object.
.. _supported reducers:
https://cloud.google.com/monitoring/api/ref_v3/rest/v3/\
projects.timeSeries/list#Reducer
"""
new_query = copy.deepcopy(self)
new_query._cross_series_reducer = cross_series_reducer
new_query._group_by_fields = group_by_fields
return new_query
|
[
"def",
"reduce",
"(",
"self",
",",
"cross_series_reducer",
",",
"*",
"group_by_fields",
")",
":",
"new_query",
"=",
"copy",
".",
"deepcopy",
"(",
"self",
")",
"new_query",
".",
"_cross_series_reducer",
"=",
"cross_series_reducer",
"new_query",
".",
"_group_by_fields",
"=",
"group_by_fields",
"return",
"new_query"
] |
Copy the query and add cross-series reduction.
Cross-series reduction combines time series by aggregating their
data points.
For example, you could request an aggregated time series for each
combination of project and zone as follows::
from google.cloud.monitoring import enums
query = query.reduce(enums.Aggregation.Reducer.REDUCE_MEAN,
'resource.project_id', 'resource.zone')
:type cross_series_reducer: str or
:class:`~google.cloud.monitoring_v3.gapic.enums.Aggregation.Reducer`
:param cross_series_reducer:
The approach to be used to combine time series. For example:
:data:`Reducer.REDUCE_MEAN`. See
:class:`~google.cloud.monitoring_v3.gapic.enums.Aggregation.Reducer`
and the descriptions of the `supported reducers`_.
:type group_by_fields: strs
:param group_by_fields:
Fields to be preserved by the reduction. For example, specifying
just ``"resource.zone"`` will result in one time series per zone.
The default is to aggregate all of the time series into just one.
:rtype: :class:`Query`
:returns: The new query object.
.. _supported reducers:
https://cloud.google.com/monitoring/api/ref_v3/rest/v3/\
projects.timeSeries/list#Reducer
|
[
"Copy",
"the",
"query",
"and",
"add",
"cross",
"-",
"series",
"reduction",
"."
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/monitoring/google/cloud/monitoring_v3/query.py#L368-L405
|
train
|
googleapis/google-cloud-python
|
monitoring/google/cloud/monitoring_v3/query.py
|
Query.iter
|
def iter(self, headers_only=False, page_size=None):
"""Yield all time series objects selected by the query.
The generator returned iterates over
:class:`~google.cloud.monitoring_v3.types.TimeSeries` objects
containing points ordered from oldest to newest.
Note that the :class:`Query` object itself is an iterable, such that
the following are equivalent::
for timeseries in query:
...
for timeseries in query.iter():
...
:type headers_only: bool
:param headers_only:
Whether to omit the point data from the time series objects.
:type page_size: int
:param page_size:
(Optional) The maximum number of points in each page of results
from this request. Non-positive values are ignored. Defaults
to a sensible value set by the API.
:raises: :exc:`ValueError` if the query time interval has not been
specified.
"""
if self._end_time is None:
raise ValueError("Query time interval not specified.")
params = self._build_query_params(headers_only, page_size)
for ts in self._client.list_time_series(**params):
yield ts
|
python
|
def iter(self, headers_only=False, page_size=None):
"""Yield all time series objects selected by the query.
The generator returned iterates over
:class:`~google.cloud.monitoring_v3.types.TimeSeries` objects
containing points ordered from oldest to newest.
Note that the :class:`Query` object itself is an iterable, such that
the following are equivalent::
for timeseries in query:
...
for timeseries in query.iter():
...
:type headers_only: bool
:param headers_only:
Whether to omit the point data from the time series objects.
:type page_size: int
:param page_size:
(Optional) The maximum number of points in each page of results
from this request. Non-positive values are ignored. Defaults
to a sensible value set by the API.
:raises: :exc:`ValueError` if the query time interval has not been
specified.
"""
if self._end_time is None:
raise ValueError("Query time interval not specified.")
params = self._build_query_params(headers_only, page_size)
for ts in self._client.list_time_series(**params):
yield ts
|
[
"def",
"iter",
"(",
"self",
",",
"headers_only",
"=",
"False",
",",
"page_size",
"=",
"None",
")",
":",
"if",
"self",
".",
"_end_time",
"is",
"None",
":",
"raise",
"ValueError",
"(",
"\"Query time interval not specified.\"",
")",
"params",
"=",
"self",
".",
"_build_query_params",
"(",
"headers_only",
",",
"page_size",
")",
"for",
"ts",
"in",
"self",
".",
"_client",
".",
"list_time_series",
"(",
"*",
"*",
"params",
")",
":",
"yield",
"ts"
] |
Yield all time series objects selected by the query.
The generator returned iterates over
:class:`~google.cloud.monitoring_v3.types.TimeSeries` objects
containing points ordered from oldest to newest.
Note that the :class:`Query` object itself is an iterable, such that
the following are equivalent::
for timeseries in query:
...
for timeseries in query.iter():
...
:type headers_only: bool
:param headers_only:
Whether to omit the point data from the time series objects.
:type page_size: int
:param page_size:
(Optional) The maximum number of points in each page of results
from this request. Non-positive values are ignored. Defaults
to a sensible value set by the API.
:raises: :exc:`ValueError` if the query time interval has not been
specified.
|
[
"Yield",
"all",
"time",
"series",
"objects",
"selected",
"by",
"the",
"query",
"."
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/monitoring/google/cloud/monitoring_v3/query.py#L407-L441
|
train
|
googleapis/google-cloud-python
|
monitoring/google/cloud/monitoring_v3/query.py
|
Query._build_query_params
|
def _build_query_params(self, headers_only=False, page_size=None):
"""Return key-value pairs for the list_time_series API call.
:type headers_only: bool
:param headers_only:
Whether to omit the point data from the
:class:`~google.cloud.monitoring_v3.types.TimeSeries` objects.
:type page_size: int
:param page_size:
(Optional) The maximum number of points in each page of results
from this request. Non-positive values are ignored. Defaults
to a sensible value set by the API.
"""
params = {"name": self._project_path, "filter_": self.filter}
params["interval"] = types.TimeInterval()
params["interval"].end_time.FromDatetime(self._end_time)
if self._start_time:
params["interval"].start_time.FromDatetime(self._start_time)
if (
self._per_series_aligner
or self._alignment_period_seconds
or self._cross_series_reducer
or self._group_by_fields
):
params["aggregation"] = types.Aggregation(
per_series_aligner=self._per_series_aligner,
cross_series_reducer=self._cross_series_reducer,
group_by_fields=self._group_by_fields,
alignment_period={"seconds": self._alignment_period_seconds},
)
if headers_only:
params["view"] = enums.ListTimeSeriesRequest.TimeSeriesView.HEADERS
else:
params["view"] = enums.ListTimeSeriesRequest.TimeSeriesView.FULL
if page_size is not None:
params["page_size"] = page_size
return params
|
python
|
def _build_query_params(self, headers_only=False, page_size=None):
"""Return key-value pairs for the list_time_series API call.
:type headers_only: bool
:param headers_only:
Whether to omit the point data from the
:class:`~google.cloud.monitoring_v3.types.TimeSeries` objects.
:type page_size: int
:param page_size:
(Optional) The maximum number of points in each page of results
from this request. Non-positive values are ignored. Defaults
to a sensible value set by the API.
"""
params = {"name": self._project_path, "filter_": self.filter}
params["interval"] = types.TimeInterval()
params["interval"].end_time.FromDatetime(self._end_time)
if self._start_time:
params["interval"].start_time.FromDatetime(self._start_time)
if (
self._per_series_aligner
or self._alignment_period_seconds
or self._cross_series_reducer
or self._group_by_fields
):
params["aggregation"] = types.Aggregation(
per_series_aligner=self._per_series_aligner,
cross_series_reducer=self._cross_series_reducer,
group_by_fields=self._group_by_fields,
alignment_period={"seconds": self._alignment_period_seconds},
)
if headers_only:
params["view"] = enums.ListTimeSeriesRequest.TimeSeriesView.HEADERS
else:
params["view"] = enums.ListTimeSeriesRequest.TimeSeriesView.FULL
if page_size is not None:
params["page_size"] = page_size
return params
|
[
"def",
"_build_query_params",
"(",
"self",
",",
"headers_only",
"=",
"False",
",",
"page_size",
"=",
"None",
")",
":",
"params",
"=",
"{",
"\"name\"",
":",
"self",
".",
"_project_path",
",",
"\"filter_\"",
":",
"self",
".",
"filter",
"}",
"params",
"[",
"\"interval\"",
"]",
"=",
"types",
".",
"TimeInterval",
"(",
")",
"params",
"[",
"\"interval\"",
"]",
".",
"end_time",
".",
"FromDatetime",
"(",
"self",
".",
"_end_time",
")",
"if",
"self",
".",
"_start_time",
":",
"params",
"[",
"\"interval\"",
"]",
".",
"start_time",
".",
"FromDatetime",
"(",
"self",
".",
"_start_time",
")",
"if",
"(",
"self",
".",
"_per_series_aligner",
"or",
"self",
".",
"_alignment_period_seconds",
"or",
"self",
".",
"_cross_series_reducer",
"or",
"self",
".",
"_group_by_fields",
")",
":",
"params",
"[",
"\"aggregation\"",
"]",
"=",
"types",
".",
"Aggregation",
"(",
"per_series_aligner",
"=",
"self",
".",
"_per_series_aligner",
",",
"cross_series_reducer",
"=",
"self",
".",
"_cross_series_reducer",
",",
"group_by_fields",
"=",
"self",
".",
"_group_by_fields",
",",
"alignment_period",
"=",
"{",
"\"seconds\"",
":",
"self",
".",
"_alignment_period_seconds",
"}",
",",
")",
"if",
"headers_only",
":",
"params",
"[",
"\"view\"",
"]",
"=",
"enums",
".",
"ListTimeSeriesRequest",
".",
"TimeSeriesView",
".",
"HEADERS",
"else",
":",
"params",
"[",
"\"view\"",
"]",
"=",
"enums",
".",
"ListTimeSeriesRequest",
".",
"TimeSeriesView",
".",
"FULL",
"if",
"page_size",
"is",
"not",
"None",
":",
"params",
"[",
"\"page_size\"",
"]",
"=",
"page_size",
"return",
"params"
] |
Return key-value pairs for the list_time_series API call.
:type headers_only: bool
:param headers_only:
Whether to omit the point data from the
:class:`~google.cloud.monitoring_v3.types.TimeSeries` objects.
:type page_size: int
:param page_size:
(Optional) The maximum number of points in each page of results
from this request. Non-positive values are ignored. Defaults
to a sensible value set by the API.
|
[
"Return",
"key",
"-",
"value",
"pairs",
"for",
"the",
"list_time_series",
"API",
"call",
"."
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/monitoring/google/cloud/monitoring_v3/query.py#L443-L485
|
train
|
googleapis/google-cloud-python
|
resource_manager/google/cloud/resource_manager/project.py
|
Project.from_api_repr
|
def from_api_repr(cls, resource, client):
"""Factory: construct a project given its API representation.
:type resource: dict
:param resource: project resource representation returned from the API
:type client: :class:`google.cloud.resource_manager.client.Client`
:param client: The Client used with this project.
:rtype: :class:`google.cloud.resource_manager.project.Project`
:returns: The project created.
"""
project = cls(project_id=resource["projectId"], client=client)
project.set_properties_from_api_repr(resource)
return project
|
python
|
def from_api_repr(cls, resource, client):
"""Factory: construct a project given its API representation.
:type resource: dict
:param resource: project resource representation returned from the API
:type client: :class:`google.cloud.resource_manager.client.Client`
:param client: The Client used with this project.
:rtype: :class:`google.cloud.resource_manager.project.Project`
:returns: The project created.
"""
project = cls(project_id=resource["projectId"], client=client)
project.set_properties_from_api_repr(resource)
return project
|
[
"def",
"from_api_repr",
"(",
"cls",
",",
"resource",
",",
"client",
")",
":",
"project",
"=",
"cls",
"(",
"project_id",
"=",
"resource",
"[",
"\"projectId\"",
"]",
",",
"client",
"=",
"client",
")",
"project",
".",
"set_properties_from_api_repr",
"(",
"resource",
")",
"return",
"project"
] |
Factory: construct a project given its API representation.
:type resource: dict
:param resource: project resource representation returned from the API
:type client: :class:`google.cloud.resource_manager.client.Client`
:param client: The Client used with this project.
:rtype: :class:`google.cloud.resource_manager.project.Project`
:returns: The project created.
|
[
"Factory",
":",
"construct",
"a",
"project",
"given",
"its",
"API",
"representation",
"."
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/resource_manager/google/cloud/resource_manager/project.py#L68-L82
|
train
|
googleapis/google-cloud-python
|
resource_manager/google/cloud/resource_manager/project.py
|
Project.set_properties_from_api_repr
|
def set_properties_from_api_repr(self, resource):
"""Update specific properties from its API representation."""
self.name = resource.get("name")
self.number = resource["projectNumber"]
self.labels = resource.get("labels", {})
self.status = resource["lifecycleState"]
if "parent" in resource:
self.parent = resource["parent"]
|
python
|
def set_properties_from_api_repr(self, resource):
"""Update specific properties from its API representation."""
self.name = resource.get("name")
self.number = resource["projectNumber"]
self.labels = resource.get("labels", {})
self.status = resource["lifecycleState"]
if "parent" in resource:
self.parent = resource["parent"]
|
[
"def",
"set_properties_from_api_repr",
"(",
"self",
",",
"resource",
")",
":",
"self",
".",
"name",
"=",
"resource",
".",
"get",
"(",
"\"name\"",
")",
"self",
".",
"number",
"=",
"resource",
"[",
"\"projectNumber\"",
"]",
"self",
".",
"labels",
"=",
"resource",
".",
"get",
"(",
"\"labels\"",
",",
"{",
"}",
")",
"self",
".",
"status",
"=",
"resource",
"[",
"\"lifecycleState\"",
"]",
"if",
"\"parent\"",
"in",
"resource",
":",
"self",
".",
"parent",
"=",
"resource",
"[",
"\"parent\"",
"]"
] |
Update specific properties from its API representation.
|
[
"Update",
"specific",
"properties",
"from",
"its",
"API",
"representation",
"."
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/resource_manager/google/cloud/resource_manager/project.py#L84-L91
|
train
|
googleapis/google-cloud-python
|
resource_manager/google/cloud/resource_manager/project.py
|
Project.create
|
def create(self, client=None):
"""API call: create the project via a ``POST`` request.
See
https://cloud.google.com/resource-manager/reference/rest/v1beta1/projects/create
:type client: :class:`google.cloud.resource_manager.client.Client` or
:data:`NoneType <types.NoneType>`
:param client: the client to use. If not passed, falls back to
the client stored on the current project.
"""
client = self._require_client(client)
data = {"projectId": self.project_id, "name": self.name, "labels": self.labels}
resp = client._connection.api_request(
method="POST", path="/projects", data=data
)
self.set_properties_from_api_repr(resource=resp)
|
python
|
def create(self, client=None):
"""API call: create the project via a ``POST`` request.
See
https://cloud.google.com/resource-manager/reference/rest/v1beta1/projects/create
:type client: :class:`google.cloud.resource_manager.client.Client` or
:data:`NoneType <types.NoneType>`
:param client: the client to use. If not passed, falls back to
the client stored on the current project.
"""
client = self._require_client(client)
data = {"projectId": self.project_id, "name": self.name, "labels": self.labels}
resp = client._connection.api_request(
method="POST", path="/projects", data=data
)
self.set_properties_from_api_repr(resource=resp)
|
[
"def",
"create",
"(",
"self",
",",
"client",
"=",
"None",
")",
":",
"client",
"=",
"self",
".",
"_require_client",
"(",
"client",
")",
"data",
"=",
"{",
"\"projectId\"",
":",
"self",
".",
"project_id",
",",
"\"name\"",
":",
"self",
".",
"name",
",",
"\"labels\"",
":",
"self",
".",
"labels",
"}",
"resp",
"=",
"client",
".",
"_connection",
".",
"api_request",
"(",
"method",
"=",
"\"POST\"",
",",
"path",
"=",
"\"/projects\"",
",",
"data",
"=",
"data",
")",
"self",
".",
"set_properties_from_api_repr",
"(",
"resource",
"=",
"resp",
")"
] |
API call: create the project via a ``POST`` request.
See
https://cloud.google.com/resource-manager/reference/rest/v1beta1/projects/create
:type client: :class:`google.cloud.resource_manager.client.Client` or
:data:`NoneType <types.NoneType>`
:param client: the client to use. If not passed, falls back to
the client stored on the current project.
|
[
"API",
"call",
":",
"create",
"the",
"project",
"via",
"a",
"POST",
"request",
"."
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/resource_manager/google/cloud/resource_manager/project.py#L120-L137
|
train
|
googleapis/google-cloud-python
|
resource_manager/google/cloud/resource_manager/project.py
|
Project.update
|
def update(self, client=None):
"""API call: update the project via a ``PUT`` request.
See
https://cloud.google.com/resource-manager/reference/rest/v1beta1/projects/update
:type client: :class:`google.cloud.resource_manager.client.Client` or
:data:`NoneType <types.NoneType>`
:param client: the client to use. If not passed, falls back to
the client stored on the current project.
"""
client = self._require_client(client)
data = {"name": self.name, "labels": self.labels, "parent": self.parent}
resp = client._connection.api_request(method="PUT", path=self.path, data=data)
self.set_properties_from_api_repr(resp)
|
python
|
def update(self, client=None):
"""API call: update the project via a ``PUT`` request.
See
https://cloud.google.com/resource-manager/reference/rest/v1beta1/projects/update
:type client: :class:`google.cloud.resource_manager.client.Client` or
:data:`NoneType <types.NoneType>`
:param client: the client to use. If not passed, falls back to
the client stored on the current project.
"""
client = self._require_client(client)
data = {"name": self.name, "labels": self.labels, "parent": self.parent}
resp = client._connection.api_request(method="PUT", path=self.path, data=data)
self.set_properties_from_api_repr(resp)
|
[
"def",
"update",
"(",
"self",
",",
"client",
"=",
"None",
")",
":",
"client",
"=",
"self",
".",
"_require_client",
"(",
"client",
")",
"data",
"=",
"{",
"\"name\"",
":",
"self",
".",
"name",
",",
"\"labels\"",
":",
"self",
".",
"labels",
",",
"\"parent\"",
":",
"self",
".",
"parent",
"}",
"resp",
"=",
"client",
".",
"_connection",
".",
"api_request",
"(",
"method",
"=",
"\"PUT\"",
",",
"path",
"=",
"self",
".",
"path",
",",
"data",
"=",
"data",
")",
"self",
".",
"set_properties_from_api_repr",
"(",
"resp",
")"
] |
API call: update the project via a ``PUT`` request.
See
https://cloud.google.com/resource-manager/reference/rest/v1beta1/projects/update
:type client: :class:`google.cloud.resource_manager.client.Client` or
:data:`NoneType <types.NoneType>`
:param client: the client to use. If not passed, falls back to
the client stored on the current project.
|
[
"API",
"call",
":",
"update",
"the",
"project",
"via",
"a",
"PUT",
"request",
"."
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/resource_manager/google/cloud/resource_manager/project.py#L193-L209
|
train
|
googleapis/google-cloud-python
|
resource_manager/google/cloud/resource_manager/project.py
|
Project.delete
|
def delete(self, client=None, reload_data=False):
"""API call: delete the project via a ``DELETE`` request.
See
https://cloud.google.com/resource-manager/reference/rest/v1beta1/projects/delete
This actually changes the status (``lifecycleState``) from ``ACTIVE``
to ``DELETE_REQUESTED``.
Later (it's not specified when), the project will move into the
``DELETE_IN_PROGRESS`` state, which means the deleting has actually
begun.
:type client: :class:`google.cloud.resource_manager.client.Client` or
:data:`NoneType <types.NoneType>`
:param client: the client to use. If not passed, falls back to
the client stored on the current project.
:type reload_data: bool
:param reload_data: Whether to reload the project with the latest
state. If you want to get the updated status,
you'll want this set to :data:`True` as the DELETE
method doesn't send back the updated project.
Default: :data:`False`.
"""
client = self._require_client(client)
client._connection.api_request(method="DELETE", path=self.path)
# If the reload flag is set, reload the project.
if reload_data:
self.reload()
|
python
|
def delete(self, client=None, reload_data=False):
"""API call: delete the project via a ``DELETE`` request.
See
https://cloud.google.com/resource-manager/reference/rest/v1beta1/projects/delete
This actually changes the status (``lifecycleState``) from ``ACTIVE``
to ``DELETE_REQUESTED``.
Later (it's not specified when), the project will move into the
``DELETE_IN_PROGRESS`` state, which means the deleting has actually
begun.
:type client: :class:`google.cloud.resource_manager.client.Client` or
:data:`NoneType <types.NoneType>`
:param client: the client to use. If not passed, falls back to
the client stored on the current project.
:type reload_data: bool
:param reload_data: Whether to reload the project with the latest
state. If you want to get the updated status,
you'll want this set to :data:`True` as the DELETE
method doesn't send back the updated project.
Default: :data:`False`.
"""
client = self._require_client(client)
client._connection.api_request(method="DELETE", path=self.path)
# If the reload flag is set, reload the project.
if reload_data:
self.reload()
|
[
"def",
"delete",
"(",
"self",
",",
"client",
"=",
"None",
",",
"reload_data",
"=",
"False",
")",
":",
"client",
"=",
"self",
".",
"_require_client",
"(",
"client",
")",
"client",
".",
"_connection",
".",
"api_request",
"(",
"method",
"=",
"\"DELETE\"",
",",
"path",
"=",
"self",
".",
"path",
")",
"# If the reload flag is set, reload the project.",
"if",
"reload_data",
":",
"self",
".",
"reload",
"(",
")"
] |
API call: delete the project via a ``DELETE`` request.
See
https://cloud.google.com/resource-manager/reference/rest/v1beta1/projects/delete
This actually changes the status (``lifecycleState``) from ``ACTIVE``
to ``DELETE_REQUESTED``.
Later (it's not specified when), the project will move into the
``DELETE_IN_PROGRESS`` state, which means the deleting has actually
begun.
:type client: :class:`google.cloud.resource_manager.client.Client` or
:data:`NoneType <types.NoneType>`
:param client: the client to use. If not passed, falls back to
the client stored on the current project.
:type reload_data: bool
:param reload_data: Whether to reload the project with the latest
state. If you want to get the updated status,
you'll want this set to :data:`True` as the DELETE
method doesn't send back the updated project.
Default: :data:`False`.
|
[
"API",
"call",
":",
"delete",
"the",
"project",
"via",
"a",
"DELETE",
"request",
"."
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/resource_manager/google/cloud/resource_manager/project.py#L211-L240
|
train
|
googleapis/google-cloud-python
|
datastore/google/cloud/datastore/helpers.py
|
_get_meaning
|
def _get_meaning(value_pb, is_list=False):
"""Get the meaning from a protobuf value.
:type value_pb: :class:`.entity_pb2.Value`
:param value_pb: The protobuf value to be checked for an
associated meaning.
:type is_list: bool
:param is_list: Boolean indicating if the ``value_pb`` contains
a list value.
:rtype: int
:returns: The meaning for the ``value_pb`` if one is set, else
:data:`None`. For a list value, if there are disagreeing
means it just returns a list of meanings. If all the
list meanings agree, it just condenses them.
"""
meaning = None
if is_list:
# An empty list will have no values, hence no shared meaning
# set among them.
if len(value_pb.array_value.values) == 0:
return None
# We check among all the meanings, some of which may be None,
# the rest which may be enum/int values.
all_meanings = [
_get_meaning(sub_value_pb) for sub_value_pb in value_pb.array_value.values
]
unique_meanings = set(all_meanings)
if len(unique_meanings) == 1:
# If there is a unique meaning, we preserve it.
meaning = unique_meanings.pop()
else: # We know len(value_pb.array_value.values) > 0.
# If the meaning is not unique, just return all of them.
meaning = all_meanings
elif value_pb.meaning: # Simple field (int32).
meaning = value_pb.meaning
return meaning
|
python
|
def _get_meaning(value_pb, is_list=False):
"""Get the meaning from a protobuf value.
:type value_pb: :class:`.entity_pb2.Value`
:param value_pb: The protobuf value to be checked for an
associated meaning.
:type is_list: bool
:param is_list: Boolean indicating if the ``value_pb`` contains
a list value.
:rtype: int
:returns: The meaning for the ``value_pb`` if one is set, else
:data:`None`. For a list value, if there are disagreeing
means it just returns a list of meanings. If all the
list meanings agree, it just condenses them.
"""
meaning = None
if is_list:
# An empty list will have no values, hence no shared meaning
# set among them.
if len(value_pb.array_value.values) == 0:
return None
# We check among all the meanings, some of which may be None,
# the rest which may be enum/int values.
all_meanings = [
_get_meaning(sub_value_pb) for sub_value_pb in value_pb.array_value.values
]
unique_meanings = set(all_meanings)
if len(unique_meanings) == 1:
# If there is a unique meaning, we preserve it.
meaning = unique_meanings.pop()
else: # We know len(value_pb.array_value.values) > 0.
# If the meaning is not unique, just return all of them.
meaning = all_meanings
elif value_pb.meaning: # Simple field (int32).
meaning = value_pb.meaning
return meaning
|
[
"def",
"_get_meaning",
"(",
"value_pb",
",",
"is_list",
"=",
"False",
")",
":",
"meaning",
"=",
"None",
"if",
"is_list",
":",
"# An empty list will have no values, hence no shared meaning",
"# set among them.",
"if",
"len",
"(",
"value_pb",
".",
"array_value",
".",
"values",
")",
"==",
"0",
":",
"return",
"None",
"# We check among all the meanings, some of which may be None,",
"# the rest which may be enum/int values.",
"all_meanings",
"=",
"[",
"_get_meaning",
"(",
"sub_value_pb",
")",
"for",
"sub_value_pb",
"in",
"value_pb",
".",
"array_value",
".",
"values",
"]",
"unique_meanings",
"=",
"set",
"(",
"all_meanings",
")",
"if",
"len",
"(",
"unique_meanings",
")",
"==",
"1",
":",
"# If there is a unique meaning, we preserve it.",
"meaning",
"=",
"unique_meanings",
".",
"pop",
"(",
")",
"else",
":",
"# We know len(value_pb.array_value.values) > 0.",
"# If the meaning is not unique, just return all of them.",
"meaning",
"=",
"all_meanings",
"elif",
"value_pb",
".",
"meaning",
":",
"# Simple field (int32).",
"meaning",
"=",
"value_pb",
".",
"meaning",
"return",
"meaning"
] |
Get the meaning from a protobuf value.
:type value_pb: :class:`.entity_pb2.Value`
:param value_pb: The protobuf value to be checked for an
associated meaning.
:type is_list: bool
:param is_list: Boolean indicating if the ``value_pb`` contains
a list value.
:rtype: int
:returns: The meaning for the ``value_pb`` if one is set, else
:data:`None`. For a list value, if there are disagreeing
means it just returns a list of meanings. If all the
list meanings agree, it just condenses them.
|
[
"Get",
"the",
"meaning",
"from",
"a",
"protobuf",
"value",
"."
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/datastore/google/cloud/datastore/helpers.py#L35-L74
|
train
|
googleapis/google-cloud-python
|
datastore/google/cloud/datastore/helpers.py
|
entity_from_protobuf
|
def entity_from_protobuf(pb):
"""Factory method for creating an entity based on a protobuf.
The protobuf should be one returned from the Cloud Datastore
Protobuf API.
:type pb: :class:`.entity_pb2.Entity`
:param pb: The Protobuf representing the entity.
:rtype: :class:`google.cloud.datastore.entity.Entity`
:returns: The entity derived from the protobuf.
"""
key = None
if pb.HasField("key"): # Message field (Key)
key = key_from_protobuf(pb.key)
entity_props = {}
entity_meanings = {}
exclude_from_indexes = []
for prop_name, value_pb in _property_tuples(pb):
value = _get_value_from_value_pb(value_pb)
entity_props[prop_name] = value
# Check if the property has an associated meaning.
is_list = isinstance(value, list)
meaning = _get_meaning(value_pb, is_list=is_list)
if meaning is not None:
entity_meanings[prop_name] = (meaning, value)
# Check if ``value_pb`` was excluded from index. Lists need to be
# special-cased and we require all ``exclude_from_indexes`` values
# in a list agree.
if is_list and len(value) > 0:
exclude_values = set(
value_pb.exclude_from_indexes
for value_pb in value_pb.array_value.values
)
if len(exclude_values) != 1:
raise ValueError(
"For an array_value, subvalues must either "
"all be indexed or all excluded from "
"indexes."
)
if exclude_values.pop():
exclude_from_indexes.append(prop_name)
else:
if value_pb.exclude_from_indexes:
exclude_from_indexes.append(prop_name)
entity = Entity(key=key, exclude_from_indexes=exclude_from_indexes)
entity.update(entity_props)
entity._meanings.update(entity_meanings)
return entity
|
python
|
def entity_from_protobuf(pb):
"""Factory method for creating an entity based on a protobuf.
The protobuf should be one returned from the Cloud Datastore
Protobuf API.
:type pb: :class:`.entity_pb2.Entity`
:param pb: The Protobuf representing the entity.
:rtype: :class:`google.cloud.datastore.entity.Entity`
:returns: The entity derived from the protobuf.
"""
key = None
if pb.HasField("key"): # Message field (Key)
key = key_from_protobuf(pb.key)
entity_props = {}
entity_meanings = {}
exclude_from_indexes = []
for prop_name, value_pb in _property_tuples(pb):
value = _get_value_from_value_pb(value_pb)
entity_props[prop_name] = value
# Check if the property has an associated meaning.
is_list = isinstance(value, list)
meaning = _get_meaning(value_pb, is_list=is_list)
if meaning is not None:
entity_meanings[prop_name] = (meaning, value)
# Check if ``value_pb`` was excluded from index. Lists need to be
# special-cased and we require all ``exclude_from_indexes`` values
# in a list agree.
if is_list and len(value) > 0:
exclude_values = set(
value_pb.exclude_from_indexes
for value_pb in value_pb.array_value.values
)
if len(exclude_values) != 1:
raise ValueError(
"For an array_value, subvalues must either "
"all be indexed or all excluded from "
"indexes."
)
if exclude_values.pop():
exclude_from_indexes.append(prop_name)
else:
if value_pb.exclude_from_indexes:
exclude_from_indexes.append(prop_name)
entity = Entity(key=key, exclude_from_indexes=exclude_from_indexes)
entity.update(entity_props)
entity._meanings.update(entity_meanings)
return entity
|
[
"def",
"entity_from_protobuf",
"(",
"pb",
")",
":",
"key",
"=",
"None",
"if",
"pb",
".",
"HasField",
"(",
"\"key\"",
")",
":",
"# Message field (Key)",
"key",
"=",
"key_from_protobuf",
"(",
"pb",
".",
"key",
")",
"entity_props",
"=",
"{",
"}",
"entity_meanings",
"=",
"{",
"}",
"exclude_from_indexes",
"=",
"[",
"]",
"for",
"prop_name",
",",
"value_pb",
"in",
"_property_tuples",
"(",
"pb",
")",
":",
"value",
"=",
"_get_value_from_value_pb",
"(",
"value_pb",
")",
"entity_props",
"[",
"prop_name",
"]",
"=",
"value",
"# Check if the property has an associated meaning.",
"is_list",
"=",
"isinstance",
"(",
"value",
",",
"list",
")",
"meaning",
"=",
"_get_meaning",
"(",
"value_pb",
",",
"is_list",
"=",
"is_list",
")",
"if",
"meaning",
"is",
"not",
"None",
":",
"entity_meanings",
"[",
"prop_name",
"]",
"=",
"(",
"meaning",
",",
"value",
")",
"# Check if ``value_pb`` was excluded from index. Lists need to be",
"# special-cased and we require all ``exclude_from_indexes`` values",
"# in a list agree.",
"if",
"is_list",
"and",
"len",
"(",
"value",
")",
">",
"0",
":",
"exclude_values",
"=",
"set",
"(",
"value_pb",
".",
"exclude_from_indexes",
"for",
"value_pb",
"in",
"value_pb",
".",
"array_value",
".",
"values",
")",
"if",
"len",
"(",
"exclude_values",
")",
"!=",
"1",
":",
"raise",
"ValueError",
"(",
"\"For an array_value, subvalues must either \"",
"\"all be indexed or all excluded from \"",
"\"indexes.\"",
")",
"if",
"exclude_values",
".",
"pop",
"(",
")",
":",
"exclude_from_indexes",
".",
"append",
"(",
"prop_name",
")",
"else",
":",
"if",
"value_pb",
".",
"exclude_from_indexes",
":",
"exclude_from_indexes",
".",
"append",
"(",
"prop_name",
")",
"entity",
"=",
"Entity",
"(",
"key",
"=",
"key",
",",
"exclude_from_indexes",
"=",
"exclude_from_indexes",
")",
"entity",
".",
"update",
"(",
"entity_props",
")",
"entity",
".",
"_meanings",
".",
"update",
"(",
"entity_meanings",
")",
"return",
"entity"
] |
Factory method for creating an entity based on a protobuf.
The protobuf should be one returned from the Cloud Datastore
Protobuf API.
:type pb: :class:`.entity_pb2.Entity`
:param pb: The Protobuf representing the entity.
:rtype: :class:`google.cloud.datastore.entity.Entity`
:returns: The entity derived from the protobuf.
|
[
"Factory",
"method",
"for",
"creating",
"an",
"entity",
"based",
"on",
"a",
"protobuf",
"."
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/datastore/google/cloud/datastore/helpers.py#L105-L159
|
train
|
googleapis/google-cloud-python
|
datastore/google/cloud/datastore/helpers.py
|
_set_pb_meaning_from_entity
|
def _set_pb_meaning_from_entity(entity, name, value, value_pb, is_list=False):
"""Add meaning information (from an entity) to a protobuf.
:type entity: :class:`google.cloud.datastore.entity.Entity`
:param entity: The entity to be turned into a protobuf.
:type name: str
:param name: The name of the property.
:type value: object
:param value: The current value stored as property ``name``.
:type value_pb: :class:`.entity_pb2.Value`
:param value_pb: The protobuf value to add meaning / meanings to.
:type is_list: bool
:param is_list: (Optional) Boolean indicating if the ``value`` is
a list value.
"""
if name not in entity._meanings:
return
meaning, orig_value = entity._meanings[name]
# Only add the meaning back to the protobuf if the value is
# unchanged from when it was originally read from the API.
if orig_value is not value:
return
# For lists, we set meaning on each sub-element.
if is_list:
if not isinstance(meaning, list):
meaning = itertools.repeat(meaning)
val_iter = six.moves.zip(value_pb.array_value.values, meaning)
for sub_value_pb, sub_meaning in val_iter:
if sub_meaning is not None:
sub_value_pb.meaning = sub_meaning
else:
value_pb.meaning = meaning
|
python
|
def _set_pb_meaning_from_entity(entity, name, value, value_pb, is_list=False):
"""Add meaning information (from an entity) to a protobuf.
:type entity: :class:`google.cloud.datastore.entity.Entity`
:param entity: The entity to be turned into a protobuf.
:type name: str
:param name: The name of the property.
:type value: object
:param value: The current value stored as property ``name``.
:type value_pb: :class:`.entity_pb2.Value`
:param value_pb: The protobuf value to add meaning / meanings to.
:type is_list: bool
:param is_list: (Optional) Boolean indicating if the ``value`` is
a list value.
"""
if name not in entity._meanings:
return
meaning, orig_value = entity._meanings[name]
# Only add the meaning back to the protobuf if the value is
# unchanged from when it was originally read from the API.
if orig_value is not value:
return
# For lists, we set meaning on each sub-element.
if is_list:
if not isinstance(meaning, list):
meaning = itertools.repeat(meaning)
val_iter = six.moves.zip(value_pb.array_value.values, meaning)
for sub_value_pb, sub_meaning in val_iter:
if sub_meaning is not None:
sub_value_pb.meaning = sub_meaning
else:
value_pb.meaning = meaning
|
[
"def",
"_set_pb_meaning_from_entity",
"(",
"entity",
",",
"name",
",",
"value",
",",
"value_pb",
",",
"is_list",
"=",
"False",
")",
":",
"if",
"name",
"not",
"in",
"entity",
".",
"_meanings",
":",
"return",
"meaning",
",",
"orig_value",
"=",
"entity",
".",
"_meanings",
"[",
"name",
"]",
"# Only add the meaning back to the protobuf if the value is",
"# unchanged from when it was originally read from the API.",
"if",
"orig_value",
"is",
"not",
"value",
":",
"return",
"# For lists, we set meaning on each sub-element.",
"if",
"is_list",
":",
"if",
"not",
"isinstance",
"(",
"meaning",
",",
"list",
")",
":",
"meaning",
"=",
"itertools",
".",
"repeat",
"(",
"meaning",
")",
"val_iter",
"=",
"six",
".",
"moves",
".",
"zip",
"(",
"value_pb",
".",
"array_value",
".",
"values",
",",
"meaning",
")",
"for",
"sub_value_pb",
",",
"sub_meaning",
"in",
"val_iter",
":",
"if",
"sub_meaning",
"is",
"not",
"None",
":",
"sub_value_pb",
".",
"meaning",
"=",
"sub_meaning",
"else",
":",
"value_pb",
".",
"meaning",
"=",
"meaning"
] |
Add meaning information (from an entity) to a protobuf.
:type entity: :class:`google.cloud.datastore.entity.Entity`
:param entity: The entity to be turned into a protobuf.
:type name: str
:param name: The name of the property.
:type value: object
:param value: The current value stored as property ``name``.
:type value_pb: :class:`.entity_pb2.Value`
:param value_pb: The protobuf value to add meaning / meanings to.
:type is_list: bool
:param is_list: (Optional) Boolean indicating if the ``value`` is
a list value.
|
[
"Add",
"meaning",
"information",
"(",
"from",
"an",
"entity",
")",
"to",
"a",
"protobuf",
"."
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/datastore/google/cloud/datastore/helpers.py#L162-L199
|
train
|
googleapis/google-cloud-python
|
datastore/google/cloud/datastore/helpers.py
|
entity_to_protobuf
|
def entity_to_protobuf(entity):
"""Converts an entity into a protobuf.
:type entity: :class:`google.cloud.datastore.entity.Entity`
:param entity: The entity to be turned into a protobuf.
:rtype: :class:`.entity_pb2.Entity`
:returns: The protobuf representing the entity.
"""
entity_pb = entity_pb2.Entity()
if entity.key is not None:
key_pb = entity.key.to_protobuf()
entity_pb.key.CopyFrom(key_pb)
for name, value in entity.items():
value_is_list = isinstance(value, list)
value_pb = _new_value_pb(entity_pb, name)
# Set the appropriate value.
_set_protobuf_value(value_pb, value)
# Add index information to protobuf.
if name in entity.exclude_from_indexes:
if not value_is_list:
value_pb.exclude_from_indexes = True
for sub_value in value_pb.array_value.values:
sub_value.exclude_from_indexes = True
# Add meaning information to protobuf.
_set_pb_meaning_from_entity(
entity, name, value, value_pb, is_list=value_is_list
)
return entity_pb
|
python
|
def entity_to_protobuf(entity):
"""Converts an entity into a protobuf.
:type entity: :class:`google.cloud.datastore.entity.Entity`
:param entity: The entity to be turned into a protobuf.
:rtype: :class:`.entity_pb2.Entity`
:returns: The protobuf representing the entity.
"""
entity_pb = entity_pb2.Entity()
if entity.key is not None:
key_pb = entity.key.to_protobuf()
entity_pb.key.CopyFrom(key_pb)
for name, value in entity.items():
value_is_list = isinstance(value, list)
value_pb = _new_value_pb(entity_pb, name)
# Set the appropriate value.
_set_protobuf_value(value_pb, value)
# Add index information to protobuf.
if name in entity.exclude_from_indexes:
if not value_is_list:
value_pb.exclude_from_indexes = True
for sub_value in value_pb.array_value.values:
sub_value.exclude_from_indexes = True
# Add meaning information to protobuf.
_set_pb_meaning_from_entity(
entity, name, value, value_pb, is_list=value_is_list
)
return entity_pb
|
[
"def",
"entity_to_protobuf",
"(",
"entity",
")",
":",
"entity_pb",
"=",
"entity_pb2",
".",
"Entity",
"(",
")",
"if",
"entity",
".",
"key",
"is",
"not",
"None",
":",
"key_pb",
"=",
"entity",
".",
"key",
".",
"to_protobuf",
"(",
")",
"entity_pb",
".",
"key",
".",
"CopyFrom",
"(",
"key_pb",
")",
"for",
"name",
",",
"value",
"in",
"entity",
".",
"items",
"(",
")",
":",
"value_is_list",
"=",
"isinstance",
"(",
"value",
",",
"list",
")",
"value_pb",
"=",
"_new_value_pb",
"(",
"entity_pb",
",",
"name",
")",
"# Set the appropriate value.",
"_set_protobuf_value",
"(",
"value_pb",
",",
"value",
")",
"# Add index information to protobuf.",
"if",
"name",
"in",
"entity",
".",
"exclude_from_indexes",
":",
"if",
"not",
"value_is_list",
":",
"value_pb",
".",
"exclude_from_indexes",
"=",
"True",
"for",
"sub_value",
"in",
"value_pb",
".",
"array_value",
".",
"values",
":",
"sub_value",
".",
"exclude_from_indexes",
"=",
"True",
"# Add meaning information to protobuf.",
"_set_pb_meaning_from_entity",
"(",
"entity",
",",
"name",
",",
"value",
",",
"value_pb",
",",
"is_list",
"=",
"value_is_list",
")",
"return",
"entity_pb"
] |
Converts an entity into a protobuf.
:type entity: :class:`google.cloud.datastore.entity.Entity`
:param entity: The entity to be turned into a protobuf.
:rtype: :class:`.entity_pb2.Entity`
:returns: The protobuf representing the entity.
|
[
"Converts",
"an",
"entity",
"into",
"a",
"protobuf",
"."
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/datastore/google/cloud/datastore/helpers.py#L202-L236
|
train
|
googleapis/google-cloud-python
|
datastore/google/cloud/datastore/helpers.py
|
get_read_options
|
def get_read_options(eventual, transaction_id):
"""Validate rules for read options, and assign to the request.
Helper method for ``lookup()`` and ``run_query``.
:type eventual: bool
:param eventual: Flag indicating if ``EVENTUAL`` or ``STRONG``
consistency should be used.
:type transaction_id: bytes
:param transaction_id: A transaction identifier (may be null).
:rtype: :class:`.datastore_pb2.ReadOptions`
:returns: The read options corresponding to the inputs.
:raises: :class:`ValueError` if ``eventual`` is ``True`` and the
``transaction_id`` is not ``None``.
"""
if transaction_id is None:
if eventual:
return datastore_pb2.ReadOptions(
read_consistency=datastore_pb2.ReadOptions.EVENTUAL
)
else:
return datastore_pb2.ReadOptions()
else:
if eventual:
raise ValueError("eventual must be False when in a transaction")
else:
return datastore_pb2.ReadOptions(transaction=transaction_id)
|
python
|
def get_read_options(eventual, transaction_id):
"""Validate rules for read options, and assign to the request.
Helper method for ``lookup()`` and ``run_query``.
:type eventual: bool
:param eventual: Flag indicating if ``EVENTUAL`` or ``STRONG``
consistency should be used.
:type transaction_id: bytes
:param transaction_id: A transaction identifier (may be null).
:rtype: :class:`.datastore_pb2.ReadOptions`
:returns: The read options corresponding to the inputs.
:raises: :class:`ValueError` if ``eventual`` is ``True`` and the
``transaction_id`` is not ``None``.
"""
if transaction_id is None:
if eventual:
return datastore_pb2.ReadOptions(
read_consistency=datastore_pb2.ReadOptions.EVENTUAL
)
else:
return datastore_pb2.ReadOptions()
else:
if eventual:
raise ValueError("eventual must be False when in a transaction")
else:
return datastore_pb2.ReadOptions(transaction=transaction_id)
|
[
"def",
"get_read_options",
"(",
"eventual",
",",
"transaction_id",
")",
":",
"if",
"transaction_id",
"is",
"None",
":",
"if",
"eventual",
":",
"return",
"datastore_pb2",
".",
"ReadOptions",
"(",
"read_consistency",
"=",
"datastore_pb2",
".",
"ReadOptions",
".",
"EVENTUAL",
")",
"else",
":",
"return",
"datastore_pb2",
".",
"ReadOptions",
"(",
")",
"else",
":",
"if",
"eventual",
":",
"raise",
"ValueError",
"(",
"\"eventual must be False when in a transaction\"",
")",
"else",
":",
"return",
"datastore_pb2",
".",
"ReadOptions",
"(",
"transaction",
"=",
"transaction_id",
")"
] |
Validate rules for read options, and assign to the request.
Helper method for ``lookup()`` and ``run_query``.
:type eventual: bool
:param eventual: Flag indicating if ``EVENTUAL`` or ``STRONG``
consistency should be used.
:type transaction_id: bytes
:param transaction_id: A transaction identifier (may be null).
:rtype: :class:`.datastore_pb2.ReadOptions`
:returns: The read options corresponding to the inputs.
:raises: :class:`ValueError` if ``eventual`` is ``True`` and the
``transaction_id`` is not ``None``.
|
[
"Validate",
"rules",
"for",
"read",
"options",
"and",
"assign",
"to",
"the",
"request",
"."
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/datastore/google/cloud/datastore/helpers.py#L239-L267
|
train
|
googleapis/google-cloud-python
|
datastore/google/cloud/datastore/helpers.py
|
key_from_protobuf
|
def key_from_protobuf(pb):
"""Factory method for creating a key based on a protobuf.
The protobuf should be one returned from the Cloud Datastore
Protobuf API.
:type pb: :class:`.entity_pb2.Key`
:param pb: The Protobuf representing the key.
:rtype: :class:`google.cloud.datastore.key.Key`
:returns: a new `Key` instance
"""
path_args = []
for element in pb.path:
path_args.append(element.kind)
if element.id: # Simple field (int64)
path_args.append(element.id)
# This is safe: we expect proto objects returned will only have
# one of `name` or `id` set.
if element.name: # Simple field (string)
path_args.append(element.name)
project = None
if pb.partition_id.project_id: # Simple field (string)
project = pb.partition_id.project_id
namespace = None
if pb.partition_id.namespace_id: # Simple field (string)
namespace = pb.partition_id.namespace_id
return Key(*path_args, namespace=namespace, project=project)
|
python
|
def key_from_protobuf(pb):
"""Factory method for creating a key based on a protobuf.
The protobuf should be one returned from the Cloud Datastore
Protobuf API.
:type pb: :class:`.entity_pb2.Key`
:param pb: The Protobuf representing the key.
:rtype: :class:`google.cloud.datastore.key.Key`
:returns: a new `Key` instance
"""
path_args = []
for element in pb.path:
path_args.append(element.kind)
if element.id: # Simple field (int64)
path_args.append(element.id)
# This is safe: we expect proto objects returned will only have
# one of `name` or `id` set.
if element.name: # Simple field (string)
path_args.append(element.name)
project = None
if pb.partition_id.project_id: # Simple field (string)
project = pb.partition_id.project_id
namespace = None
if pb.partition_id.namespace_id: # Simple field (string)
namespace = pb.partition_id.namespace_id
return Key(*path_args, namespace=namespace, project=project)
|
[
"def",
"key_from_protobuf",
"(",
"pb",
")",
":",
"path_args",
"=",
"[",
"]",
"for",
"element",
"in",
"pb",
".",
"path",
":",
"path_args",
".",
"append",
"(",
"element",
".",
"kind",
")",
"if",
"element",
".",
"id",
":",
"# Simple field (int64)",
"path_args",
".",
"append",
"(",
"element",
".",
"id",
")",
"# This is safe: we expect proto objects returned will only have",
"# one of `name` or `id` set.",
"if",
"element",
".",
"name",
":",
"# Simple field (string)",
"path_args",
".",
"append",
"(",
"element",
".",
"name",
")",
"project",
"=",
"None",
"if",
"pb",
".",
"partition_id",
".",
"project_id",
":",
"# Simple field (string)",
"project",
"=",
"pb",
".",
"partition_id",
".",
"project_id",
"namespace",
"=",
"None",
"if",
"pb",
".",
"partition_id",
".",
"namespace_id",
":",
"# Simple field (string)",
"namespace",
"=",
"pb",
".",
"partition_id",
".",
"namespace_id",
"return",
"Key",
"(",
"*",
"path_args",
",",
"namespace",
"=",
"namespace",
",",
"project",
"=",
"project",
")"
] |
Factory method for creating a key based on a protobuf.
The protobuf should be one returned from the Cloud Datastore
Protobuf API.
:type pb: :class:`.entity_pb2.Key`
:param pb: The Protobuf representing the key.
:rtype: :class:`google.cloud.datastore.key.Key`
:returns: a new `Key` instance
|
[
"Factory",
"method",
"for",
"creating",
"a",
"key",
"based",
"on",
"a",
"protobuf",
"."
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/datastore/google/cloud/datastore/helpers.py#L270-L299
|
train
|
googleapis/google-cloud-python
|
datastore/google/cloud/datastore/helpers.py
|
_pb_attr_value
|
def _pb_attr_value(val):
"""Given a value, return the protobuf attribute name and proper value.
The Protobuf API uses different attribute names based on value types
rather than inferring the type. This function simply determines the
proper attribute name based on the type of the value provided and
returns the attribute name as well as a properly formatted value.
Certain value types need to be coerced into a different type (such
as a `datetime.datetime` into an integer timestamp, or a
`google.cloud.datastore.key.Key` into a Protobuf representation. This
function handles that for you.
.. note::
Values which are "text" ('unicode' in Python2, 'str' in Python3) map
to 'string_value' in the datastore; values which are "bytes"
('str' in Python2, 'bytes' in Python3) map to 'blob_value'.
For example:
>>> _pb_attr_value(1234)
('integer_value', 1234)
>>> _pb_attr_value('my_string')
('string_value', 'my_string')
:type val:
:class:`datetime.datetime`, :class:`google.cloud.datastore.key.Key`,
bool, float, integer, bytes, str, unicode,
:class:`google.cloud.datastore.entity.Entity`, dict, list,
:class:`google.cloud.datastore.helpers.GeoPoint`, NoneType
:param val: The value to be scrutinized.
:rtype: tuple
:returns: A tuple of the attribute name and proper value type.
"""
if isinstance(val, datetime.datetime):
name = "timestamp"
value = _datetime_to_pb_timestamp(val)
elif isinstance(val, Key):
name, value = "key", val.to_protobuf()
elif isinstance(val, bool):
name, value = "boolean", val
elif isinstance(val, float):
name, value = "double", val
elif isinstance(val, six.integer_types):
name, value = "integer", val
elif isinstance(val, six.text_type):
name, value = "string", val
elif isinstance(val, six.binary_type):
name, value = "blob", val
elif isinstance(val, Entity):
name, value = "entity", val
elif isinstance(val, dict):
entity_val = Entity(key=None)
entity_val.update(val)
name, value = "entity", entity_val
elif isinstance(val, list):
name, value = "array", val
elif isinstance(val, GeoPoint):
name, value = "geo_point", val.to_protobuf()
elif val is None:
name, value = "null", struct_pb2.NULL_VALUE
else:
raise ValueError("Unknown protobuf attr type", type(val))
return name + "_value", value
|
python
|
def _pb_attr_value(val):
"""Given a value, return the protobuf attribute name and proper value.
The Protobuf API uses different attribute names based on value types
rather than inferring the type. This function simply determines the
proper attribute name based on the type of the value provided and
returns the attribute name as well as a properly formatted value.
Certain value types need to be coerced into a different type (such
as a `datetime.datetime` into an integer timestamp, or a
`google.cloud.datastore.key.Key` into a Protobuf representation. This
function handles that for you.
.. note::
Values which are "text" ('unicode' in Python2, 'str' in Python3) map
to 'string_value' in the datastore; values which are "bytes"
('str' in Python2, 'bytes' in Python3) map to 'blob_value'.
For example:
>>> _pb_attr_value(1234)
('integer_value', 1234)
>>> _pb_attr_value('my_string')
('string_value', 'my_string')
:type val:
:class:`datetime.datetime`, :class:`google.cloud.datastore.key.Key`,
bool, float, integer, bytes, str, unicode,
:class:`google.cloud.datastore.entity.Entity`, dict, list,
:class:`google.cloud.datastore.helpers.GeoPoint`, NoneType
:param val: The value to be scrutinized.
:rtype: tuple
:returns: A tuple of the attribute name and proper value type.
"""
if isinstance(val, datetime.datetime):
name = "timestamp"
value = _datetime_to_pb_timestamp(val)
elif isinstance(val, Key):
name, value = "key", val.to_protobuf()
elif isinstance(val, bool):
name, value = "boolean", val
elif isinstance(val, float):
name, value = "double", val
elif isinstance(val, six.integer_types):
name, value = "integer", val
elif isinstance(val, six.text_type):
name, value = "string", val
elif isinstance(val, six.binary_type):
name, value = "blob", val
elif isinstance(val, Entity):
name, value = "entity", val
elif isinstance(val, dict):
entity_val = Entity(key=None)
entity_val.update(val)
name, value = "entity", entity_val
elif isinstance(val, list):
name, value = "array", val
elif isinstance(val, GeoPoint):
name, value = "geo_point", val.to_protobuf()
elif val is None:
name, value = "null", struct_pb2.NULL_VALUE
else:
raise ValueError("Unknown protobuf attr type", type(val))
return name + "_value", value
|
[
"def",
"_pb_attr_value",
"(",
"val",
")",
":",
"if",
"isinstance",
"(",
"val",
",",
"datetime",
".",
"datetime",
")",
":",
"name",
"=",
"\"timestamp\"",
"value",
"=",
"_datetime_to_pb_timestamp",
"(",
"val",
")",
"elif",
"isinstance",
"(",
"val",
",",
"Key",
")",
":",
"name",
",",
"value",
"=",
"\"key\"",
",",
"val",
".",
"to_protobuf",
"(",
")",
"elif",
"isinstance",
"(",
"val",
",",
"bool",
")",
":",
"name",
",",
"value",
"=",
"\"boolean\"",
",",
"val",
"elif",
"isinstance",
"(",
"val",
",",
"float",
")",
":",
"name",
",",
"value",
"=",
"\"double\"",
",",
"val",
"elif",
"isinstance",
"(",
"val",
",",
"six",
".",
"integer_types",
")",
":",
"name",
",",
"value",
"=",
"\"integer\"",
",",
"val",
"elif",
"isinstance",
"(",
"val",
",",
"six",
".",
"text_type",
")",
":",
"name",
",",
"value",
"=",
"\"string\"",
",",
"val",
"elif",
"isinstance",
"(",
"val",
",",
"six",
".",
"binary_type",
")",
":",
"name",
",",
"value",
"=",
"\"blob\"",
",",
"val",
"elif",
"isinstance",
"(",
"val",
",",
"Entity",
")",
":",
"name",
",",
"value",
"=",
"\"entity\"",
",",
"val",
"elif",
"isinstance",
"(",
"val",
",",
"dict",
")",
":",
"entity_val",
"=",
"Entity",
"(",
"key",
"=",
"None",
")",
"entity_val",
".",
"update",
"(",
"val",
")",
"name",
",",
"value",
"=",
"\"entity\"",
",",
"entity_val",
"elif",
"isinstance",
"(",
"val",
",",
"list",
")",
":",
"name",
",",
"value",
"=",
"\"array\"",
",",
"val",
"elif",
"isinstance",
"(",
"val",
",",
"GeoPoint",
")",
":",
"name",
",",
"value",
"=",
"\"geo_point\"",
",",
"val",
".",
"to_protobuf",
"(",
")",
"elif",
"val",
"is",
"None",
":",
"name",
",",
"value",
"=",
"\"null\"",
",",
"struct_pb2",
".",
"NULL_VALUE",
"else",
":",
"raise",
"ValueError",
"(",
"\"Unknown protobuf attr type\"",
",",
"type",
"(",
"val",
")",
")",
"return",
"name",
"+",
"\"_value\"",
",",
"value"
] |
Given a value, return the protobuf attribute name and proper value.
The Protobuf API uses different attribute names based on value types
rather than inferring the type. This function simply determines the
proper attribute name based on the type of the value provided and
returns the attribute name as well as a properly formatted value.
Certain value types need to be coerced into a different type (such
as a `datetime.datetime` into an integer timestamp, or a
`google.cloud.datastore.key.Key` into a Protobuf representation. This
function handles that for you.
.. note::
Values which are "text" ('unicode' in Python2, 'str' in Python3) map
to 'string_value' in the datastore; values which are "bytes"
('str' in Python2, 'bytes' in Python3) map to 'blob_value'.
For example:
>>> _pb_attr_value(1234)
('integer_value', 1234)
>>> _pb_attr_value('my_string')
('string_value', 'my_string')
:type val:
:class:`datetime.datetime`, :class:`google.cloud.datastore.key.Key`,
bool, float, integer, bytes, str, unicode,
:class:`google.cloud.datastore.entity.Entity`, dict, list,
:class:`google.cloud.datastore.helpers.GeoPoint`, NoneType
:param val: The value to be scrutinized.
:rtype: tuple
:returns: A tuple of the attribute name and proper value type.
|
[
"Given",
"a",
"value",
"return",
"the",
"protobuf",
"attribute",
"name",
"and",
"proper",
"value",
"."
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/datastore/google/cloud/datastore/helpers.py#L302-L368
|
train
|
googleapis/google-cloud-python
|
datastore/google/cloud/datastore/helpers.py
|
_get_value_from_value_pb
|
def _get_value_from_value_pb(value_pb):
"""Given a protobuf for a Value, get the correct value.
The Cloud Datastore Protobuf API returns a Property Protobuf which
has one value set and the rest blank. This function retrieves the
the one value provided.
Some work is done to coerce the return value into a more useful type
(particularly in the case of a timestamp value, or a key value).
:type value_pb: :class:`.entity_pb2.Value`
:param value_pb: The Value Protobuf.
:rtype: object
:returns: The value provided by the Protobuf.
:raises: :class:`ValueError <exceptions.ValueError>` if no value type
has been set.
"""
value_type = value_pb.WhichOneof("value_type")
if value_type == "timestamp_value":
result = _pb_timestamp_to_datetime(value_pb.timestamp_value)
elif value_type == "key_value":
result = key_from_protobuf(value_pb.key_value)
elif value_type == "boolean_value":
result = value_pb.boolean_value
elif value_type == "double_value":
result = value_pb.double_value
elif value_type == "integer_value":
result = value_pb.integer_value
elif value_type == "string_value":
result = value_pb.string_value
elif value_type == "blob_value":
result = value_pb.blob_value
elif value_type == "entity_value":
result = entity_from_protobuf(value_pb.entity_value)
elif value_type == "array_value":
result = [
_get_value_from_value_pb(value) for value in value_pb.array_value.values
]
elif value_type == "geo_point_value":
result = GeoPoint(
value_pb.geo_point_value.latitude, value_pb.geo_point_value.longitude
)
elif value_type == "null_value":
result = None
else:
raise ValueError("Value protobuf did not have any value set")
return result
|
python
|
def _get_value_from_value_pb(value_pb):
"""Given a protobuf for a Value, get the correct value.
The Cloud Datastore Protobuf API returns a Property Protobuf which
has one value set and the rest blank. This function retrieves the
the one value provided.
Some work is done to coerce the return value into a more useful type
(particularly in the case of a timestamp value, or a key value).
:type value_pb: :class:`.entity_pb2.Value`
:param value_pb: The Value Protobuf.
:rtype: object
:returns: The value provided by the Protobuf.
:raises: :class:`ValueError <exceptions.ValueError>` if no value type
has been set.
"""
value_type = value_pb.WhichOneof("value_type")
if value_type == "timestamp_value":
result = _pb_timestamp_to_datetime(value_pb.timestamp_value)
elif value_type == "key_value":
result = key_from_protobuf(value_pb.key_value)
elif value_type == "boolean_value":
result = value_pb.boolean_value
elif value_type == "double_value":
result = value_pb.double_value
elif value_type == "integer_value":
result = value_pb.integer_value
elif value_type == "string_value":
result = value_pb.string_value
elif value_type == "blob_value":
result = value_pb.blob_value
elif value_type == "entity_value":
result = entity_from_protobuf(value_pb.entity_value)
elif value_type == "array_value":
result = [
_get_value_from_value_pb(value) for value in value_pb.array_value.values
]
elif value_type == "geo_point_value":
result = GeoPoint(
value_pb.geo_point_value.latitude, value_pb.geo_point_value.longitude
)
elif value_type == "null_value":
result = None
else:
raise ValueError("Value protobuf did not have any value set")
return result
|
[
"def",
"_get_value_from_value_pb",
"(",
"value_pb",
")",
":",
"value_type",
"=",
"value_pb",
".",
"WhichOneof",
"(",
"\"value_type\"",
")",
"if",
"value_type",
"==",
"\"timestamp_value\"",
":",
"result",
"=",
"_pb_timestamp_to_datetime",
"(",
"value_pb",
".",
"timestamp_value",
")",
"elif",
"value_type",
"==",
"\"key_value\"",
":",
"result",
"=",
"key_from_protobuf",
"(",
"value_pb",
".",
"key_value",
")",
"elif",
"value_type",
"==",
"\"boolean_value\"",
":",
"result",
"=",
"value_pb",
".",
"boolean_value",
"elif",
"value_type",
"==",
"\"double_value\"",
":",
"result",
"=",
"value_pb",
".",
"double_value",
"elif",
"value_type",
"==",
"\"integer_value\"",
":",
"result",
"=",
"value_pb",
".",
"integer_value",
"elif",
"value_type",
"==",
"\"string_value\"",
":",
"result",
"=",
"value_pb",
".",
"string_value",
"elif",
"value_type",
"==",
"\"blob_value\"",
":",
"result",
"=",
"value_pb",
".",
"blob_value",
"elif",
"value_type",
"==",
"\"entity_value\"",
":",
"result",
"=",
"entity_from_protobuf",
"(",
"value_pb",
".",
"entity_value",
")",
"elif",
"value_type",
"==",
"\"array_value\"",
":",
"result",
"=",
"[",
"_get_value_from_value_pb",
"(",
"value",
")",
"for",
"value",
"in",
"value_pb",
".",
"array_value",
".",
"values",
"]",
"elif",
"value_type",
"==",
"\"geo_point_value\"",
":",
"result",
"=",
"GeoPoint",
"(",
"value_pb",
".",
"geo_point_value",
".",
"latitude",
",",
"value_pb",
".",
"geo_point_value",
".",
"longitude",
")",
"elif",
"value_type",
"==",
"\"null_value\"",
":",
"result",
"=",
"None",
"else",
":",
"raise",
"ValueError",
"(",
"\"Value protobuf did not have any value set\"",
")",
"return",
"result"
] |
Given a protobuf for a Value, get the correct value.
The Cloud Datastore Protobuf API returns a Property Protobuf which
has one value set and the rest blank. This function retrieves the
the one value provided.
Some work is done to coerce the return value into a more useful type
(particularly in the case of a timestamp value, or a key value).
:type value_pb: :class:`.entity_pb2.Value`
:param value_pb: The Value Protobuf.
:rtype: object
:returns: The value provided by the Protobuf.
:raises: :class:`ValueError <exceptions.ValueError>` if no value type
has been set.
|
[
"Given",
"a",
"protobuf",
"for",
"a",
"Value",
"get",
"the",
"correct",
"value",
"."
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/datastore/google/cloud/datastore/helpers.py#L371-L431
|
train
|
googleapis/google-cloud-python
|
datastore/google/cloud/datastore/helpers.py
|
_set_protobuf_value
|
def _set_protobuf_value(value_pb, val):
"""Assign 'val' to the correct subfield of 'value_pb'.
The Protobuf API uses different attribute names based on value types
rather than inferring the type.
Some value types (entities, keys, lists) cannot be directly
assigned; this function handles them correctly.
:type value_pb: :class:`.entity_pb2.Value`
:param value_pb: The value protobuf to which the value is being assigned.
:type val: :class:`datetime.datetime`, boolean, float, integer, string,
:class:`google.cloud.datastore.key.Key`,
:class:`google.cloud.datastore.entity.Entity`
:param val: The value to be assigned.
"""
attr, val = _pb_attr_value(val)
if attr == "key_value":
value_pb.key_value.CopyFrom(val)
elif attr == "timestamp_value":
value_pb.timestamp_value.CopyFrom(val)
elif attr == "entity_value":
entity_pb = entity_to_protobuf(val)
value_pb.entity_value.CopyFrom(entity_pb)
elif attr == "array_value":
if len(val) == 0:
array_value = entity_pb2.ArrayValue(values=[])
value_pb.array_value.CopyFrom(array_value)
else:
l_pb = value_pb.array_value.values
for item in val:
i_pb = l_pb.add()
_set_protobuf_value(i_pb, item)
elif attr == "geo_point_value":
value_pb.geo_point_value.CopyFrom(val)
else: # scalar, just assign
setattr(value_pb, attr, val)
|
python
|
def _set_protobuf_value(value_pb, val):
"""Assign 'val' to the correct subfield of 'value_pb'.
The Protobuf API uses different attribute names based on value types
rather than inferring the type.
Some value types (entities, keys, lists) cannot be directly
assigned; this function handles them correctly.
:type value_pb: :class:`.entity_pb2.Value`
:param value_pb: The value protobuf to which the value is being assigned.
:type val: :class:`datetime.datetime`, boolean, float, integer, string,
:class:`google.cloud.datastore.key.Key`,
:class:`google.cloud.datastore.entity.Entity`
:param val: The value to be assigned.
"""
attr, val = _pb_attr_value(val)
if attr == "key_value":
value_pb.key_value.CopyFrom(val)
elif attr == "timestamp_value":
value_pb.timestamp_value.CopyFrom(val)
elif attr == "entity_value":
entity_pb = entity_to_protobuf(val)
value_pb.entity_value.CopyFrom(entity_pb)
elif attr == "array_value":
if len(val) == 0:
array_value = entity_pb2.ArrayValue(values=[])
value_pb.array_value.CopyFrom(array_value)
else:
l_pb = value_pb.array_value.values
for item in val:
i_pb = l_pb.add()
_set_protobuf_value(i_pb, item)
elif attr == "geo_point_value":
value_pb.geo_point_value.CopyFrom(val)
else: # scalar, just assign
setattr(value_pb, attr, val)
|
[
"def",
"_set_protobuf_value",
"(",
"value_pb",
",",
"val",
")",
":",
"attr",
",",
"val",
"=",
"_pb_attr_value",
"(",
"val",
")",
"if",
"attr",
"==",
"\"key_value\"",
":",
"value_pb",
".",
"key_value",
".",
"CopyFrom",
"(",
"val",
")",
"elif",
"attr",
"==",
"\"timestamp_value\"",
":",
"value_pb",
".",
"timestamp_value",
".",
"CopyFrom",
"(",
"val",
")",
"elif",
"attr",
"==",
"\"entity_value\"",
":",
"entity_pb",
"=",
"entity_to_protobuf",
"(",
"val",
")",
"value_pb",
".",
"entity_value",
".",
"CopyFrom",
"(",
"entity_pb",
")",
"elif",
"attr",
"==",
"\"array_value\"",
":",
"if",
"len",
"(",
"val",
")",
"==",
"0",
":",
"array_value",
"=",
"entity_pb2",
".",
"ArrayValue",
"(",
"values",
"=",
"[",
"]",
")",
"value_pb",
".",
"array_value",
".",
"CopyFrom",
"(",
"array_value",
")",
"else",
":",
"l_pb",
"=",
"value_pb",
".",
"array_value",
".",
"values",
"for",
"item",
"in",
"val",
":",
"i_pb",
"=",
"l_pb",
".",
"add",
"(",
")",
"_set_protobuf_value",
"(",
"i_pb",
",",
"item",
")",
"elif",
"attr",
"==",
"\"geo_point_value\"",
":",
"value_pb",
".",
"geo_point_value",
".",
"CopyFrom",
"(",
"val",
")",
"else",
":",
"# scalar, just assign",
"setattr",
"(",
"value_pb",
",",
"attr",
",",
"val",
")"
] |
Assign 'val' to the correct subfield of 'value_pb'.
The Protobuf API uses different attribute names based on value types
rather than inferring the type.
Some value types (entities, keys, lists) cannot be directly
assigned; this function handles them correctly.
:type value_pb: :class:`.entity_pb2.Value`
:param value_pb: The value protobuf to which the value is being assigned.
:type val: :class:`datetime.datetime`, boolean, float, integer, string,
:class:`google.cloud.datastore.key.Key`,
:class:`google.cloud.datastore.entity.Entity`
:param val: The value to be assigned.
|
[
"Assign",
"val",
"to",
"the",
"correct",
"subfield",
"of",
"value_pb",
"."
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/datastore/google/cloud/datastore/helpers.py#L434-L471
|
train
|
googleapis/google-cloud-python
|
datastore/google/cloud/datastore/helpers.py
|
GeoPoint.to_protobuf
|
def to_protobuf(self):
"""Convert the current object to protobuf.
:rtype: :class:`google.type.latlng_pb2.LatLng`.
:returns: The current point as a protobuf.
"""
return latlng_pb2.LatLng(latitude=self.latitude, longitude=self.longitude)
|
python
|
def to_protobuf(self):
"""Convert the current object to protobuf.
:rtype: :class:`google.type.latlng_pb2.LatLng`.
:returns: The current point as a protobuf.
"""
return latlng_pb2.LatLng(latitude=self.latitude, longitude=self.longitude)
|
[
"def",
"to_protobuf",
"(",
"self",
")",
":",
"return",
"latlng_pb2",
".",
"LatLng",
"(",
"latitude",
"=",
"self",
".",
"latitude",
",",
"longitude",
"=",
"self",
".",
"longitude",
")"
] |
Convert the current object to protobuf.
:rtype: :class:`google.type.latlng_pb2.LatLng`.
:returns: The current point as a protobuf.
|
[
"Convert",
"the",
"current",
"object",
"to",
"protobuf",
"."
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/datastore/google/cloud/datastore/helpers.py#L488-L494
|
train
|
googleapis/google-cloud-python
|
logging/google/cloud/logging/entries.py
|
LogEntry.from_api_repr
|
def from_api_repr(cls, resource, client, loggers=None):
"""Factory: construct an entry given its API representation
:type resource: dict
:param resource: text entry resource representation returned from
the API
:type client: :class:`google.cloud.logging.client.Client`
:param client: Client which holds credentials and project
configuration.
:type loggers: dict
:param loggers:
(Optional) A mapping of logger fullnames -> loggers. If not
passed, the entry will have a newly-created logger.
:rtype: :class:`google.cloud.logging.entries.LogEntry`
:returns: Log entry parsed from ``resource``.
"""
if loggers is None:
loggers = {}
logger_fullname = resource["logName"]
logger = loggers.get(logger_fullname)
if logger is None:
logger_name = logger_name_from_path(logger_fullname)
logger = loggers[logger_fullname] = client.logger(logger_name)
payload = cls._extract_payload(resource)
insert_id = resource.get("insertId")
timestamp = resource.get("timestamp")
if timestamp is not None:
timestamp = _rfc3339_nanos_to_datetime(timestamp)
labels = resource.get("labels")
severity = resource.get("severity")
http_request = resource.get("httpRequest")
trace = resource.get("trace")
span_id = resource.get("spanId")
trace_sampled = resource.get("traceSampled")
source_location = resource.get("sourceLocation")
if source_location is not None:
line = source_location.pop("line", None)
source_location["line"] = _int_or_none(line)
operation = resource.get("operation")
monitored_resource_dict = resource.get("resource")
monitored_resource = None
if monitored_resource_dict is not None:
monitored_resource = Resource._from_dict(monitored_resource_dict)
inst = cls(
log_name=logger_fullname,
insert_id=insert_id,
timestamp=timestamp,
labels=labels,
severity=severity,
http_request=http_request,
resource=monitored_resource,
trace=trace,
span_id=span_id,
trace_sampled=trace_sampled,
source_location=source_location,
operation=operation,
logger=logger,
payload=payload,
)
received = resource.get("receiveTimestamp")
if received is not None:
inst.received_timestamp = _rfc3339_nanos_to_datetime(received)
return inst
|
python
|
def from_api_repr(cls, resource, client, loggers=None):
"""Factory: construct an entry given its API representation
:type resource: dict
:param resource: text entry resource representation returned from
the API
:type client: :class:`google.cloud.logging.client.Client`
:param client: Client which holds credentials and project
configuration.
:type loggers: dict
:param loggers:
(Optional) A mapping of logger fullnames -> loggers. If not
passed, the entry will have a newly-created logger.
:rtype: :class:`google.cloud.logging.entries.LogEntry`
:returns: Log entry parsed from ``resource``.
"""
if loggers is None:
loggers = {}
logger_fullname = resource["logName"]
logger = loggers.get(logger_fullname)
if logger is None:
logger_name = logger_name_from_path(logger_fullname)
logger = loggers[logger_fullname] = client.logger(logger_name)
payload = cls._extract_payload(resource)
insert_id = resource.get("insertId")
timestamp = resource.get("timestamp")
if timestamp is not None:
timestamp = _rfc3339_nanos_to_datetime(timestamp)
labels = resource.get("labels")
severity = resource.get("severity")
http_request = resource.get("httpRequest")
trace = resource.get("trace")
span_id = resource.get("spanId")
trace_sampled = resource.get("traceSampled")
source_location = resource.get("sourceLocation")
if source_location is not None:
line = source_location.pop("line", None)
source_location["line"] = _int_or_none(line)
operation = resource.get("operation")
monitored_resource_dict = resource.get("resource")
monitored_resource = None
if monitored_resource_dict is not None:
monitored_resource = Resource._from_dict(monitored_resource_dict)
inst = cls(
log_name=logger_fullname,
insert_id=insert_id,
timestamp=timestamp,
labels=labels,
severity=severity,
http_request=http_request,
resource=monitored_resource,
trace=trace,
span_id=span_id,
trace_sampled=trace_sampled,
source_location=source_location,
operation=operation,
logger=logger,
payload=payload,
)
received = resource.get("receiveTimestamp")
if received is not None:
inst.received_timestamp = _rfc3339_nanos_to_datetime(received)
return inst
|
[
"def",
"from_api_repr",
"(",
"cls",
",",
"resource",
",",
"client",
",",
"loggers",
"=",
"None",
")",
":",
"if",
"loggers",
"is",
"None",
":",
"loggers",
"=",
"{",
"}",
"logger_fullname",
"=",
"resource",
"[",
"\"logName\"",
"]",
"logger",
"=",
"loggers",
".",
"get",
"(",
"logger_fullname",
")",
"if",
"logger",
"is",
"None",
":",
"logger_name",
"=",
"logger_name_from_path",
"(",
"logger_fullname",
")",
"logger",
"=",
"loggers",
"[",
"logger_fullname",
"]",
"=",
"client",
".",
"logger",
"(",
"logger_name",
")",
"payload",
"=",
"cls",
".",
"_extract_payload",
"(",
"resource",
")",
"insert_id",
"=",
"resource",
".",
"get",
"(",
"\"insertId\"",
")",
"timestamp",
"=",
"resource",
".",
"get",
"(",
"\"timestamp\"",
")",
"if",
"timestamp",
"is",
"not",
"None",
":",
"timestamp",
"=",
"_rfc3339_nanos_to_datetime",
"(",
"timestamp",
")",
"labels",
"=",
"resource",
".",
"get",
"(",
"\"labels\"",
")",
"severity",
"=",
"resource",
".",
"get",
"(",
"\"severity\"",
")",
"http_request",
"=",
"resource",
".",
"get",
"(",
"\"httpRequest\"",
")",
"trace",
"=",
"resource",
".",
"get",
"(",
"\"trace\"",
")",
"span_id",
"=",
"resource",
".",
"get",
"(",
"\"spanId\"",
")",
"trace_sampled",
"=",
"resource",
".",
"get",
"(",
"\"traceSampled\"",
")",
"source_location",
"=",
"resource",
".",
"get",
"(",
"\"sourceLocation\"",
")",
"if",
"source_location",
"is",
"not",
"None",
":",
"line",
"=",
"source_location",
".",
"pop",
"(",
"\"line\"",
",",
"None",
")",
"source_location",
"[",
"\"line\"",
"]",
"=",
"_int_or_none",
"(",
"line",
")",
"operation",
"=",
"resource",
".",
"get",
"(",
"\"operation\"",
")",
"monitored_resource_dict",
"=",
"resource",
".",
"get",
"(",
"\"resource\"",
")",
"monitored_resource",
"=",
"None",
"if",
"monitored_resource_dict",
"is",
"not",
"None",
":",
"monitored_resource",
"=",
"Resource",
".",
"_from_dict",
"(",
"monitored_resource_dict",
")",
"inst",
"=",
"cls",
"(",
"log_name",
"=",
"logger_fullname",
",",
"insert_id",
"=",
"insert_id",
",",
"timestamp",
"=",
"timestamp",
",",
"labels",
"=",
"labels",
",",
"severity",
"=",
"severity",
",",
"http_request",
"=",
"http_request",
",",
"resource",
"=",
"monitored_resource",
",",
"trace",
"=",
"trace",
",",
"span_id",
"=",
"span_id",
",",
"trace_sampled",
"=",
"trace_sampled",
",",
"source_location",
"=",
"source_location",
",",
"operation",
"=",
"operation",
",",
"logger",
"=",
"logger",
",",
"payload",
"=",
"payload",
",",
")",
"received",
"=",
"resource",
".",
"get",
"(",
"\"receiveTimestamp\"",
")",
"if",
"received",
"is",
"not",
"None",
":",
"inst",
".",
"received_timestamp",
"=",
"_rfc3339_nanos_to_datetime",
"(",
"received",
")",
"return",
"inst"
] |
Factory: construct an entry given its API representation
:type resource: dict
:param resource: text entry resource representation returned from
the API
:type client: :class:`google.cloud.logging.client.Client`
:param client: Client which holds credentials and project
configuration.
:type loggers: dict
:param loggers:
(Optional) A mapping of logger fullnames -> loggers. If not
passed, the entry will have a newly-created logger.
:rtype: :class:`google.cloud.logging.entries.LogEntry`
:returns: Log entry parsed from ``resource``.
|
[
"Factory",
":",
"construct",
"an",
"entry",
"given",
"its",
"API",
"representation"
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/logging/google/cloud/logging/entries.py#L165-L232
|
train
|
googleapis/google-cloud-python
|
logging/google/cloud/logging/entries.py
|
LogEntry.to_api_repr
|
def to_api_repr(self):
"""API repr (JSON format) for entry.
"""
info = {}
if self.log_name is not None:
info["logName"] = self.log_name
if self.resource is not None:
info["resource"] = self.resource._to_dict()
if self.labels is not None:
info["labels"] = self.labels
if self.insert_id is not None:
info["insertId"] = self.insert_id
if self.severity is not None:
info["severity"] = self.severity
if self.http_request is not None:
info["httpRequest"] = self.http_request
if self.timestamp is not None:
info["timestamp"] = _datetime_to_rfc3339(self.timestamp)
if self.trace is not None:
info["trace"] = self.trace
if self.span_id is not None:
info["spanId"] = self.span_id
if self.trace_sampled is not None:
info["traceSampled"] = self.trace_sampled
if self.source_location is not None:
source_location = self.source_location.copy()
source_location["line"] = str(source_location.pop("line", 0))
info["sourceLocation"] = source_location
if self.operation is not None:
info["operation"] = self.operation
return info
|
python
|
def to_api_repr(self):
"""API repr (JSON format) for entry.
"""
info = {}
if self.log_name is not None:
info["logName"] = self.log_name
if self.resource is not None:
info["resource"] = self.resource._to_dict()
if self.labels is not None:
info["labels"] = self.labels
if self.insert_id is not None:
info["insertId"] = self.insert_id
if self.severity is not None:
info["severity"] = self.severity
if self.http_request is not None:
info["httpRequest"] = self.http_request
if self.timestamp is not None:
info["timestamp"] = _datetime_to_rfc3339(self.timestamp)
if self.trace is not None:
info["trace"] = self.trace
if self.span_id is not None:
info["spanId"] = self.span_id
if self.trace_sampled is not None:
info["traceSampled"] = self.trace_sampled
if self.source_location is not None:
source_location = self.source_location.copy()
source_location["line"] = str(source_location.pop("line", 0))
info["sourceLocation"] = source_location
if self.operation is not None:
info["operation"] = self.operation
return info
|
[
"def",
"to_api_repr",
"(",
"self",
")",
":",
"info",
"=",
"{",
"}",
"if",
"self",
".",
"log_name",
"is",
"not",
"None",
":",
"info",
"[",
"\"logName\"",
"]",
"=",
"self",
".",
"log_name",
"if",
"self",
".",
"resource",
"is",
"not",
"None",
":",
"info",
"[",
"\"resource\"",
"]",
"=",
"self",
".",
"resource",
".",
"_to_dict",
"(",
")",
"if",
"self",
".",
"labels",
"is",
"not",
"None",
":",
"info",
"[",
"\"labels\"",
"]",
"=",
"self",
".",
"labels",
"if",
"self",
".",
"insert_id",
"is",
"not",
"None",
":",
"info",
"[",
"\"insertId\"",
"]",
"=",
"self",
".",
"insert_id",
"if",
"self",
".",
"severity",
"is",
"not",
"None",
":",
"info",
"[",
"\"severity\"",
"]",
"=",
"self",
".",
"severity",
"if",
"self",
".",
"http_request",
"is",
"not",
"None",
":",
"info",
"[",
"\"httpRequest\"",
"]",
"=",
"self",
".",
"http_request",
"if",
"self",
".",
"timestamp",
"is",
"not",
"None",
":",
"info",
"[",
"\"timestamp\"",
"]",
"=",
"_datetime_to_rfc3339",
"(",
"self",
".",
"timestamp",
")",
"if",
"self",
".",
"trace",
"is",
"not",
"None",
":",
"info",
"[",
"\"trace\"",
"]",
"=",
"self",
".",
"trace",
"if",
"self",
".",
"span_id",
"is",
"not",
"None",
":",
"info",
"[",
"\"spanId\"",
"]",
"=",
"self",
".",
"span_id",
"if",
"self",
".",
"trace_sampled",
"is",
"not",
"None",
":",
"info",
"[",
"\"traceSampled\"",
"]",
"=",
"self",
".",
"trace_sampled",
"if",
"self",
".",
"source_location",
"is",
"not",
"None",
":",
"source_location",
"=",
"self",
".",
"source_location",
".",
"copy",
"(",
")",
"source_location",
"[",
"\"line\"",
"]",
"=",
"str",
"(",
"source_location",
".",
"pop",
"(",
"\"line\"",
",",
"0",
")",
")",
"info",
"[",
"\"sourceLocation\"",
"]",
"=",
"source_location",
"if",
"self",
".",
"operation",
"is",
"not",
"None",
":",
"info",
"[",
"\"operation\"",
"]",
"=",
"self",
".",
"operation",
"return",
"info"
] |
API repr (JSON format) for entry.
|
[
"API",
"repr",
"(",
"JSON",
"format",
")",
"for",
"entry",
"."
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/logging/google/cloud/logging/entries.py#L234-L264
|
train
|
googleapis/google-cloud-python
|
logging/google/cloud/logging/entries.py
|
TextEntry.to_api_repr
|
def to_api_repr(self):
"""API repr (JSON format) for entry.
"""
info = super(TextEntry, self).to_api_repr()
info["textPayload"] = self.payload
return info
|
python
|
def to_api_repr(self):
"""API repr (JSON format) for entry.
"""
info = super(TextEntry, self).to_api_repr()
info["textPayload"] = self.payload
return info
|
[
"def",
"to_api_repr",
"(",
"self",
")",
":",
"info",
"=",
"super",
"(",
"TextEntry",
",",
"self",
")",
".",
"to_api_repr",
"(",
")",
"info",
"[",
"\"textPayload\"",
"]",
"=",
"self",
".",
"payload",
"return",
"info"
] |
API repr (JSON format) for entry.
|
[
"API",
"repr",
"(",
"JSON",
"format",
")",
"for",
"entry",
"."
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/logging/google/cloud/logging/entries.py#L287-L292
|
train
|
googleapis/google-cloud-python
|
logging/google/cloud/logging/entries.py
|
StructEntry.to_api_repr
|
def to_api_repr(self):
"""API repr (JSON format) for entry.
"""
info = super(StructEntry, self).to_api_repr()
info["jsonPayload"] = self.payload
return info
|
python
|
def to_api_repr(self):
"""API repr (JSON format) for entry.
"""
info = super(StructEntry, self).to_api_repr()
info["jsonPayload"] = self.payload
return info
|
[
"def",
"to_api_repr",
"(",
"self",
")",
":",
"info",
"=",
"super",
"(",
"StructEntry",
",",
"self",
")",
".",
"to_api_repr",
"(",
")",
"info",
"[",
"\"jsonPayload\"",
"]",
"=",
"self",
".",
"payload",
"return",
"info"
] |
API repr (JSON format) for entry.
|
[
"API",
"repr",
"(",
"JSON",
"format",
")",
"for",
"entry",
"."
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/logging/google/cloud/logging/entries.py#L315-L320
|
train
|
googleapis/google-cloud-python
|
logging/google/cloud/logging/entries.py
|
ProtobufEntry.to_api_repr
|
def to_api_repr(self):
"""API repr (JSON format) for entry.
"""
info = super(ProtobufEntry, self).to_api_repr()
info["protoPayload"] = MessageToDict(self.payload)
return info
|
python
|
def to_api_repr(self):
"""API repr (JSON format) for entry.
"""
info = super(ProtobufEntry, self).to_api_repr()
info["protoPayload"] = MessageToDict(self.payload)
return info
|
[
"def",
"to_api_repr",
"(",
"self",
")",
":",
"info",
"=",
"super",
"(",
"ProtobufEntry",
",",
"self",
")",
".",
"to_api_repr",
"(",
")",
"info",
"[",
"\"protoPayload\"",
"]",
"=",
"MessageToDict",
"(",
"self",
".",
"payload",
")",
"return",
"info"
] |
API repr (JSON format) for entry.
|
[
"API",
"repr",
"(",
"JSON",
"format",
")",
"for",
"entry",
"."
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/logging/google/cloud/logging/entries.py#L353-L358
|
train
|
googleapis/google-cloud-python
|
bigquery/google/cloud/bigquery/dbapi/types.py
|
TimeFromTicks
|
def TimeFromTicks(ticks, tz=None):
"""Construct a DB-API time value from the given ticks value.
:type ticks: float
:param ticks:
a number of seconds since the epoch; see the documentation of the
standard Python time module for details.
:type tz: :class:`datetime.tzinfo`
:param tz: (Optional) time zone to use for conversion
:rtype: :class:`datetime.time`
:returns: time represented by ticks.
"""
dt = datetime.datetime.fromtimestamp(ticks, tz=tz)
return dt.timetz()
|
python
|
def TimeFromTicks(ticks, tz=None):
"""Construct a DB-API time value from the given ticks value.
:type ticks: float
:param ticks:
a number of seconds since the epoch; see the documentation of the
standard Python time module for details.
:type tz: :class:`datetime.tzinfo`
:param tz: (Optional) time zone to use for conversion
:rtype: :class:`datetime.time`
:returns: time represented by ticks.
"""
dt = datetime.datetime.fromtimestamp(ticks, tz=tz)
return dt.timetz()
|
[
"def",
"TimeFromTicks",
"(",
"ticks",
",",
"tz",
"=",
"None",
")",
":",
"dt",
"=",
"datetime",
".",
"datetime",
".",
"fromtimestamp",
"(",
"ticks",
",",
"tz",
"=",
"tz",
")",
"return",
"dt",
".",
"timetz",
"(",
")"
] |
Construct a DB-API time value from the given ticks value.
:type ticks: float
:param ticks:
a number of seconds since the epoch; see the documentation of the
standard Python time module for details.
:type tz: :class:`datetime.tzinfo`
:param tz: (Optional) time zone to use for conversion
:rtype: :class:`datetime.time`
:returns: time represented by ticks.
|
[
"Construct",
"a",
"DB",
"-",
"API",
"time",
"value",
"from",
"the",
"given",
"ticks",
"value",
"."
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/bigquery/google/cloud/bigquery/dbapi/types.py#L45-L60
|
train
|
googleapis/google-cloud-python
|
iot/google/cloud/iot_v1/gapic/device_manager_client.py
|
DeviceManagerClient.registry_path
|
def registry_path(cls, project, location, registry):
"""Return a fully-qualified registry string."""
return google.api_core.path_template.expand(
"projects/{project}/locations/{location}/registries/{registry}",
project=project,
location=location,
registry=registry,
)
|
python
|
def registry_path(cls, project, location, registry):
"""Return a fully-qualified registry string."""
return google.api_core.path_template.expand(
"projects/{project}/locations/{location}/registries/{registry}",
project=project,
location=location,
registry=registry,
)
|
[
"def",
"registry_path",
"(",
"cls",
",",
"project",
",",
"location",
",",
"registry",
")",
":",
"return",
"google",
".",
"api_core",
".",
"path_template",
".",
"expand",
"(",
"\"projects/{project}/locations/{location}/registries/{registry}\"",
",",
"project",
"=",
"project",
",",
"location",
"=",
"location",
",",
"registry",
"=",
"registry",
",",
")"
] |
Return a fully-qualified registry string.
|
[
"Return",
"a",
"fully",
"-",
"qualified",
"registry",
"string",
"."
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/iot/google/cloud/iot_v1/gapic/device_manager_client.py#L86-L93
|
train
|
googleapis/google-cloud-python
|
iot/google/cloud/iot_v1/gapic/device_manager_client.py
|
DeviceManagerClient.device_path
|
def device_path(cls, project, location, registry, device):
"""Return a fully-qualified device string."""
return google.api_core.path_template.expand(
"projects/{project}/locations/{location}/registries/{registry}/devices/{device}",
project=project,
location=location,
registry=registry,
device=device,
)
|
python
|
def device_path(cls, project, location, registry, device):
"""Return a fully-qualified device string."""
return google.api_core.path_template.expand(
"projects/{project}/locations/{location}/registries/{registry}/devices/{device}",
project=project,
location=location,
registry=registry,
device=device,
)
|
[
"def",
"device_path",
"(",
"cls",
",",
"project",
",",
"location",
",",
"registry",
",",
"device",
")",
":",
"return",
"google",
".",
"api_core",
".",
"path_template",
".",
"expand",
"(",
"\"projects/{project}/locations/{location}/registries/{registry}/devices/{device}\"",
",",
"project",
"=",
"project",
",",
"location",
"=",
"location",
",",
"registry",
"=",
"registry",
",",
"device",
"=",
"device",
",",
")"
] |
Return a fully-qualified device string.
|
[
"Return",
"a",
"fully",
"-",
"qualified",
"device",
"string",
"."
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/iot/google/cloud/iot_v1/gapic/device_manager_client.py#L96-L104
|
train
|
googleapis/google-cloud-python
|
iot/google/cloud/iot_v1/gapic/device_manager_client.py
|
DeviceManagerClient.create_device
|
def create_device(
self,
parent,
device,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Creates a device in a device registry.
Example:
>>> from google.cloud import iot_v1
>>>
>>> client = iot_v1.DeviceManagerClient()
>>>
>>> parent = client.registry_path('[PROJECT]', '[LOCATION]', '[REGISTRY]')
>>>
>>> # TODO: Initialize `device`:
>>> device = {}
>>>
>>> response = client.create_device(parent, device)
Args:
parent (str): The name of the device registry where this device should be created. For
example,
``projects/example-project/locations/us-central1/registries/my-registry``.
device (Union[dict, ~google.cloud.iot_v1.types.Device]): The device registration details. The field ``name`` must be empty. The
server generates ``name`` from the device registry ``id`` and the
``parent`` field.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.iot_v1.types.Device`
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.iot_v1.types.Device` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "create_device" not in self._inner_api_calls:
self._inner_api_calls[
"create_device"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.create_device,
default_retry=self._method_configs["CreateDevice"].retry,
default_timeout=self._method_configs["CreateDevice"].timeout,
client_info=self._client_info,
)
request = device_manager_pb2.CreateDeviceRequest(parent=parent, device=device)
if metadata is None:
metadata = []
metadata = list(metadata)
try:
routing_header = [("parent", parent)]
except AttributeError:
pass
else:
routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
routing_header
)
metadata.append(routing_metadata)
return self._inner_api_calls["create_device"](
request, retry=retry, timeout=timeout, metadata=metadata
)
|
python
|
def create_device(
self,
parent,
device,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Creates a device in a device registry.
Example:
>>> from google.cloud import iot_v1
>>>
>>> client = iot_v1.DeviceManagerClient()
>>>
>>> parent = client.registry_path('[PROJECT]', '[LOCATION]', '[REGISTRY]')
>>>
>>> # TODO: Initialize `device`:
>>> device = {}
>>>
>>> response = client.create_device(parent, device)
Args:
parent (str): The name of the device registry where this device should be created. For
example,
``projects/example-project/locations/us-central1/registries/my-registry``.
device (Union[dict, ~google.cloud.iot_v1.types.Device]): The device registration details. The field ``name`` must be empty. The
server generates ``name`` from the device registry ``id`` and the
``parent`` field.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.iot_v1.types.Device`
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.iot_v1.types.Device` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "create_device" not in self._inner_api_calls:
self._inner_api_calls[
"create_device"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.create_device,
default_retry=self._method_configs["CreateDevice"].retry,
default_timeout=self._method_configs["CreateDevice"].timeout,
client_info=self._client_info,
)
request = device_manager_pb2.CreateDeviceRequest(parent=parent, device=device)
if metadata is None:
metadata = []
metadata = list(metadata)
try:
routing_header = [("parent", parent)]
except AttributeError:
pass
else:
routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
routing_header
)
metadata.append(routing_metadata)
return self._inner_api_calls["create_device"](
request, retry=retry, timeout=timeout, metadata=metadata
)
|
[
"def",
"create_device",
"(",
"self",
",",
"parent",
",",
"device",
",",
"retry",
"=",
"google",
".",
"api_core",
".",
"gapic_v1",
".",
"method",
".",
"DEFAULT",
",",
"timeout",
"=",
"google",
".",
"api_core",
".",
"gapic_v1",
".",
"method",
".",
"DEFAULT",
",",
"metadata",
"=",
"None",
",",
")",
":",
"# Wrap the transport method to add retry and timeout logic.",
"if",
"\"create_device\"",
"not",
"in",
"self",
".",
"_inner_api_calls",
":",
"self",
".",
"_inner_api_calls",
"[",
"\"create_device\"",
"]",
"=",
"google",
".",
"api_core",
".",
"gapic_v1",
".",
"method",
".",
"wrap_method",
"(",
"self",
".",
"transport",
".",
"create_device",
",",
"default_retry",
"=",
"self",
".",
"_method_configs",
"[",
"\"CreateDevice\"",
"]",
".",
"retry",
",",
"default_timeout",
"=",
"self",
".",
"_method_configs",
"[",
"\"CreateDevice\"",
"]",
".",
"timeout",
",",
"client_info",
"=",
"self",
".",
"_client_info",
",",
")",
"request",
"=",
"device_manager_pb2",
".",
"CreateDeviceRequest",
"(",
"parent",
"=",
"parent",
",",
"device",
"=",
"device",
")",
"if",
"metadata",
"is",
"None",
":",
"metadata",
"=",
"[",
"]",
"metadata",
"=",
"list",
"(",
"metadata",
")",
"try",
":",
"routing_header",
"=",
"[",
"(",
"\"parent\"",
",",
"parent",
")",
"]",
"except",
"AttributeError",
":",
"pass",
"else",
":",
"routing_metadata",
"=",
"google",
".",
"api_core",
".",
"gapic_v1",
".",
"routing_header",
".",
"to_grpc_metadata",
"(",
"routing_header",
")",
"metadata",
".",
"append",
"(",
"routing_metadata",
")",
"return",
"self",
".",
"_inner_api_calls",
"[",
"\"create_device\"",
"]",
"(",
"request",
",",
"retry",
"=",
"retry",
",",
"timeout",
"=",
"timeout",
",",
"metadata",
"=",
"metadata",
")"
] |
Creates a device in a device registry.
Example:
>>> from google.cloud import iot_v1
>>>
>>> client = iot_v1.DeviceManagerClient()
>>>
>>> parent = client.registry_path('[PROJECT]', '[LOCATION]', '[REGISTRY]')
>>>
>>> # TODO: Initialize `device`:
>>> device = {}
>>>
>>> response = client.create_device(parent, device)
Args:
parent (str): The name of the device registry where this device should be created. For
example,
``projects/example-project/locations/us-central1/registries/my-registry``.
device (Union[dict, ~google.cloud.iot_v1.types.Device]): The device registration details. The field ``name`` must be empty. The
server generates ``name`` from the device registry ``id`` and the
``parent`` field.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.iot_v1.types.Device`
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.iot_v1.types.Device` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
|
[
"Creates",
"a",
"device",
"in",
"a",
"device",
"registry",
"."
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/iot/google/cloud/iot_v1/gapic/device_manager_client.py#L618-L697
|
train
|
googleapis/google-cloud-python
|
iot/google/cloud/iot_v1/gapic/device_manager_client.py
|
DeviceManagerClient.set_iam_policy
|
def set_iam_policy(
self,
resource,
policy,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Sets the access control policy on the specified resource. Replaces any
existing policy.
Example:
>>> from google.cloud import iot_v1
>>>
>>> client = iot_v1.DeviceManagerClient()
>>>
>>> resource = client.registry_path('[PROJECT]', '[LOCATION]', '[REGISTRY]')
>>>
>>> # TODO: Initialize `policy`:
>>> policy = {}
>>>
>>> response = client.set_iam_policy(resource, policy)
Args:
resource (str): REQUIRED: The resource for which the policy is being specified.
``resource`` is usually specified as a path. For example, a Project
resource is specified as ``projects/{project}``.
policy (Union[dict, ~google.cloud.iot_v1.types.Policy]): REQUIRED: The complete policy to be applied to the ``resource``. The
size of the policy is limited to a few 10s of KB. An empty policy is a
valid policy but certain Cloud Platform services (such as Projects)
might reject them.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.iot_v1.types.Policy`
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.iot_v1.types.Policy` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "set_iam_policy" not in self._inner_api_calls:
self._inner_api_calls[
"set_iam_policy"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.set_iam_policy,
default_retry=self._method_configs["SetIamPolicy"].retry,
default_timeout=self._method_configs["SetIamPolicy"].timeout,
client_info=self._client_info,
)
request = iam_policy_pb2.SetIamPolicyRequest(resource=resource, policy=policy)
if metadata is None:
metadata = []
metadata = list(metadata)
try:
routing_header = [("resource", resource)]
except AttributeError:
pass
else:
routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
routing_header
)
metadata.append(routing_metadata)
return self._inner_api_calls["set_iam_policy"](
request, retry=retry, timeout=timeout, metadata=metadata
)
|
python
|
def set_iam_policy(
self,
resource,
policy,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Sets the access control policy on the specified resource. Replaces any
existing policy.
Example:
>>> from google.cloud import iot_v1
>>>
>>> client = iot_v1.DeviceManagerClient()
>>>
>>> resource = client.registry_path('[PROJECT]', '[LOCATION]', '[REGISTRY]')
>>>
>>> # TODO: Initialize `policy`:
>>> policy = {}
>>>
>>> response = client.set_iam_policy(resource, policy)
Args:
resource (str): REQUIRED: The resource for which the policy is being specified.
``resource`` is usually specified as a path. For example, a Project
resource is specified as ``projects/{project}``.
policy (Union[dict, ~google.cloud.iot_v1.types.Policy]): REQUIRED: The complete policy to be applied to the ``resource``. The
size of the policy is limited to a few 10s of KB. An empty policy is a
valid policy but certain Cloud Platform services (such as Projects)
might reject them.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.iot_v1.types.Policy`
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.iot_v1.types.Policy` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "set_iam_policy" not in self._inner_api_calls:
self._inner_api_calls[
"set_iam_policy"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.set_iam_policy,
default_retry=self._method_configs["SetIamPolicy"].retry,
default_timeout=self._method_configs["SetIamPolicy"].timeout,
client_info=self._client_info,
)
request = iam_policy_pb2.SetIamPolicyRequest(resource=resource, policy=policy)
if metadata is None:
metadata = []
metadata = list(metadata)
try:
routing_header = [("resource", resource)]
except AttributeError:
pass
else:
routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
routing_header
)
metadata.append(routing_metadata)
return self._inner_api_calls["set_iam_policy"](
request, retry=retry, timeout=timeout, metadata=metadata
)
|
[
"def",
"set_iam_policy",
"(",
"self",
",",
"resource",
",",
"policy",
",",
"retry",
"=",
"google",
".",
"api_core",
".",
"gapic_v1",
".",
"method",
".",
"DEFAULT",
",",
"timeout",
"=",
"google",
".",
"api_core",
".",
"gapic_v1",
".",
"method",
".",
"DEFAULT",
",",
"metadata",
"=",
"None",
",",
")",
":",
"# Wrap the transport method to add retry and timeout logic.",
"if",
"\"set_iam_policy\"",
"not",
"in",
"self",
".",
"_inner_api_calls",
":",
"self",
".",
"_inner_api_calls",
"[",
"\"set_iam_policy\"",
"]",
"=",
"google",
".",
"api_core",
".",
"gapic_v1",
".",
"method",
".",
"wrap_method",
"(",
"self",
".",
"transport",
".",
"set_iam_policy",
",",
"default_retry",
"=",
"self",
".",
"_method_configs",
"[",
"\"SetIamPolicy\"",
"]",
".",
"retry",
",",
"default_timeout",
"=",
"self",
".",
"_method_configs",
"[",
"\"SetIamPolicy\"",
"]",
".",
"timeout",
",",
"client_info",
"=",
"self",
".",
"_client_info",
",",
")",
"request",
"=",
"iam_policy_pb2",
".",
"SetIamPolicyRequest",
"(",
"resource",
"=",
"resource",
",",
"policy",
"=",
"policy",
")",
"if",
"metadata",
"is",
"None",
":",
"metadata",
"=",
"[",
"]",
"metadata",
"=",
"list",
"(",
"metadata",
")",
"try",
":",
"routing_header",
"=",
"[",
"(",
"\"resource\"",
",",
"resource",
")",
"]",
"except",
"AttributeError",
":",
"pass",
"else",
":",
"routing_metadata",
"=",
"google",
".",
"api_core",
".",
"gapic_v1",
".",
"routing_header",
".",
"to_grpc_metadata",
"(",
"routing_header",
")",
"metadata",
".",
"append",
"(",
"routing_metadata",
")",
"return",
"self",
".",
"_inner_api_calls",
"[",
"\"set_iam_policy\"",
"]",
"(",
"request",
",",
"retry",
"=",
"retry",
",",
"timeout",
"=",
"timeout",
",",
"metadata",
"=",
"metadata",
")"
] |
Sets the access control policy on the specified resource. Replaces any
existing policy.
Example:
>>> from google.cloud import iot_v1
>>>
>>> client = iot_v1.DeviceManagerClient()
>>>
>>> resource = client.registry_path('[PROJECT]', '[LOCATION]', '[REGISTRY]')
>>>
>>> # TODO: Initialize `policy`:
>>> policy = {}
>>>
>>> response = client.set_iam_policy(resource, policy)
Args:
resource (str): REQUIRED: The resource for which the policy is being specified.
``resource`` is usually specified as a path. For example, a Project
resource is specified as ``projects/{project}``.
policy (Union[dict, ~google.cloud.iot_v1.types.Policy]): REQUIRED: The complete policy to be applied to the ``resource``. The
size of the policy is limited to a few 10s of KB. An empty policy is a
valid policy but certain Cloud Platform services (such as Projects)
might reject them.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.iot_v1.types.Policy`
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.iot_v1.types.Policy` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
|
[
"Sets",
"the",
"access",
"control",
"policy",
"on",
"the",
"specified",
"resource",
".",
"Replaces",
"any",
"existing",
"policy",
"."
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/iot/google/cloud/iot_v1/gapic/device_manager_client.py#L1311-L1392
|
train
|
googleapis/google-cloud-python
|
iot/google/cloud/iot_v1/gapic/device_manager_client.py
|
DeviceManagerClient.bind_device_to_gateway
|
def bind_device_to_gateway(
self,
parent,
gateway_id,
device_id,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Associates the device with the gateway.
Example:
>>> from google.cloud import iot_v1
>>>
>>> client = iot_v1.DeviceManagerClient()
>>>
>>> parent = client.registry_path('[PROJECT]', '[LOCATION]', '[REGISTRY]')
>>>
>>> # TODO: Initialize `gateway_id`:
>>> gateway_id = ''
>>>
>>> # TODO: Initialize `device_id`:
>>> device_id = ''
>>>
>>> response = client.bind_device_to_gateway(parent, gateway_id, device_id)
Args:
parent (str): The name of the registry. For example,
``projects/example-project/locations/us-central1/registries/my-registry``.
gateway_id (str): The value of ``gateway_id`` can be either the device numeric ID or the
user-defined device identifier.
device_id (str): The device to associate with the specified gateway. The value of
``device_id`` can be either the device numeric ID or the user-defined
device identifier.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.iot_v1.types.BindDeviceToGatewayResponse` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "bind_device_to_gateway" not in self._inner_api_calls:
self._inner_api_calls[
"bind_device_to_gateway"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.bind_device_to_gateway,
default_retry=self._method_configs["BindDeviceToGateway"].retry,
default_timeout=self._method_configs["BindDeviceToGateway"].timeout,
client_info=self._client_info,
)
request = device_manager_pb2.BindDeviceToGatewayRequest(
parent=parent, gateway_id=gateway_id, device_id=device_id
)
if metadata is None:
metadata = []
metadata = list(metadata)
try:
routing_header = [("parent", parent)]
except AttributeError:
pass
else:
routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
routing_header
)
metadata.append(routing_metadata)
return self._inner_api_calls["bind_device_to_gateway"](
request, retry=retry, timeout=timeout, metadata=metadata
)
|
python
|
def bind_device_to_gateway(
self,
parent,
gateway_id,
device_id,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Associates the device with the gateway.
Example:
>>> from google.cloud import iot_v1
>>>
>>> client = iot_v1.DeviceManagerClient()
>>>
>>> parent = client.registry_path('[PROJECT]', '[LOCATION]', '[REGISTRY]')
>>>
>>> # TODO: Initialize `gateway_id`:
>>> gateway_id = ''
>>>
>>> # TODO: Initialize `device_id`:
>>> device_id = ''
>>>
>>> response = client.bind_device_to_gateway(parent, gateway_id, device_id)
Args:
parent (str): The name of the registry. For example,
``projects/example-project/locations/us-central1/registries/my-registry``.
gateway_id (str): The value of ``gateway_id`` can be either the device numeric ID or the
user-defined device identifier.
device_id (str): The device to associate with the specified gateway. The value of
``device_id`` can be either the device numeric ID or the user-defined
device identifier.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.iot_v1.types.BindDeviceToGatewayResponse` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "bind_device_to_gateway" not in self._inner_api_calls:
self._inner_api_calls[
"bind_device_to_gateway"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.bind_device_to_gateway,
default_retry=self._method_configs["BindDeviceToGateway"].retry,
default_timeout=self._method_configs["BindDeviceToGateway"].timeout,
client_info=self._client_info,
)
request = device_manager_pb2.BindDeviceToGatewayRequest(
parent=parent, gateway_id=gateway_id, device_id=device_id
)
if metadata is None:
metadata = []
metadata = list(metadata)
try:
routing_header = [("parent", parent)]
except AttributeError:
pass
else:
routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
routing_header
)
metadata.append(routing_metadata)
return self._inner_api_calls["bind_device_to_gateway"](
request, retry=retry, timeout=timeout, metadata=metadata
)
|
[
"def",
"bind_device_to_gateway",
"(",
"self",
",",
"parent",
",",
"gateway_id",
",",
"device_id",
",",
"retry",
"=",
"google",
".",
"api_core",
".",
"gapic_v1",
".",
"method",
".",
"DEFAULT",
",",
"timeout",
"=",
"google",
".",
"api_core",
".",
"gapic_v1",
".",
"method",
".",
"DEFAULT",
",",
"metadata",
"=",
"None",
",",
")",
":",
"# Wrap the transport method to add retry and timeout logic.",
"if",
"\"bind_device_to_gateway\"",
"not",
"in",
"self",
".",
"_inner_api_calls",
":",
"self",
".",
"_inner_api_calls",
"[",
"\"bind_device_to_gateway\"",
"]",
"=",
"google",
".",
"api_core",
".",
"gapic_v1",
".",
"method",
".",
"wrap_method",
"(",
"self",
".",
"transport",
".",
"bind_device_to_gateway",
",",
"default_retry",
"=",
"self",
".",
"_method_configs",
"[",
"\"BindDeviceToGateway\"",
"]",
".",
"retry",
",",
"default_timeout",
"=",
"self",
".",
"_method_configs",
"[",
"\"BindDeviceToGateway\"",
"]",
".",
"timeout",
",",
"client_info",
"=",
"self",
".",
"_client_info",
",",
")",
"request",
"=",
"device_manager_pb2",
".",
"BindDeviceToGatewayRequest",
"(",
"parent",
"=",
"parent",
",",
"gateway_id",
"=",
"gateway_id",
",",
"device_id",
"=",
"device_id",
")",
"if",
"metadata",
"is",
"None",
":",
"metadata",
"=",
"[",
"]",
"metadata",
"=",
"list",
"(",
"metadata",
")",
"try",
":",
"routing_header",
"=",
"[",
"(",
"\"parent\"",
",",
"parent",
")",
"]",
"except",
"AttributeError",
":",
"pass",
"else",
":",
"routing_metadata",
"=",
"google",
".",
"api_core",
".",
"gapic_v1",
".",
"routing_header",
".",
"to_grpc_metadata",
"(",
"routing_header",
")",
"metadata",
".",
"append",
"(",
"routing_metadata",
")",
"return",
"self",
".",
"_inner_api_calls",
"[",
"\"bind_device_to_gateway\"",
"]",
"(",
"request",
",",
"retry",
"=",
"retry",
",",
"timeout",
"=",
"timeout",
",",
"metadata",
"=",
"metadata",
")"
] |
Associates the device with the gateway.
Example:
>>> from google.cloud import iot_v1
>>>
>>> client = iot_v1.DeviceManagerClient()
>>>
>>> parent = client.registry_path('[PROJECT]', '[LOCATION]', '[REGISTRY]')
>>>
>>> # TODO: Initialize `gateway_id`:
>>> gateway_id = ''
>>>
>>> # TODO: Initialize `device_id`:
>>> device_id = ''
>>>
>>> response = client.bind_device_to_gateway(parent, gateway_id, device_id)
Args:
parent (str): The name of the registry. For example,
``projects/example-project/locations/us-central1/registries/my-registry``.
gateway_id (str): The value of ``gateway_id`` can be either the device numeric ID or the
user-defined device identifier.
device_id (str): The device to associate with the specified gateway. The value of
``device_id`` can be either the device numeric ID or the user-defined
device identifier.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.iot_v1.types.BindDeviceToGatewayResponse` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
|
[
"Associates",
"the",
"device",
"with",
"the",
"gateway",
"."
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/iot/google/cloud/iot_v1/gapic/device_manager_client.py#L1649-L1732
|
train
|
googleapis/google-cloud-python
|
tasks/google/cloud/tasks_v2/gapic/cloud_tasks_client.py
|
CloudTasksClient.queue_path
|
def queue_path(cls, project, location, queue):
"""Return a fully-qualified queue string."""
return google.api_core.path_template.expand(
"projects/{project}/locations/{location}/queues/{queue}",
project=project,
location=location,
queue=queue,
)
|
python
|
def queue_path(cls, project, location, queue):
"""Return a fully-qualified queue string."""
return google.api_core.path_template.expand(
"projects/{project}/locations/{location}/queues/{queue}",
project=project,
location=location,
queue=queue,
)
|
[
"def",
"queue_path",
"(",
"cls",
",",
"project",
",",
"location",
",",
"queue",
")",
":",
"return",
"google",
".",
"api_core",
".",
"path_template",
".",
"expand",
"(",
"\"projects/{project}/locations/{location}/queues/{queue}\"",
",",
"project",
"=",
"project",
",",
"location",
"=",
"location",
",",
"queue",
"=",
"queue",
",",
")"
] |
Return a fully-qualified queue string.
|
[
"Return",
"a",
"fully",
"-",
"qualified",
"queue",
"string",
"."
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/tasks/google/cloud/tasks_v2/gapic/cloud_tasks_client.py#L97-L104
|
train
|
googleapis/google-cloud-python
|
tasks/google/cloud/tasks_v2/gapic/cloud_tasks_client.py
|
CloudTasksClient.task_path
|
def task_path(cls, project, location, queue, task):
"""Return a fully-qualified task string."""
return google.api_core.path_template.expand(
"projects/{project}/locations/{location}/queues/{queue}/tasks/{task}",
project=project,
location=location,
queue=queue,
task=task,
)
|
python
|
def task_path(cls, project, location, queue, task):
"""Return a fully-qualified task string."""
return google.api_core.path_template.expand(
"projects/{project}/locations/{location}/queues/{queue}/tasks/{task}",
project=project,
location=location,
queue=queue,
task=task,
)
|
[
"def",
"task_path",
"(",
"cls",
",",
"project",
",",
"location",
",",
"queue",
",",
"task",
")",
":",
"return",
"google",
".",
"api_core",
".",
"path_template",
".",
"expand",
"(",
"\"projects/{project}/locations/{location}/queues/{queue}/tasks/{task}\"",
",",
"project",
"=",
"project",
",",
"location",
"=",
"location",
",",
"queue",
"=",
"queue",
",",
"task",
"=",
"task",
",",
")"
] |
Return a fully-qualified task string.
|
[
"Return",
"a",
"fully",
"-",
"qualified",
"task",
"string",
"."
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/tasks/google/cloud/tasks_v2/gapic/cloud_tasks_client.py#L107-L115
|
train
|
googleapis/google-cloud-python
|
tasks/google/cloud/tasks_v2/gapic/cloud_tasks_client.py
|
CloudTasksClient.create_queue
|
def create_queue(
self,
parent,
queue,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Creates a queue.
Queues created with this method allow tasks to live for a maximum of 31
days. After a task is 31 days old, the task will be deleted regardless
of whether it was dispatched or not.
WARNING: Using this method may have unintended side effects if you are
using an App Engine ``queue.yaml`` or ``queue.xml`` file to manage your
queues. Read `Overview of Queue Management and
queue.yaml <https://cloud.google.com/tasks/docs/queue-yaml>`__ before
using this method.
Example:
>>> from google.cloud import tasks_v2
>>>
>>> client = tasks_v2.CloudTasksClient()
>>>
>>> parent = client.location_path('[PROJECT]', '[LOCATION]')
>>>
>>> # TODO: Initialize `queue`:
>>> queue = {}
>>>
>>> response = client.create_queue(parent, queue)
Args:
parent (str): Required.
The location name in which the queue will be created. For example:
``projects/PROJECT_ID/locations/LOCATION_ID``
The list of allowed locations can be obtained by calling Cloud Tasks'
implementation of ``ListLocations``.
queue (Union[dict, ~google.cloud.tasks_v2.types.Queue]): Required.
The queue to create.
``Queue's name`` cannot be the same as an existing queue.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.tasks_v2.types.Queue`
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.tasks_v2.types.Queue` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "create_queue" not in self._inner_api_calls:
self._inner_api_calls[
"create_queue"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.create_queue,
default_retry=self._method_configs["CreateQueue"].retry,
default_timeout=self._method_configs["CreateQueue"].timeout,
client_info=self._client_info,
)
request = cloudtasks_pb2.CreateQueueRequest(parent=parent, queue=queue)
if metadata is None:
metadata = []
metadata = list(metadata)
try:
routing_header = [("parent", parent)]
except AttributeError:
pass
else:
routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
routing_header
)
metadata.append(routing_metadata)
return self._inner_api_calls["create_queue"](
request, retry=retry, timeout=timeout, metadata=metadata
)
|
python
|
def create_queue(
self,
parent,
queue,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Creates a queue.
Queues created with this method allow tasks to live for a maximum of 31
days. After a task is 31 days old, the task will be deleted regardless
of whether it was dispatched or not.
WARNING: Using this method may have unintended side effects if you are
using an App Engine ``queue.yaml`` or ``queue.xml`` file to manage your
queues. Read `Overview of Queue Management and
queue.yaml <https://cloud.google.com/tasks/docs/queue-yaml>`__ before
using this method.
Example:
>>> from google.cloud import tasks_v2
>>>
>>> client = tasks_v2.CloudTasksClient()
>>>
>>> parent = client.location_path('[PROJECT]', '[LOCATION]')
>>>
>>> # TODO: Initialize `queue`:
>>> queue = {}
>>>
>>> response = client.create_queue(parent, queue)
Args:
parent (str): Required.
The location name in which the queue will be created. For example:
``projects/PROJECT_ID/locations/LOCATION_ID``
The list of allowed locations can be obtained by calling Cloud Tasks'
implementation of ``ListLocations``.
queue (Union[dict, ~google.cloud.tasks_v2.types.Queue]): Required.
The queue to create.
``Queue's name`` cannot be the same as an existing queue.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.tasks_v2.types.Queue`
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.tasks_v2.types.Queue` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "create_queue" not in self._inner_api_calls:
self._inner_api_calls[
"create_queue"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.create_queue,
default_retry=self._method_configs["CreateQueue"].retry,
default_timeout=self._method_configs["CreateQueue"].timeout,
client_info=self._client_info,
)
request = cloudtasks_pb2.CreateQueueRequest(parent=parent, queue=queue)
if metadata is None:
metadata = []
metadata = list(metadata)
try:
routing_header = [("parent", parent)]
except AttributeError:
pass
else:
routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
routing_header
)
metadata.append(routing_metadata)
return self._inner_api_calls["create_queue"](
request, retry=retry, timeout=timeout, metadata=metadata
)
|
[
"def",
"create_queue",
"(",
"self",
",",
"parent",
",",
"queue",
",",
"retry",
"=",
"google",
".",
"api_core",
".",
"gapic_v1",
".",
"method",
".",
"DEFAULT",
",",
"timeout",
"=",
"google",
".",
"api_core",
".",
"gapic_v1",
".",
"method",
".",
"DEFAULT",
",",
"metadata",
"=",
"None",
",",
")",
":",
"# Wrap the transport method to add retry and timeout logic.",
"if",
"\"create_queue\"",
"not",
"in",
"self",
".",
"_inner_api_calls",
":",
"self",
".",
"_inner_api_calls",
"[",
"\"create_queue\"",
"]",
"=",
"google",
".",
"api_core",
".",
"gapic_v1",
".",
"method",
".",
"wrap_method",
"(",
"self",
".",
"transport",
".",
"create_queue",
",",
"default_retry",
"=",
"self",
".",
"_method_configs",
"[",
"\"CreateQueue\"",
"]",
".",
"retry",
",",
"default_timeout",
"=",
"self",
".",
"_method_configs",
"[",
"\"CreateQueue\"",
"]",
".",
"timeout",
",",
"client_info",
"=",
"self",
".",
"_client_info",
",",
")",
"request",
"=",
"cloudtasks_pb2",
".",
"CreateQueueRequest",
"(",
"parent",
"=",
"parent",
",",
"queue",
"=",
"queue",
")",
"if",
"metadata",
"is",
"None",
":",
"metadata",
"=",
"[",
"]",
"metadata",
"=",
"list",
"(",
"metadata",
")",
"try",
":",
"routing_header",
"=",
"[",
"(",
"\"parent\"",
",",
"parent",
")",
"]",
"except",
"AttributeError",
":",
"pass",
"else",
":",
"routing_metadata",
"=",
"google",
".",
"api_core",
".",
"gapic_v1",
".",
"routing_header",
".",
"to_grpc_metadata",
"(",
"routing_header",
")",
"metadata",
".",
"append",
"(",
"routing_metadata",
")",
"return",
"self",
".",
"_inner_api_calls",
"[",
"\"create_queue\"",
"]",
"(",
"request",
",",
"retry",
"=",
"retry",
",",
"timeout",
"=",
"timeout",
",",
"metadata",
"=",
"metadata",
")"
] |
Creates a queue.
Queues created with this method allow tasks to live for a maximum of 31
days. After a task is 31 days old, the task will be deleted regardless
of whether it was dispatched or not.
WARNING: Using this method may have unintended side effects if you are
using an App Engine ``queue.yaml`` or ``queue.xml`` file to manage your
queues. Read `Overview of Queue Management and
queue.yaml <https://cloud.google.com/tasks/docs/queue-yaml>`__ before
using this method.
Example:
>>> from google.cloud import tasks_v2
>>>
>>> client = tasks_v2.CloudTasksClient()
>>>
>>> parent = client.location_path('[PROJECT]', '[LOCATION]')
>>>
>>> # TODO: Initialize `queue`:
>>> queue = {}
>>>
>>> response = client.create_queue(parent, queue)
Args:
parent (str): Required.
The location name in which the queue will be created. For example:
``projects/PROJECT_ID/locations/LOCATION_ID``
The list of allowed locations can be obtained by calling Cloud Tasks'
implementation of ``ListLocations``.
queue (Union[dict, ~google.cloud.tasks_v2.types.Queue]): Required.
The queue to create.
``Queue's name`` cannot be the same as an existing queue.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.tasks_v2.types.Queue`
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.tasks_v2.types.Queue` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
|
[
"Creates",
"a",
"queue",
"."
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/tasks/google/cloud/tasks_v2/gapic/cloud_tasks_client.py#L407-L502
|
train
|
googleapis/google-cloud-python
|
tasks/google/cloud/tasks_v2/gapic/cloud_tasks_client.py
|
CloudTasksClient.create_task
|
def create_task(
self,
parent,
task,
response_view=None,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Creates a task and adds it to a queue.
Tasks cannot be updated after creation; there is no UpdateTask command.
- For ``App Engine queues``, the maximum task size is 100KB.
Example:
>>> from google.cloud import tasks_v2
>>>
>>> client = tasks_v2.CloudTasksClient()
>>>
>>> parent = client.queue_path('[PROJECT]', '[LOCATION]', '[QUEUE]')
>>>
>>> # TODO: Initialize `task`:
>>> task = {}
>>>
>>> response = client.create_task(parent, task)
Args:
parent (str): Required.
The queue name. For example:
``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID``
The queue must already exist.
task (Union[dict, ~google.cloud.tasks_v2.types.Task]): Required.
The task to add.
Task names have the following format:
``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID``.
The user can optionally specify a task ``name``. If a name is not
specified then the system will generate a random unique task id, which
will be set in the task returned in the ``response``.
If ``schedule_time`` is not set or is in the past then Cloud Tasks will
set it to the current time.
Task De-duplication:
Explicitly specifying a task ID enables task de-duplication. If a task's
ID is identical to that of an existing task or a task that was deleted
or executed recently then the call will fail with ``ALREADY_EXISTS``. If
the task's queue was created using Cloud Tasks, then another task with
the same name can't be created for ~1hour after the original task was
deleted or executed. If the task's queue was created using queue.yaml or
queue.xml, then another task with the same name can't be created for
~9days after the original task was deleted or executed.
Because there is an extra lookup cost to identify duplicate task names,
these ``CreateTask`` calls have significantly increased latency. Using
hashed strings for the task id or for the prefix of the task id is
recommended. Choosing task ids that are sequential or have sequential
prefixes, for example using a timestamp, causes an increase in latency
and error rates in all task commands. The infrastructure relies on an
approximately uniform distribution of task ids to store and serve tasks
efficiently.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.tasks_v2.types.Task`
response_view (~google.cloud.tasks_v2.types.View): The response\_view specifies which subset of the ``Task`` will be
returned.
By default response\_view is ``BASIC``; not all information is retrieved
by default because some data, such as payloads, might be desirable to
return only when needed because of its large size or because of the
sensitivity of data that it contains.
Authorization for ``FULL`` requires ``cloudtasks.tasks.fullView``
`Google IAM <https://cloud.google.com/iam/>`___ permission on the
``Task`` resource.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.tasks_v2.types.Task` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "create_task" not in self._inner_api_calls:
self._inner_api_calls[
"create_task"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.create_task,
default_retry=self._method_configs["CreateTask"].retry,
default_timeout=self._method_configs["CreateTask"].timeout,
client_info=self._client_info,
)
request = cloudtasks_pb2.CreateTaskRequest(
parent=parent, task=task, response_view=response_view
)
if metadata is None:
metadata = []
metadata = list(metadata)
try:
routing_header = [("parent", parent)]
except AttributeError:
pass
else:
routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
routing_header
)
metadata.append(routing_metadata)
return self._inner_api_calls["create_task"](
request, retry=retry, timeout=timeout, metadata=metadata
)
|
python
|
def create_task(
self,
parent,
task,
response_view=None,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Creates a task and adds it to a queue.
Tasks cannot be updated after creation; there is no UpdateTask command.
- For ``App Engine queues``, the maximum task size is 100KB.
Example:
>>> from google.cloud import tasks_v2
>>>
>>> client = tasks_v2.CloudTasksClient()
>>>
>>> parent = client.queue_path('[PROJECT]', '[LOCATION]', '[QUEUE]')
>>>
>>> # TODO: Initialize `task`:
>>> task = {}
>>>
>>> response = client.create_task(parent, task)
Args:
parent (str): Required.
The queue name. For example:
``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID``
The queue must already exist.
task (Union[dict, ~google.cloud.tasks_v2.types.Task]): Required.
The task to add.
Task names have the following format:
``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID``.
The user can optionally specify a task ``name``. If a name is not
specified then the system will generate a random unique task id, which
will be set in the task returned in the ``response``.
If ``schedule_time`` is not set or is in the past then Cloud Tasks will
set it to the current time.
Task De-duplication:
Explicitly specifying a task ID enables task de-duplication. If a task's
ID is identical to that of an existing task or a task that was deleted
or executed recently then the call will fail with ``ALREADY_EXISTS``. If
the task's queue was created using Cloud Tasks, then another task with
the same name can't be created for ~1hour after the original task was
deleted or executed. If the task's queue was created using queue.yaml or
queue.xml, then another task with the same name can't be created for
~9days after the original task was deleted or executed.
Because there is an extra lookup cost to identify duplicate task names,
these ``CreateTask`` calls have significantly increased latency. Using
hashed strings for the task id or for the prefix of the task id is
recommended. Choosing task ids that are sequential or have sequential
prefixes, for example using a timestamp, causes an increase in latency
and error rates in all task commands. The infrastructure relies on an
approximately uniform distribution of task ids to store and serve tasks
efficiently.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.tasks_v2.types.Task`
response_view (~google.cloud.tasks_v2.types.View): The response\_view specifies which subset of the ``Task`` will be
returned.
By default response\_view is ``BASIC``; not all information is retrieved
by default because some data, such as payloads, might be desirable to
return only when needed because of its large size or because of the
sensitivity of data that it contains.
Authorization for ``FULL`` requires ``cloudtasks.tasks.fullView``
`Google IAM <https://cloud.google.com/iam/>`___ permission on the
``Task`` resource.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.tasks_v2.types.Task` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "create_task" not in self._inner_api_calls:
self._inner_api_calls[
"create_task"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.create_task,
default_retry=self._method_configs["CreateTask"].retry,
default_timeout=self._method_configs["CreateTask"].timeout,
client_info=self._client_info,
)
request = cloudtasks_pb2.CreateTaskRequest(
parent=parent, task=task, response_view=response_view
)
if metadata is None:
metadata = []
metadata = list(metadata)
try:
routing_header = [("parent", parent)]
except AttributeError:
pass
else:
routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
routing_header
)
metadata.append(routing_metadata)
return self._inner_api_calls["create_task"](
request, retry=retry, timeout=timeout, metadata=metadata
)
|
[
"def",
"create_task",
"(",
"self",
",",
"parent",
",",
"task",
",",
"response_view",
"=",
"None",
",",
"retry",
"=",
"google",
".",
"api_core",
".",
"gapic_v1",
".",
"method",
".",
"DEFAULT",
",",
"timeout",
"=",
"google",
".",
"api_core",
".",
"gapic_v1",
".",
"method",
".",
"DEFAULT",
",",
"metadata",
"=",
"None",
",",
")",
":",
"# Wrap the transport method to add retry and timeout logic.",
"if",
"\"create_task\"",
"not",
"in",
"self",
".",
"_inner_api_calls",
":",
"self",
".",
"_inner_api_calls",
"[",
"\"create_task\"",
"]",
"=",
"google",
".",
"api_core",
".",
"gapic_v1",
".",
"method",
".",
"wrap_method",
"(",
"self",
".",
"transport",
".",
"create_task",
",",
"default_retry",
"=",
"self",
".",
"_method_configs",
"[",
"\"CreateTask\"",
"]",
".",
"retry",
",",
"default_timeout",
"=",
"self",
".",
"_method_configs",
"[",
"\"CreateTask\"",
"]",
".",
"timeout",
",",
"client_info",
"=",
"self",
".",
"_client_info",
",",
")",
"request",
"=",
"cloudtasks_pb2",
".",
"CreateTaskRequest",
"(",
"parent",
"=",
"parent",
",",
"task",
"=",
"task",
",",
"response_view",
"=",
"response_view",
")",
"if",
"metadata",
"is",
"None",
":",
"metadata",
"=",
"[",
"]",
"metadata",
"=",
"list",
"(",
"metadata",
")",
"try",
":",
"routing_header",
"=",
"[",
"(",
"\"parent\"",
",",
"parent",
")",
"]",
"except",
"AttributeError",
":",
"pass",
"else",
":",
"routing_metadata",
"=",
"google",
".",
"api_core",
".",
"gapic_v1",
".",
"routing_header",
".",
"to_grpc_metadata",
"(",
"routing_header",
")",
"metadata",
".",
"append",
"(",
"routing_metadata",
")",
"return",
"self",
".",
"_inner_api_calls",
"[",
"\"create_task\"",
"]",
"(",
"request",
",",
"retry",
"=",
"retry",
",",
"timeout",
"=",
"timeout",
",",
"metadata",
"=",
"metadata",
")"
] |
Creates a task and adds it to a queue.
Tasks cannot be updated after creation; there is no UpdateTask command.
- For ``App Engine queues``, the maximum task size is 100KB.
Example:
>>> from google.cloud import tasks_v2
>>>
>>> client = tasks_v2.CloudTasksClient()
>>>
>>> parent = client.queue_path('[PROJECT]', '[LOCATION]', '[QUEUE]')
>>>
>>> # TODO: Initialize `task`:
>>> task = {}
>>>
>>> response = client.create_task(parent, task)
Args:
parent (str): Required.
The queue name. For example:
``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID``
The queue must already exist.
task (Union[dict, ~google.cloud.tasks_v2.types.Task]): Required.
The task to add.
Task names have the following format:
``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID``.
The user can optionally specify a task ``name``. If a name is not
specified then the system will generate a random unique task id, which
will be set in the task returned in the ``response``.
If ``schedule_time`` is not set or is in the past then Cloud Tasks will
set it to the current time.
Task De-duplication:
Explicitly specifying a task ID enables task de-duplication. If a task's
ID is identical to that of an existing task or a task that was deleted
or executed recently then the call will fail with ``ALREADY_EXISTS``. If
the task's queue was created using Cloud Tasks, then another task with
the same name can't be created for ~1hour after the original task was
deleted or executed. If the task's queue was created using queue.yaml or
queue.xml, then another task with the same name can't be created for
~9days after the original task was deleted or executed.
Because there is an extra lookup cost to identify duplicate task names,
these ``CreateTask`` calls have significantly increased latency. Using
hashed strings for the task id or for the prefix of the task id is
recommended. Choosing task ids that are sequential or have sequential
prefixes, for example using a timestamp, causes an increase in latency
and error rates in all task commands. The infrastructure relies on an
approximately uniform distribution of task ids to store and serve tasks
efficiently.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.tasks_v2.types.Task`
response_view (~google.cloud.tasks_v2.types.View): The response\_view specifies which subset of the ``Task`` will be
returned.
By default response\_view is ``BASIC``; not all information is retrieved
by default because some data, such as payloads, might be desirable to
return only when needed because of its large size or because of the
sensitivity of data that it contains.
Authorization for ``FULL`` requires ``cloudtasks.tasks.fullView``
`Google IAM <https://cloud.google.com/iam/>`___ permission on the
``Task`` resource.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.tasks_v2.types.Task` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
|
[
"Creates",
"a",
"task",
"and",
"adds",
"it",
"to",
"a",
"queue",
"."
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/tasks/google/cloud/tasks_v2/gapic/cloud_tasks_client.py#L1387-L1516
|
train
|
googleapis/google-cloud-python
|
api_core/google/api_core/timeout.py
|
_exponential_timeout_generator
|
def _exponential_timeout_generator(initial, maximum, multiplier, deadline):
"""A generator that yields exponential timeout values.
Args:
initial (float): The initial timeout.
maximum (float): The maximum timeout.
multiplier (float): The multiplier applied to the timeout.
deadline (float): The overall deadline across all invocations.
Yields:
float: A timeout value.
"""
if deadline is not None:
deadline_datetime = datetime_helpers.utcnow() + datetime.timedelta(
seconds=deadline
)
else:
deadline_datetime = datetime.datetime.max
timeout = initial
while True:
now = datetime_helpers.utcnow()
yield min(
# The calculated timeout based on invocations.
timeout,
# The set maximum timeout.
maximum,
# The remaining time before the deadline is reached.
float((deadline_datetime - now).seconds),
)
timeout = timeout * multiplier
|
python
|
def _exponential_timeout_generator(initial, maximum, multiplier, deadline):
"""A generator that yields exponential timeout values.
Args:
initial (float): The initial timeout.
maximum (float): The maximum timeout.
multiplier (float): The multiplier applied to the timeout.
deadline (float): The overall deadline across all invocations.
Yields:
float: A timeout value.
"""
if deadline is not None:
deadline_datetime = datetime_helpers.utcnow() + datetime.timedelta(
seconds=deadline
)
else:
deadline_datetime = datetime.datetime.max
timeout = initial
while True:
now = datetime_helpers.utcnow()
yield min(
# The calculated timeout based on invocations.
timeout,
# The set maximum timeout.
maximum,
# The remaining time before the deadline is reached.
float((deadline_datetime - now).seconds),
)
timeout = timeout * multiplier
|
[
"def",
"_exponential_timeout_generator",
"(",
"initial",
",",
"maximum",
",",
"multiplier",
",",
"deadline",
")",
":",
"if",
"deadline",
"is",
"not",
"None",
":",
"deadline_datetime",
"=",
"datetime_helpers",
".",
"utcnow",
"(",
")",
"+",
"datetime",
".",
"timedelta",
"(",
"seconds",
"=",
"deadline",
")",
"else",
":",
"deadline_datetime",
"=",
"datetime",
".",
"datetime",
".",
"max",
"timeout",
"=",
"initial",
"while",
"True",
":",
"now",
"=",
"datetime_helpers",
".",
"utcnow",
"(",
")",
"yield",
"min",
"(",
"# The calculated timeout based on invocations.",
"timeout",
",",
"# The set maximum timeout.",
"maximum",
",",
"# The remaining time before the deadline is reached.",
"float",
"(",
"(",
"deadline_datetime",
"-",
"now",
")",
".",
"seconds",
")",
",",
")",
"timeout",
"=",
"timeout",
"*",
"multiplier"
] |
A generator that yields exponential timeout values.
Args:
initial (float): The initial timeout.
maximum (float): The maximum timeout.
multiplier (float): The multiplier applied to the timeout.
deadline (float): The overall deadline across all invocations.
Yields:
float: A timeout value.
|
[
"A",
"generator",
"that",
"yields",
"exponential",
"timeout",
"values",
"."
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/api_core/google/api_core/timeout.py#L110-L140
|
train
|
googleapis/google-cloud-python
|
bigquery/google/cloud/bigquery/dbapi/cursor.py
|
_format_operation_list
|
def _format_operation_list(operation, parameters):
"""Formats parameters in operation in the way BigQuery expects.
The input operation will be a query like ``SELECT %s`` and the output
will be a query like ``SELECT ?``.
:type operation: str
:param operation: A Google BigQuery query string.
:type parameters: Sequence[Any]
:param parameters: Sequence of parameter values.
:rtype: str
:returns: A formatted query string.
:raises: :class:`~google.cloud.bigquery.dbapi.ProgrammingError`
if a parameter used in the operation is not found in the
``parameters`` argument.
"""
formatted_params = ["?" for _ in parameters]
try:
return operation % tuple(formatted_params)
except TypeError as exc:
raise exceptions.ProgrammingError(exc)
|
python
|
def _format_operation_list(operation, parameters):
"""Formats parameters in operation in the way BigQuery expects.
The input operation will be a query like ``SELECT %s`` and the output
will be a query like ``SELECT ?``.
:type operation: str
:param operation: A Google BigQuery query string.
:type parameters: Sequence[Any]
:param parameters: Sequence of parameter values.
:rtype: str
:returns: A formatted query string.
:raises: :class:`~google.cloud.bigquery.dbapi.ProgrammingError`
if a parameter used in the operation is not found in the
``parameters`` argument.
"""
formatted_params = ["?" for _ in parameters]
try:
return operation % tuple(formatted_params)
except TypeError as exc:
raise exceptions.ProgrammingError(exc)
|
[
"def",
"_format_operation_list",
"(",
"operation",
",",
"parameters",
")",
":",
"formatted_params",
"=",
"[",
"\"?\"",
"for",
"_",
"in",
"parameters",
"]",
"try",
":",
"return",
"operation",
"%",
"tuple",
"(",
"formatted_params",
")",
"except",
"TypeError",
"as",
"exc",
":",
"raise",
"exceptions",
".",
"ProgrammingError",
"(",
"exc",
")"
] |
Formats parameters in operation in the way BigQuery expects.
The input operation will be a query like ``SELECT %s`` and the output
will be a query like ``SELECT ?``.
:type operation: str
:param operation: A Google BigQuery query string.
:type parameters: Sequence[Any]
:param parameters: Sequence of parameter values.
:rtype: str
:returns: A formatted query string.
:raises: :class:`~google.cloud.bigquery.dbapi.ProgrammingError`
if a parameter used in the operation is not found in the
``parameters`` argument.
|
[
"Formats",
"parameters",
"in",
"operation",
"in",
"the",
"way",
"BigQuery",
"expects",
"."
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/bigquery/google/cloud/bigquery/dbapi/cursor.py#L282-L305
|
train
|
googleapis/google-cloud-python
|
bigquery/google/cloud/bigquery/dbapi/cursor.py
|
_format_operation_dict
|
def _format_operation_dict(operation, parameters):
"""Formats parameters in operation in the way BigQuery expects.
The input operation will be a query like ``SELECT %(namedparam)s`` and
the output will be a query like ``SELECT @namedparam``.
:type operation: str
:param operation: A Google BigQuery query string.
:type parameters: Mapping[str, Any]
:param parameters: Dictionary of parameter values.
:rtype: str
:returns: A formatted query string.
:raises: :class:`~google.cloud.bigquery.dbapi.ProgrammingError`
if a parameter used in the operation is not found in the
``parameters`` argument.
"""
formatted_params = {}
for name in parameters:
escaped_name = name.replace("`", r"\`")
formatted_params[name] = "@`{}`".format(escaped_name)
try:
return operation % formatted_params
except KeyError as exc:
raise exceptions.ProgrammingError(exc)
|
python
|
def _format_operation_dict(operation, parameters):
"""Formats parameters in operation in the way BigQuery expects.
The input operation will be a query like ``SELECT %(namedparam)s`` and
the output will be a query like ``SELECT @namedparam``.
:type operation: str
:param operation: A Google BigQuery query string.
:type parameters: Mapping[str, Any]
:param parameters: Dictionary of parameter values.
:rtype: str
:returns: A formatted query string.
:raises: :class:`~google.cloud.bigquery.dbapi.ProgrammingError`
if a parameter used in the operation is not found in the
``parameters`` argument.
"""
formatted_params = {}
for name in parameters:
escaped_name = name.replace("`", r"\`")
formatted_params[name] = "@`{}`".format(escaped_name)
try:
return operation % formatted_params
except KeyError as exc:
raise exceptions.ProgrammingError(exc)
|
[
"def",
"_format_operation_dict",
"(",
"operation",
",",
"parameters",
")",
":",
"formatted_params",
"=",
"{",
"}",
"for",
"name",
"in",
"parameters",
":",
"escaped_name",
"=",
"name",
".",
"replace",
"(",
"\"`\"",
",",
"r\"\\`\"",
")",
"formatted_params",
"[",
"name",
"]",
"=",
"\"@`{}`\"",
".",
"format",
"(",
"escaped_name",
")",
"try",
":",
"return",
"operation",
"%",
"formatted_params",
"except",
"KeyError",
"as",
"exc",
":",
"raise",
"exceptions",
".",
"ProgrammingError",
"(",
"exc",
")"
] |
Formats parameters in operation in the way BigQuery expects.
The input operation will be a query like ``SELECT %(namedparam)s`` and
the output will be a query like ``SELECT @namedparam``.
:type operation: str
:param operation: A Google BigQuery query string.
:type parameters: Mapping[str, Any]
:param parameters: Dictionary of parameter values.
:rtype: str
:returns: A formatted query string.
:raises: :class:`~google.cloud.bigquery.dbapi.ProgrammingError`
if a parameter used in the operation is not found in the
``parameters`` argument.
|
[
"Formats",
"parameters",
"in",
"operation",
"in",
"the",
"way",
"BigQuery",
"expects",
"."
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/bigquery/google/cloud/bigquery/dbapi/cursor.py#L308-L334
|
train
|
googleapis/google-cloud-python
|
bigquery/google/cloud/bigquery/dbapi/cursor.py
|
_format_operation
|
def _format_operation(operation, parameters=None):
"""Formats parameters in operation in way BigQuery expects.
:type: str
:param operation: A Google BigQuery query string.
:type: Mapping[str, Any] or Sequence[Any]
:param parameters: Optional parameter values.
:rtype: str
:returns: A formatted query string.
:raises: :class:`~google.cloud.bigquery.dbapi.ProgrammingError`
if a parameter used in the operation is not found in the
``parameters`` argument.
"""
if parameters is None:
return operation
if isinstance(parameters, collections_abc.Mapping):
return _format_operation_dict(operation, parameters)
return _format_operation_list(operation, parameters)
|
python
|
def _format_operation(operation, parameters=None):
"""Formats parameters in operation in way BigQuery expects.
:type: str
:param operation: A Google BigQuery query string.
:type: Mapping[str, Any] or Sequence[Any]
:param parameters: Optional parameter values.
:rtype: str
:returns: A formatted query string.
:raises: :class:`~google.cloud.bigquery.dbapi.ProgrammingError`
if a parameter used in the operation is not found in the
``parameters`` argument.
"""
if parameters is None:
return operation
if isinstance(parameters, collections_abc.Mapping):
return _format_operation_dict(operation, parameters)
return _format_operation_list(operation, parameters)
|
[
"def",
"_format_operation",
"(",
"operation",
",",
"parameters",
"=",
"None",
")",
":",
"if",
"parameters",
"is",
"None",
":",
"return",
"operation",
"if",
"isinstance",
"(",
"parameters",
",",
"collections_abc",
".",
"Mapping",
")",
":",
"return",
"_format_operation_dict",
"(",
"operation",
",",
"parameters",
")",
"return",
"_format_operation_list",
"(",
"operation",
",",
"parameters",
")"
] |
Formats parameters in operation in way BigQuery expects.
:type: str
:param operation: A Google BigQuery query string.
:type: Mapping[str, Any] or Sequence[Any]
:param parameters: Optional parameter values.
:rtype: str
:returns: A formatted query string.
:raises: :class:`~google.cloud.bigquery.dbapi.ProgrammingError`
if a parameter used in the operation is not found in the
``parameters`` argument.
|
[
"Formats",
"parameters",
"in",
"operation",
"in",
"way",
"BigQuery",
"expects",
"."
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/bigquery/google/cloud/bigquery/dbapi/cursor.py#L337-L358
|
train
|
googleapis/google-cloud-python
|
bigquery/google/cloud/bigquery/dbapi/cursor.py
|
Cursor._set_description
|
def _set_description(self, schema):
"""Set description from schema.
:type schema: Sequence[google.cloud.bigquery.schema.SchemaField]
:param schema: A description of fields in the schema.
"""
if schema is None:
self.description = None
return
self.description = tuple(
[
Column(
name=field.name,
type_code=field.field_type,
display_size=None,
internal_size=None,
precision=None,
scale=None,
null_ok=field.is_nullable,
)
for field in schema
]
)
|
python
|
def _set_description(self, schema):
"""Set description from schema.
:type schema: Sequence[google.cloud.bigquery.schema.SchemaField]
:param schema: A description of fields in the schema.
"""
if schema is None:
self.description = None
return
self.description = tuple(
[
Column(
name=field.name,
type_code=field.field_type,
display_size=None,
internal_size=None,
precision=None,
scale=None,
null_ok=field.is_nullable,
)
for field in schema
]
)
|
[
"def",
"_set_description",
"(",
"self",
",",
"schema",
")",
":",
"if",
"schema",
"is",
"None",
":",
"self",
".",
"description",
"=",
"None",
"return",
"self",
".",
"description",
"=",
"tuple",
"(",
"[",
"Column",
"(",
"name",
"=",
"field",
".",
"name",
",",
"type_code",
"=",
"field",
".",
"field_type",
",",
"display_size",
"=",
"None",
",",
"internal_size",
"=",
"None",
",",
"precision",
"=",
"None",
",",
"scale",
"=",
"None",
",",
"null_ok",
"=",
"field",
".",
"is_nullable",
",",
")",
"for",
"field",
"in",
"schema",
"]",
")"
] |
Set description from schema.
:type schema: Sequence[google.cloud.bigquery.schema.SchemaField]
:param schema: A description of fields in the schema.
|
[
"Set",
"description",
"from",
"schema",
"."
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/bigquery/google/cloud/bigquery/dbapi/cursor.py#L72-L95
|
train
|
googleapis/google-cloud-python
|
bigquery/google/cloud/bigquery/dbapi/cursor.py
|
Cursor._set_rowcount
|
def _set_rowcount(self, query_results):
"""Set the rowcount from query results.
Normally, this sets rowcount to the number of rows returned by the
query, but if it was a DML statement, it sets rowcount to the number
of modified rows.
:type query_results:
:class:`~google.cloud.bigquery.query._QueryResults`
:param query_results: results of a query
"""
total_rows = 0
num_dml_affected_rows = query_results.num_dml_affected_rows
if query_results.total_rows is not None and query_results.total_rows > 0:
total_rows = query_results.total_rows
if num_dml_affected_rows is not None and num_dml_affected_rows > 0:
total_rows = num_dml_affected_rows
self.rowcount = total_rows
|
python
|
def _set_rowcount(self, query_results):
"""Set the rowcount from query results.
Normally, this sets rowcount to the number of rows returned by the
query, but if it was a DML statement, it sets rowcount to the number
of modified rows.
:type query_results:
:class:`~google.cloud.bigquery.query._QueryResults`
:param query_results: results of a query
"""
total_rows = 0
num_dml_affected_rows = query_results.num_dml_affected_rows
if query_results.total_rows is not None and query_results.total_rows > 0:
total_rows = query_results.total_rows
if num_dml_affected_rows is not None and num_dml_affected_rows > 0:
total_rows = num_dml_affected_rows
self.rowcount = total_rows
|
[
"def",
"_set_rowcount",
"(",
"self",
",",
"query_results",
")",
":",
"total_rows",
"=",
"0",
"num_dml_affected_rows",
"=",
"query_results",
".",
"num_dml_affected_rows",
"if",
"query_results",
".",
"total_rows",
"is",
"not",
"None",
"and",
"query_results",
".",
"total_rows",
">",
"0",
":",
"total_rows",
"=",
"query_results",
".",
"total_rows",
"if",
"num_dml_affected_rows",
"is",
"not",
"None",
"and",
"num_dml_affected_rows",
">",
"0",
":",
"total_rows",
"=",
"num_dml_affected_rows",
"self",
".",
"rowcount",
"=",
"total_rows"
] |
Set the rowcount from query results.
Normally, this sets rowcount to the number of rows returned by the
query, but if it was a DML statement, it sets rowcount to the number
of modified rows.
:type query_results:
:class:`~google.cloud.bigquery.query._QueryResults`
:param query_results: results of a query
|
[
"Set",
"the",
"rowcount",
"from",
"query",
"results",
"."
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/bigquery/google/cloud/bigquery/dbapi/cursor.py#L97-L115
|
train
|
googleapis/google-cloud-python
|
bigquery/google/cloud/bigquery/dbapi/cursor.py
|
Cursor.execute
|
def execute(self, operation, parameters=None, job_id=None):
"""Prepare and execute a database operation.
.. note::
When setting query parameters, values which are "text"
(``unicode`` in Python2, ``str`` in Python3) will use
the 'STRING' BigQuery type. Values which are "bytes" (``str`` in
Python2, ``bytes`` in Python3), will use using the 'BYTES' type.
A `~datetime.datetime` parameter without timezone information uses
the 'DATETIME' BigQuery type (example: Global Pi Day Celebration
March 14, 2017 at 1:59pm). A `~datetime.datetime` parameter with
timezone information uses the 'TIMESTAMP' BigQuery type (example:
a wedding on April 29, 2011 at 11am, British Summer Time).
For more information about BigQuery data types, see:
https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types
``STRUCT``/``RECORD`` and ``REPEATED`` query parameters are not
yet supported. See:
https://github.com/GoogleCloudPlatform/google-cloud-python/issues/3524
:type operation: str
:param operation: A Google BigQuery query string.
:type parameters: Mapping[str, Any] or Sequence[Any]
:param parameters:
(Optional) dictionary or sequence of parameter values.
:type job_id: str
:param job_id: (Optional) The job_id to use. If not set, a job ID
is generated at random.
"""
self._query_data = None
self._query_job = None
client = self.connection._client
# The DB-API uses the pyformat formatting, since the way BigQuery does
# query parameters was not one of the standard options. Convert both
# the query and the parameters to the format expected by the client
# libraries.
formatted_operation = _format_operation(operation, parameters=parameters)
query_parameters = _helpers.to_query_parameters(parameters)
config = job.QueryJobConfig()
config.query_parameters = query_parameters
config.use_legacy_sql = False
self._query_job = client.query(
formatted_operation, job_config=config, job_id=job_id
)
# Wait for the query to finish.
try:
self._query_job.result()
except google.cloud.exceptions.GoogleCloudError as exc:
raise exceptions.DatabaseError(exc)
query_results = self._query_job._query_results
self._set_rowcount(query_results)
self._set_description(query_results.schema)
|
python
|
def execute(self, operation, parameters=None, job_id=None):
"""Prepare and execute a database operation.
.. note::
When setting query parameters, values which are "text"
(``unicode`` in Python2, ``str`` in Python3) will use
the 'STRING' BigQuery type. Values which are "bytes" (``str`` in
Python2, ``bytes`` in Python3), will use using the 'BYTES' type.
A `~datetime.datetime` parameter without timezone information uses
the 'DATETIME' BigQuery type (example: Global Pi Day Celebration
March 14, 2017 at 1:59pm). A `~datetime.datetime` parameter with
timezone information uses the 'TIMESTAMP' BigQuery type (example:
a wedding on April 29, 2011 at 11am, British Summer Time).
For more information about BigQuery data types, see:
https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types
``STRUCT``/``RECORD`` and ``REPEATED`` query parameters are not
yet supported. See:
https://github.com/GoogleCloudPlatform/google-cloud-python/issues/3524
:type operation: str
:param operation: A Google BigQuery query string.
:type parameters: Mapping[str, Any] or Sequence[Any]
:param parameters:
(Optional) dictionary or sequence of parameter values.
:type job_id: str
:param job_id: (Optional) The job_id to use. If not set, a job ID
is generated at random.
"""
self._query_data = None
self._query_job = None
client = self.connection._client
# The DB-API uses the pyformat formatting, since the way BigQuery does
# query parameters was not one of the standard options. Convert both
# the query and the parameters to the format expected by the client
# libraries.
formatted_operation = _format_operation(operation, parameters=parameters)
query_parameters = _helpers.to_query_parameters(parameters)
config = job.QueryJobConfig()
config.query_parameters = query_parameters
config.use_legacy_sql = False
self._query_job = client.query(
formatted_operation, job_config=config, job_id=job_id
)
# Wait for the query to finish.
try:
self._query_job.result()
except google.cloud.exceptions.GoogleCloudError as exc:
raise exceptions.DatabaseError(exc)
query_results = self._query_job._query_results
self._set_rowcount(query_results)
self._set_description(query_results.schema)
|
[
"def",
"execute",
"(",
"self",
",",
"operation",
",",
"parameters",
"=",
"None",
",",
"job_id",
"=",
"None",
")",
":",
"self",
".",
"_query_data",
"=",
"None",
"self",
".",
"_query_job",
"=",
"None",
"client",
"=",
"self",
".",
"connection",
".",
"_client",
"# The DB-API uses the pyformat formatting, since the way BigQuery does",
"# query parameters was not one of the standard options. Convert both",
"# the query and the parameters to the format expected by the client",
"# libraries.",
"formatted_operation",
"=",
"_format_operation",
"(",
"operation",
",",
"parameters",
"=",
"parameters",
")",
"query_parameters",
"=",
"_helpers",
".",
"to_query_parameters",
"(",
"parameters",
")",
"config",
"=",
"job",
".",
"QueryJobConfig",
"(",
")",
"config",
".",
"query_parameters",
"=",
"query_parameters",
"config",
".",
"use_legacy_sql",
"=",
"False",
"self",
".",
"_query_job",
"=",
"client",
".",
"query",
"(",
"formatted_operation",
",",
"job_config",
"=",
"config",
",",
"job_id",
"=",
"job_id",
")",
"# Wait for the query to finish.",
"try",
":",
"self",
".",
"_query_job",
".",
"result",
"(",
")",
"except",
"google",
".",
"cloud",
".",
"exceptions",
".",
"GoogleCloudError",
"as",
"exc",
":",
"raise",
"exceptions",
".",
"DatabaseError",
"(",
"exc",
")",
"query_results",
"=",
"self",
".",
"_query_job",
".",
"_query_results",
"self",
".",
"_set_rowcount",
"(",
"query_results",
")",
"self",
".",
"_set_description",
"(",
"query_results",
".",
"schema",
")"
] |
Prepare and execute a database operation.
.. note::
When setting query parameters, values which are "text"
(``unicode`` in Python2, ``str`` in Python3) will use
the 'STRING' BigQuery type. Values which are "bytes" (``str`` in
Python2, ``bytes`` in Python3), will use using the 'BYTES' type.
A `~datetime.datetime` parameter without timezone information uses
the 'DATETIME' BigQuery type (example: Global Pi Day Celebration
March 14, 2017 at 1:59pm). A `~datetime.datetime` parameter with
timezone information uses the 'TIMESTAMP' BigQuery type (example:
a wedding on April 29, 2011 at 11am, British Summer Time).
For more information about BigQuery data types, see:
https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types
``STRUCT``/``RECORD`` and ``REPEATED`` query parameters are not
yet supported. See:
https://github.com/GoogleCloudPlatform/google-cloud-python/issues/3524
:type operation: str
:param operation: A Google BigQuery query string.
:type parameters: Mapping[str, Any] or Sequence[Any]
:param parameters:
(Optional) dictionary or sequence of parameter values.
:type job_id: str
:param job_id: (Optional) The job_id to use. If not set, a job ID
is generated at random.
|
[
"Prepare",
"and",
"execute",
"a",
"database",
"operation",
"."
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/bigquery/google/cloud/bigquery/dbapi/cursor.py#L117-L176
|
train
|
googleapis/google-cloud-python
|
bigquery/google/cloud/bigquery/dbapi/cursor.py
|
Cursor._try_fetch
|
def _try_fetch(self, size=None):
"""Try to start fetching data, if not yet started.
Mutates self to indicate that iteration has started.
"""
if self._query_job is None:
raise exceptions.InterfaceError(
"No query results: execute() must be called before fetch."
)
is_dml = (
self._query_job.statement_type
and self._query_job.statement_type.upper() != "SELECT"
)
if is_dml:
self._query_data = iter([])
return
if self._query_data is None:
client = self.connection._client
rows_iter = client.list_rows(
self._query_job.destination,
selected_fields=self._query_job._query_results.schema,
page_size=self.arraysize,
)
self._query_data = iter(rows_iter)
|
python
|
def _try_fetch(self, size=None):
"""Try to start fetching data, if not yet started.
Mutates self to indicate that iteration has started.
"""
if self._query_job is None:
raise exceptions.InterfaceError(
"No query results: execute() must be called before fetch."
)
is_dml = (
self._query_job.statement_type
and self._query_job.statement_type.upper() != "SELECT"
)
if is_dml:
self._query_data = iter([])
return
if self._query_data is None:
client = self.connection._client
rows_iter = client.list_rows(
self._query_job.destination,
selected_fields=self._query_job._query_results.schema,
page_size=self.arraysize,
)
self._query_data = iter(rows_iter)
|
[
"def",
"_try_fetch",
"(",
"self",
",",
"size",
"=",
"None",
")",
":",
"if",
"self",
".",
"_query_job",
"is",
"None",
":",
"raise",
"exceptions",
".",
"InterfaceError",
"(",
"\"No query results: execute() must be called before fetch.\"",
")",
"is_dml",
"=",
"(",
"self",
".",
"_query_job",
".",
"statement_type",
"and",
"self",
".",
"_query_job",
".",
"statement_type",
".",
"upper",
"(",
")",
"!=",
"\"SELECT\"",
")",
"if",
"is_dml",
":",
"self",
".",
"_query_data",
"=",
"iter",
"(",
"[",
"]",
")",
"return",
"if",
"self",
".",
"_query_data",
"is",
"None",
":",
"client",
"=",
"self",
".",
"connection",
".",
"_client",
"rows_iter",
"=",
"client",
".",
"list_rows",
"(",
"self",
".",
"_query_job",
".",
"destination",
",",
"selected_fields",
"=",
"self",
".",
"_query_job",
".",
"_query_results",
".",
"schema",
",",
"page_size",
"=",
"self",
".",
"arraysize",
",",
")",
"self",
".",
"_query_data",
"=",
"iter",
"(",
"rows_iter",
")"
] |
Try to start fetching data, if not yet started.
Mutates self to indicate that iteration has started.
|
[
"Try",
"to",
"start",
"fetching",
"data",
"if",
"not",
"yet",
"started",
"."
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/bigquery/google/cloud/bigquery/dbapi/cursor.py#L190-L215
|
train
|
googleapis/google-cloud-python
|
bigquery/google/cloud/bigquery/dbapi/cursor.py
|
Cursor.fetchmany
|
def fetchmany(self, size=None):
"""Fetch multiple results from the last ``execute*()`` call.
.. note::
The size parameter is not used for the request/response size.
Set the ``arraysize`` attribute before calling ``execute()`` to
set the batch size.
:type size: int
:param size:
(Optional) Maximum number of rows to return. Defaults to the
``arraysize`` property value.
:rtype: List[tuple]
:returns: A list of rows.
:raises: :class:`~google.cloud.bigquery.dbapi.InterfaceError`
if called before ``execute()``.
"""
if size is None:
size = self.arraysize
self._try_fetch(size=size)
rows = []
for row in self._query_data:
rows.append(row)
if len(rows) >= size:
break
return rows
|
python
|
def fetchmany(self, size=None):
"""Fetch multiple results from the last ``execute*()`` call.
.. note::
The size parameter is not used for the request/response size.
Set the ``arraysize`` attribute before calling ``execute()`` to
set the batch size.
:type size: int
:param size:
(Optional) Maximum number of rows to return. Defaults to the
``arraysize`` property value.
:rtype: List[tuple]
:returns: A list of rows.
:raises: :class:`~google.cloud.bigquery.dbapi.InterfaceError`
if called before ``execute()``.
"""
if size is None:
size = self.arraysize
self._try_fetch(size=size)
rows = []
for row in self._query_data:
rows.append(row)
if len(rows) >= size:
break
return rows
|
[
"def",
"fetchmany",
"(",
"self",
",",
"size",
"=",
"None",
")",
":",
"if",
"size",
"is",
"None",
":",
"size",
"=",
"self",
".",
"arraysize",
"self",
".",
"_try_fetch",
"(",
"size",
"=",
"size",
")",
"rows",
"=",
"[",
"]",
"for",
"row",
"in",
"self",
".",
"_query_data",
":",
"rows",
".",
"append",
"(",
"row",
")",
"if",
"len",
"(",
"rows",
")",
">=",
"size",
":",
"break",
"return",
"rows"
] |
Fetch multiple results from the last ``execute*()`` call.
.. note::
The size parameter is not used for the request/response size.
Set the ``arraysize`` attribute before calling ``execute()`` to
set the batch size.
:type size: int
:param size:
(Optional) Maximum number of rows to return. Defaults to the
``arraysize`` property value.
:rtype: List[tuple]
:returns: A list of rows.
:raises: :class:`~google.cloud.bigquery.dbapi.InterfaceError`
if called before ``execute()``.
|
[
"Fetch",
"multiple",
"results",
"from",
"the",
"last",
"execute",
"*",
"()",
"call",
"."
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/bigquery/google/cloud/bigquery/dbapi/cursor.py#L233-L262
|
train
|
googleapis/google-cloud-python
|
spanner/google/cloud/spanner_v1/gapic/transports/spanner_grpc_transport.py
|
SpannerGrpcTransport.create_channel
|
def create_channel(cls, address="spanner.googleapis.com:443", credentials=None):
"""Create and return a gRPC channel object.
Args:
address (str): The host for the channel to use.
credentials (~.Credentials): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If
none are specified, the client will attempt to ascertain
the credentials from the environment.
Returns:
grpc.Channel: A gRPC channel object.
"""
grpc_gcp_config = grpc_gcp.api_config_from_text_pb(
pkg_resources.resource_string(__name__, _SPANNER_GRPC_CONFIG)
)
options = [(grpc_gcp.API_CONFIG_CHANNEL_ARG, grpc_gcp_config)]
return google.api_core.grpc_helpers.create_channel(
address, credentials=credentials, scopes=cls._OAUTH_SCOPES
)
|
python
|
def create_channel(cls, address="spanner.googleapis.com:443", credentials=None):
"""Create and return a gRPC channel object.
Args:
address (str): The host for the channel to use.
credentials (~.Credentials): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If
none are specified, the client will attempt to ascertain
the credentials from the environment.
Returns:
grpc.Channel: A gRPC channel object.
"""
grpc_gcp_config = grpc_gcp.api_config_from_text_pb(
pkg_resources.resource_string(__name__, _SPANNER_GRPC_CONFIG)
)
options = [(grpc_gcp.API_CONFIG_CHANNEL_ARG, grpc_gcp_config)]
return google.api_core.grpc_helpers.create_channel(
address, credentials=credentials, scopes=cls._OAUTH_SCOPES
)
|
[
"def",
"create_channel",
"(",
"cls",
",",
"address",
"=",
"\"spanner.googleapis.com:443\"",
",",
"credentials",
"=",
"None",
")",
":",
"grpc_gcp_config",
"=",
"grpc_gcp",
".",
"api_config_from_text_pb",
"(",
"pkg_resources",
".",
"resource_string",
"(",
"__name__",
",",
"_SPANNER_GRPC_CONFIG",
")",
")",
"options",
"=",
"[",
"(",
"grpc_gcp",
".",
"API_CONFIG_CHANNEL_ARG",
",",
"grpc_gcp_config",
")",
"]",
"return",
"google",
".",
"api_core",
".",
"grpc_helpers",
".",
"create_channel",
"(",
"address",
",",
"credentials",
"=",
"credentials",
",",
"scopes",
"=",
"cls",
".",
"_OAUTH_SCOPES",
")"
] |
Create and return a gRPC channel object.
Args:
address (str): The host for the channel to use.
credentials (~.Credentials): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If
none are specified, the client will attempt to ascertain
the credentials from the environment.
Returns:
grpc.Channel: A gRPC channel object.
|
[
"Create",
"and",
"return",
"a",
"gRPC",
"channel",
"object",
"."
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/spanner/google/cloud/spanner_v1/gapic/transports/spanner_grpc_transport.py#L78-L98
|
train
|
googleapis/google-cloud-python
|
logging/google/cloud/logging/sink.py
|
Sink._update_from_api_repr
|
def _update_from_api_repr(self, resource):
"""Helper for API methods returning sink resources."""
self.destination = resource["destination"]
self.filter_ = resource.get("filter")
self._writer_identity = resource.get("writerIdentity")
|
python
|
def _update_from_api_repr(self, resource):
"""Helper for API methods returning sink resources."""
self.destination = resource["destination"]
self.filter_ = resource.get("filter")
self._writer_identity = resource.get("writerIdentity")
|
[
"def",
"_update_from_api_repr",
"(",
"self",
",",
"resource",
")",
":",
"self",
".",
"destination",
"=",
"resource",
"[",
"\"destination\"",
"]",
"self",
".",
"filter_",
"=",
"resource",
".",
"get",
"(",
"\"filter\"",
")",
"self",
".",
"_writer_identity",
"=",
"resource",
".",
"get",
"(",
"\"writerIdentity\"",
")"
] |
Helper for API methods returning sink resources.
|
[
"Helper",
"for",
"API",
"methods",
"returning",
"sink",
"resources",
"."
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/logging/google/cloud/logging/sink.py#L75-L79
|
train
|
googleapis/google-cloud-python
|
logging/google/cloud/logging/sink.py
|
Sink.from_api_repr
|
def from_api_repr(cls, resource, client):
"""Factory: construct a sink given its API representation
:type resource: dict
:param resource: sink resource representation returned from the API
:type client: :class:`google.cloud.logging.client.Client`
:param client: Client which holds credentials and project
configuration for the sink.
:rtype: :class:`google.cloud.logging.sink.Sink`
:returns: Sink parsed from ``resource``.
:raises: :class:`ValueError` if ``client`` is not ``None`` and the
project from the resource does not agree with the project
from the client.
"""
sink_name = resource["name"]
instance = cls(sink_name, client=client)
instance._update_from_api_repr(resource)
return instance
|
python
|
def from_api_repr(cls, resource, client):
"""Factory: construct a sink given its API representation
:type resource: dict
:param resource: sink resource representation returned from the API
:type client: :class:`google.cloud.logging.client.Client`
:param client: Client which holds credentials and project
configuration for the sink.
:rtype: :class:`google.cloud.logging.sink.Sink`
:returns: Sink parsed from ``resource``.
:raises: :class:`ValueError` if ``client`` is not ``None`` and the
project from the resource does not agree with the project
from the client.
"""
sink_name = resource["name"]
instance = cls(sink_name, client=client)
instance._update_from_api_repr(resource)
return instance
|
[
"def",
"from_api_repr",
"(",
"cls",
",",
"resource",
",",
"client",
")",
":",
"sink_name",
"=",
"resource",
"[",
"\"name\"",
"]",
"instance",
"=",
"cls",
"(",
"sink_name",
",",
"client",
"=",
"client",
")",
"instance",
".",
"_update_from_api_repr",
"(",
"resource",
")",
"return",
"instance"
] |
Factory: construct a sink given its API representation
:type resource: dict
:param resource: sink resource representation returned from the API
:type client: :class:`google.cloud.logging.client.Client`
:param client: Client which holds credentials and project
configuration for the sink.
:rtype: :class:`google.cloud.logging.sink.Sink`
:returns: Sink parsed from ``resource``.
:raises: :class:`ValueError` if ``client`` is not ``None`` and the
project from the resource does not agree with the project
from the client.
|
[
"Factory",
":",
"construct",
"a",
"sink",
"given",
"its",
"API",
"representation"
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/logging/google/cloud/logging/sink.py#L82-L101
|
train
|
googleapis/google-cloud-python
|
logging/google/cloud/logging/sink.py
|
Sink.create
|
def create(self, client=None, unique_writer_identity=False):
"""API call: create the sink via a PUT request
See
https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks/create
:type client: :class:`~google.cloud.logging.client.Client` or
``NoneType``
:param client: the client to use. If not passed, falls back to the
``client`` stored on the current sink.
:type unique_writer_identity: bool
:param unique_writer_identity: (Optional) determines the kind of
IAM identity returned as
writer_identity in the new sink.
"""
client = self._require_client(client)
resource = client.sinks_api.sink_create(
self.project,
self.name,
self.filter_,
self.destination,
unique_writer_identity=unique_writer_identity,
)
self._update_from_api_repr(resource)
|
python
|
def create(self, client=None, unique_writer_identity=False):
"""API call: create the sink via a PUT request
See
https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks/create
:type client: :class:`~google.cloud.logging.client.Client` or
``NoneType``
:param client: the client to use. If not passed, falls back to the
``client`` stored on the current sink.
:type unique_writer_identity: bool
:param unique_writer_identity: (Optional) determines the kind of
IAM identity returned as
writer_identity in the new sink.
"""
client = self._require_client(client)
resource = client.sinks_api.sink_create(
self.project,
self.name,
self.filter_,
self.destination,
unique_writer_identity=unique_writer_identity,
)
self._update_from_api_repr(resource)
|
[
"def",
"create",
"(",
"self",
",",
"client",
"=",
"None",
",",
"unique_writer_identity",
"=",
"False",
")",
":",
"client",
"=",
"self",
".",
"_require_client",
"(",
"client",
")",
"resource",
"=",
"client",
".",
"sinks_api",
".",
"sink_create",
"(",
"self",
".",
"project",
",",
"self",
".",
"name",
",",
"self",
".",
"filter_",
",",
"self",
".",
"destination",
",",
"unique_writer_identity",
"=",
"unique_writer_identity",
",",
")",
"self",
".",
"_update_from_api_repr",
"(",
"resource",
")"
] |
API call: create the sink via a PUT request
See
https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks/create
:type client: :class:`~google.cloud.logging.client.Client` or
``NoneType``
:param client: the client to use. If not passed, falls back to the
``client`` stored on the current sink.
:type unique_writer_identity: bool
:param unique_writer_identity: (Optional) determines the kind of
IAM identity returned as
writer_identity in the new sink.
|
[
"API",
"call",
":",
"create",
"the",
"sink",
"via",
"a",
"PUT",
"request"
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/logging/google/cloud/logging/sink.py#L118-L142
|
train
|
googleapis/google-cloud-python
|
logging/google/cloud/logging/sink.py
|
Sink.exists
|
def exists(self, client=None):
"""API call: test for the existence of the sink via a GET request
See
https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks/get
:type client: :class:`~google.cloud.logging.client.Client` or
``NoneType``
:param client: the client to use. If not passed, falls back to the
``client`` stored on the current sink.
:rtype: bool
:returns: Boolean indicating existence of the sink.
"""
client = self._require_client(client)
try:
client.sinks_api.sink_get(self.project, self.name)
except NotFound:
return False
else:
return True
|
python
|
def exists(self, client=None):
"""API call: test for the existence of the sink via a GET request
See
https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks/get
:type client: :class:`~google.cloud.logging.client.Client` or
``NoneType``
:param client: the client to use. If not passed, falls back to the
``client`` stored on the current sink.
:rtype: bool
:returns: Boolean indicating existence of the sink.
"""
client = self._require_client(client)
try:
client.sinks_api.sink_get(self.project, self.name)
except NotFound:
return False
else:
return True
|
[
"def",
"exists",
"(",
"self",
",",
"client",
"=",
"None",
")",
":",
"client",
"=",
"self",
".",
"_require_client",
"(",
"client",
")",
"try",
":",
"client",
".",
"sinks_api",
".",
"sink_get",
"(",
"self",
".",
"project",
",",
"self",
".",
"name",
")",
"except",
"NotFound",
":",
"return",
"False",
"else",
":",
"return",
"True"
] |
API call: test for the existence of the sink via a GET request
See
https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks/get
:type client: :class:`~google.cloud.logging.client.Client` or
``NoneType``
:param client: the client to use. If not passed, falls back to the
``client`` stored on the current sink.
:rtype: bool
:returns: Boolean indicating existence of the sink.
|
[
"API",
"call",
":",
"test",
"for",
"the",
"existence",
"of",
"the",
"sink",
"via",
"a",
"GET",
"request"
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/logging/google/cloud/logging/sink.py#L144-L165
|
train
|
googleapis/google-cloud-python
|
logging/google/cloud/logging/sink.py
|
Sink.reload
|
def reload(self, client=None):
"""API call: sync local sink configuration via a GET request
See
https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks/get
:type client: :class:`~google.cloud.logging.client.Client` or
``NoneType``
:param client: the client to use. If not passed, falls back to the
``client`` stored on the current sink.
"""
client = self._require_client(client)
resource = client.sinks_api.sink_get(self.project, self.name)
self._update_from_api_repr(resource)
|
python
|
def reload(self, client=None):
"""API call: sync local sink configuration via a GET request
See
https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks/get
:type client: :class:`~google.cloud.logging.client.Client` or
``NoneType``
:param client: the client to use. If not passed, falls back to the
``client`` stored on the current sink.
"""
client = self._require_client(client)
resource = client.sinks_api.sink_get(self.project, self.name)
self._update_from_api_repr(resource)
|
[
"def",
"reload",
"(",
"self",
",",
"client",
"=",
"None",
")",
":",
"client",
"=",
"self",
".",
"_require_client",
"(",
"client",
")",
"resource",
"=",
"client",
".",
"sinks_api",
".",
"sink_get",
"(",
"self",
".",
"project",
",",
"self",
".",
"name",
")",
"self",
".",
"_update_from_api_repr",
"(",
"resource",
")"
] |
API call: sync local sink configuration via a GET request
See
https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks/get
:type client: :class:`~google.cloud.logging.client.Client` or
``NoneType``
:param client: the client to use. If not passed, falls back to the
``client`` stored on the current sink.
|
[
"API",
"call",
":",
"sync",
"local",
"sink",
"configuration",
"via",
"a",
"GET",
"request"
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/logging/google/cloud/logging/sink.py#L167-L180
|
train
|
googleapis/google-cloud-python
|
logging/google/cloud/logging/sink.py
|
Sink.update
|
def update(self, client=None, unique_writer_identity=False):
"""API call: update sink configuration via a PUT request
See
https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks/update
:type client: :class:`~google.cloud.logging.client.Client` or
``NoneType``
:param client: the client to use. If not passed, falls back to the
``client`` stored on the current sink.
:type unique_writer_identity: bool
:param unique_writer_identity: (Optional) determines the kind of
IAM identity returned as
writer_identity in the new sink.
"""
client = self._require_client(client)
resource = client.sinks_api.sink_update(
self.project,
self.name,
self.filter_,
self.destination,
unique_writer_identity=unique_writer_identity,
)
self._update_from_api_repr(resource)
|
python
|
def update(self, client=None, unique_writer_identity=False):
"""API call: update sink configuration via a PUT request
See
https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks/update
:type client: :class:`~google.cloud.logging.client.Client` or
``NoneType``
:param client: the client to use. If not passed, falls back to the
``client`` stored on the current sink.
:type unique_writer_identity: bool
:param unique_writer_identity: (Optional) determines the kind of
IAM identity returned as
writer_identity in the new sink.
"""
client = self._require_client(client)
resource = client.sinks_api.sink_update(
self.project,
self.name,
self.filter_,
self.destination,
unique_writer_identity=unique_writer_identity,
)
self._update_from_api_repr(resource)
|
[
"def",
"update",
"(",
"self",
",",
"client",
"=",
"None",
",",
"unique_writer_identity",
"=",
"False",
")",
":",
"client",
"=",
"self",
".",
"_require_client",
"(",
"client",
")",
"resource",
"=",
"client",
".",
"sinks_api",
".",
"sink_update",
"(",
"self",
".",
"project",
",",
"self",
".",
"name",
",",
"self",
".",
"filter_",
",",
"self",
".",
"destination",
",",
"unique_writer_identity",
"=",
"unique_writer_identity",
",",
")",
"self",
".",
"_update_from_api_repr",
"(",
"resource",
")"
] |
API call: update sink configuration via a PUT request
See
https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks/update
:type client: :class:`~google.cloud.logging.client.Client` or
``NoneType``
:param client: the client to use. If not passed, falls back to the
``client`` stored on the current sink.
:type unique_writer_identity: bool
:param unique_writer_identity: (Optional) determines the kind of
IAM identity returned as
writer_identity in the new sink.
|
[
"API",
"call",
":",
"update",
"sink",
"configuration",
"via",
"a",
"PUT",
"request"
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/logging/google/cloud/logging/sink.py#L182-L206
|
train
|
googleapis/google-cloud-python
|
logging/google/cloud/logging/sink.py
|
Sink.delete
|
def delete(self, client=None):
"""API call: delete a sink via a DELETE request
See
https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks/delete
:type client: :class:`~google.cloud.logging.client.Client` or
``NoneType``
:param client: the client to use. If not passed, falls back to the
``client`` stored on the current sink.
"""
client = self._require_client(client)
client.sinks_api.sink_delete(self.project, self.name)
|
python
|
def delete(self, client=None):
"""API call: delete a sink via a DELETE request
See
https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks/delete
:type client: :class:`~google.cloud.logging.client.Client` or
``NoneType``
:param client: the client to use. If not passed, falls back to the
``client`` stored on the current sink.
"""
client = self._require_client(client)
client.sinks_api.sink_delete(self.project, self.name)
|
[
"def",
"delete",
"(",
"self",
",",
"client",
"=",
"None",
")",
":",
"client",
"=",
"self",
".",
"_require_client",
"(",
"client",
")",
"client",
".",
"sinks_api",
".",
"sink_delete",
"(",
"self",
".",
"project",
",",
"self",
".",
"name",
")"
] |
API call: delete a sink via a DELETE request
See
https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks/delete
:type client: :class:`~google.cloud.logging.client.Client` or
``NoneType``
:param client: the client to use. If not passed, falls back to the
``client`` stored on the current sink.
|
[
"API",
"call",
":",
"delete",
"a",
"sink",
"via",
"a",
"DELETE",
"request"
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/logging/google/cloud/logging/sink.py#L208-L220
|
train
|
googleapis/google-cloud-python
|
spanner/google/cloud/spanner_v1/streamed.py
|
_merge_float64
|
def _merge_float64(lhs, rhs, type_): # pylint: disable=unused-argument
"""Helper for '_merge_by_type'."""
lhs_kind = lhs.WhichOneof("kind")
if lhs_kind == "string_value":
return Value(string_value=lhs.string_value + rhs.string_value)
rhs_kind = rhs.WhichOneof("kind")
array_continuation = (
lhs_kind == "number_value"
and rhs_kind == "string_value"
and rhs.string_value == ""
)
if array_continuation:
return lhs
raise Unmergeable(lhs, rhs, type_)
|
python
|
def _merge_float64(lhs, rhs, type_): # pylint: disable=unused-argument
"""Helper for '_merge_by_type'."""
lhs_kind = lhs.WhichOneof("kind")
if lhs_kind == "string_value":
return Value(string_value=lhs.string_value + rhs.string_value)
rhs_kind = rhs.WhichOneof("kind")
array_continuation = (
lhs_kind == "number_value"
and rhs_kind == "string_value"
and rhs.string_value == ""
)
if array_continuation:
return lhs
raise Unmergeable(lhs, rhs, type_)
|
[
"def",
"_merge_float64",
"(",
"lhs",
",",
"rhs",
",",
"type_",
")",
":",
"# pylint: disable=unused-argument",
"lhs_kind",
"=",
"lhs",
".",
"WhichOneof",
"(",
"\"kind\"",
")",
"if",
"lhs_kind",
"==",
"\"string_value\"",
":",
"return",
"Value",
"(",
"string_value",
"=",
"lhs",
".",
"string_value",
"+",
"rhs",
".",
"string_value",
")",
"rhs_kind",
"=",
"rhs",
".",
"WhichOneof",
"(",
"\"kind\"",
")",
"array_continuation",
"=",
"(",
"lhs_kind",
"==",
"\"number_value\"",
"and",
"rhs_kind",
"==",
"\"string_value\"",
"and",
"rhs",
".",
"string_value",
"==",
"\"\"",
")",
"if",
"array_continuation",
":",
"return",
"lhs",
"raise",
"Unmergeable",
"(",
"lhs",
",",
"rhs",
",",
"type_",
")"
] |
Helper for '_merge_by_type'.
|
[
"Helper",
"for",
"_merge_by_type",
"."
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/spanner/google/cloud/spanner_v1/streamed.py#L222-L235
|
train
|
googleapis/google-cloud-python
|
spanner/google/cloud/spanner_v1/streamed.py
|
_merge_string
|
def _merge_string(lhs, rhs, type_): # pylint: disable=unused-argument
"""Helper for '_merge_by_type'."""
return Value(string_value=lhs.string_value + rhs.string_value)
|
python
|
def _merge_string(lhs, rhs, type_): # pylint: disable=unused-argument
"""Helper for '_merge_by_type'."""
return Value(string_value=lhs.string_value + rhs.string_value)
|
[
"def",
"_merge_string",
"(",
"lhs",
",",
"rhs",
",",
"type_",
")",
":",
"# pylint: disable=unused-argument",
"return",
"Value",
"(",
"string_value",
"=",
"lhs",
".",
"string_value",
"+",
"rhs",
".",
"string_value",
")"
] |
Helper for '_merge_by_type'.
|
[
"Helper",
"for",
"_merge_by_type",
"."
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/spanner/google/cloud/spanner_v1/streamed.py#L238-L240
|
train
|
googleapis/google-cloud-python
|
spanner/google/cloud/spanner_v1/streamed.py
|
_merge_array
|
def _merge_array(lhs, rhs, type_):
"""Helper for '_merge_by_type'."""
element_type = type_.array_element_type
if element_type.code in _UNMERGEABLE_TYPES:
# Individual values cannot be merged, just concatenate
lhs.list_value.values.extend(rhs.list_value.values)
return lhs
lhs, rhs = list(lhs.list_value.values), list(rhs.list_value.values)
# Sanity check: If either list is empty, short-circuit.
# This is effectively a no-op.
if not len(lhs) or not len(rhs):
return Value(list_value=ListValue(values=(lhs + rhs)))
first = rhs.pop(0)
if first.HasField("null_value"): # can't merge
lhs.append(first)
else:
last = lhs.pop()
try:
merged = _merge_by_type(last, first, element_type)
except Unmergeable:
lhs.append(last)
lhs.append(first)
else:
lhs.append(merged)
return Value(list_value=ListValue(values=(lhs + rhs)))
|
python
|
def _merge_array(lhs, rhs, type_):
"""Helper for '_merge_by_type'."""
element_type = type_.array_element_type
if element_type.code in _UNMERGEABLE_TYPES:
# Individual values cannot be merged, just concatenate
lhs.list_value.values.extend(rhs.list_value.values)
return lhs
lhs, rhs = list(lhs.list_value.values), list(rhs.list_value.values)
# Sanity check: If either list is empty, short-circuit.
# This is effectively a no-op.
if not len(lhs) or not len(rhs):
return Value(list_value=ListValue(values=(lhs + rhs)))
first = rhs.pop(0)
if first.HasField("null_value"): # can't merge
lhs.append(first)
else:
last = lhs.pop()
try:
merged = _merge_by_type(last, first, element_type)
except Unmergeable:
lhs.append(last)
lhs.append(first)
else:
lhs.append(merged)
return Value(list_value=ListValue(values=(lhs + rhs)))
|
[
"def",
"_merge_array",
"(",
"lhs",
",",
"rhs",
",",
"type_",
")",
":",
"element_type",
"=",
"type_",
".",
"array_element_type",
"if",
"element_type",
".",
"code",
"in",
"_UNMERGEABLE_TYPES",
":",
"# Individual values cannot be merged, just concatenate",
"lhs",
".",
"list_value",
".",
"values",
".",
"extend",
"(",
"rhs",
".",
"list_value",
".",
"values",
")",
"return",
"lhs",
"lhs",
",",
"rhs",
"=",
"list",
"(",
"lhs",
".",
"list_value",
".",
"values",
")",
",",
"list",
"(",
"rhs",
".",
"list_value",
".",
"values",
")",
"# Sanity check: If either list is empty, short-circuit.",
"# This is effectively a no-op.",
"if",
"not",
"len",
"(",
"lhs",
")",
"or",
"not",
"len",
"(",
"rhs",
")",
":",
"return",
"Value",
"(",
"list_value",
"=",
"ListValue",
"(",
"values",
"=",
"(",
"lhs",
"+",
"rhs",
")",
")",
")",
"first",
"=",
"rhs",
".",
"pop",
"(",
"0",
")",
"if",
"first",
".",
"HasField",
"(",
"\"null_value\"",
")",
":",
"# can't merge",
"lhs",
".",
"append",
"(",
"first",
")",
"else",
":",
"last",
"=",
"lhs",
".",
"pop",
"(",
")",
"try",
":",
"merged",
"=",
"_merge_by_type",
"(",
"last",
",",
"first",
",",
"element_type",
")",
"except",
"Unmergeable",
":",
"lhs",
".",
"append",
"(",
"last",
")",
"lhs",
".",
"append",
"(",
"first",
")",
"else",
":",
"lhs",
".",
"append",
"(",
"merged",
")",
"return",
"Value",
"(",
"list_value",
"=",
"ListValue",
"(",
"values",
"=",
"(",
"lhs",
"+",
"rhs",
")",
")",
")"
] |
Helper for '_merge_by_type'.
|
[
"Helper",
"for",
"_merge_by_type",
"."
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/spanner/google/cloud/spanner_v1/streamed.py#L246-L272
|
train
|
googleapis/google-cloud-python
|
spanner/google/cloud/spanner_v1/streamed.py
|
_merge_struct
|
def _merge_struct(lhs, rhs, type_):
"""Helper for '_merge_by_type'."""
fields = type_.struct_type.fields
lhs, rhs = list(lhs.list_value.values), list(rhs.list_value.values)
candidate_type = fields[len(lhs) - 1].type
first = rhs.pop(0)
if first.HasField("null_value") or candidate_type.code in _UNMERGEABLE_TYPES:
lhs.append(first)
else:
last = lhs.pop()
lhs.append(_merge_by_type(last, first, candidate_type))
return Value(list_value=ListValue(values=lhs + rhs))
|
python
|
def _merge_struct(lhs, rhs, type_):
"""Helper for '_merge_by_type'."""
fields = type_.struct_type.fields
lhs, rhs = list(lhs.list_value.values), list(rhs.list_value.values)
candidate_type = fields[len(lhs) - 1].type
first = rhs.pop(0)
if first.HasField("null_value") or candidate_type.code in _UNMERGEABLE_TYPES:
lhs.append(first)
else:
last = lhs.pop()
lhs.append(_merge_by_type(last, first, candidate_type))
return Value(list_value=ListValue(values=lhs + rhs))
|
[
"def",
"_merge_struct",
"(",
"lhs",
",",
"rhs",
",",
"type_",
")",
":",
"fields",
"=",
"type_",
".",
"struct_type",
".",
"fields",
"lhs",
",",
"rhs",
"=",
"list",
"(",
"lhs",
".",
"list_value",
".",
"values",
")",
",",
"list",
"(",
"rhs",
".",
"list_value",
".",
"values",
")",
"candidate_type",
"=",
"fields",
"[",
"len",
"(",
"lhs",
")",
"-",
"1",
"]",
".",
"type",
"first",
"=",
"rhs",
".",
"pop",
"(",
"0",
")",
"if",
"first",
".",
"HasField",
"(",
"\"null_value\"",
")",
"or",
"candidate_type",
".",
"code",
"in",
"_UNMERGEABLE_TYPES",
":",
"lhs",
".",
"append",
"(",
"first",
")",
"else",
":",
"last",
"=",
"lhs",
".",
"pop",
"(",
")",
"lhs",
".",
"append",
"(",
"_merge_by_type",
"(",
"last",
",",
"first",
",",
"candidate_type",
")",
")",
"return",
"Value",
"(",
"list_value",
"=",
"ListValue",
"(",
"values",
"=",
"lhs",
"+",
"rhs",
")",
")"
] |
Helper for '_merge_by_type'.
|
[
"Helper",
"for",
"_merge_by_type",
"."
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/spanner/google/cloud/spanner_v1/streamed.py#L275-L286
|
train
|
googleapis/google-cloud-python
|
spanner/google/cloud/spanner_v1/streamed.py
|
_merge_by_type
|
def _merge_by_type(lhs, rhs, type_):
"""Helper for '_merge_chunk'."""
merger = _MERGE_BY_TYPE[type_.code]
return merger(lhs, rhs, type_)
|
python
|
def _merge_by_type(lhs, rhs, type_):
"""Helper for '_merge_chunk'."""
merger = _MERGE_BY_TYPE[type_.code]
return merger(lhs, rhs, type_)
|
[
"def",
"_merge_by_type",
"(",
"lhs",
",",
"rhs",
",",
"type_",
")",
":",
"merger",
"=",
"_MERGE_BY_TYPE",
"[",
"type_",
".",
"code",
"]",
"return",
"merger",
"(",
"lhs",
",",
"rhs",
",",
"type_",
")"
] |
Helper for '_merge_chunk'.
|
[
"Helper",
"for",
"_merge_chunk",
"."
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/spanner/google/cloud/spanner_v1/streamed.py#L302-L305
|
train
|
googleapis/google-cloud-python
|
spanner/google/cloud/spanner_v1/streamed.py
|
StreamedResultSet._merge_chunk
|
def _merge_chunk(self, value):
"""Merge pending chunk with next value.
:type value: :class:`~google.protobuf.struct_pb2.Value`
:param value: continuation of chunked value from previous
partial result set.
:rtype: :class:`~google.protobuf.struct_pb2.Value`
:returns: the merged value
"""
current_column = len(self._current_row)
field = self.fields[current_column]
merged = _merge_by_type(self._pending_chunk, value, field.type)
self._pending_chunk = None
return merged
|
python
|
def _merge_chunk(self, value):
"""Merge pending chunk with next value.
:type value: :class:`~google.protobuf.struct_pb2.Value`
:param value: continuation of chunked value from previous
partial result set.
:rtype: :class:`~google.protobuf.struct_pb2.Value`
:returns: the merged value
"""
current_column = len(self._current_row)
field = self.fields[current_column]
merged = _merge_by_type(self._pending_chunk, value, field.type)
self._pending_chunk = None
return merged
|
[
"def",
"_merge_chunk",
"(",
"self",
",",
"value",
")",
":",
"current_column",
"=",
"len",
"(",
"self",
".",
"_current_row",
")",
"field",
"=",
"self",
".",
"fields",
"[",
"current_column",
"]",
"merged",
"=",
"_merge_by_type",
"(",
"self",
".",
"_pending_chunk",
",",
"value",
",",
"field",
".",
"type",
")",
"self",
".",
"_pending_chunk",
"=",
"None",
"return",
"merged"
] |
Merge pending chunk with next value.
:type value: :class:`~google.protobuf.struct_pb2.Value`
:param value: continuation of chunked value from previous
partial result set.
:rtype: :class:`~google.protobuf.struct_pb2.Value`
:returns: the merged value
|
[
"Merge",
"pending",
"chunk",
"with",
"next",
"value",
"."
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/spanner/google/cloud/spanner_v1/streamed.py#L80-L94
|
train
|
googleapis/google-cloud-python
|
spanner/google/cloud/spanner_v1/streamed.py
|
StreamedResultSet._merge_values
|
def _merge_values(self, values):
"""Merge values into rows.
:type values: list of :class:`~google.protobuf.struct_pb2.Value`
:param values: non-chunked values from partial result set.
"""
width = len(self.fields)
for value in values:
index = len(self._current_row)
field = self.fields[index]
self._current_row.append(_parse_value_pb(value, field.type))
if len(self._current_row) == width:
self._rows.append(self._current_row)
self._current_row = []
|
python
|
def _merge_values(self, values):
"""Merge values into rows.
:type values: list of :class:`~google.protobuf.struct_pb2.Value`
:param values: non-chunked values from partial result set.
"""
width = len(self.fields)
for value in values:
index = len(self._current_row)
field = self.fields[index]
self._current_row.append(_parse_value_pb(value, field.type))
if len(self._current_row) == width:
self._rows.append(self._current_row)
self._current_row = []
|
[
"def",
"_merge_values",
"(",
"self",
",",
"values",
")",
":",
"width",
"=",
"len",
"(",
"self",
".",
"fields",
")",
"for",
"value",
"in",
"values",
":",
"index",
"=",
"len",
"(",
"self",
".",
"_current_row",
")",
"field",
"=",
"self",
".",
"fields",
"[",
"index",
"]",
"self",
".",
"_current_row",
".",
"append",
"(",
"_parse_value_pb",
"(",
"value",
",",
"field",
".",
"type",
")",
")",
"if",
"len",
"(",
"self",
".",
"_current_row",
")",
"==",
"width",
":",
"self",
".",
"_rows",
".",
"append",
"(",
"self",
".",
"_current_row",
")",
"self",
".",
"_current_row",
"=",
"[",
"]"
] |
Merge values into rows.
:type values: list of :class:`~google.protobuf.struct_pb2.Value`
:param values: non-chunked values from partial result set.
|
[
"Merge",
"values",
"into",
"rows",
"."
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/spanner/google/cloud/spanner_v1/streamed.py#L96-L109
|
train
|
googleapis/google-cloud-python
|
spanner/google/cloud/spanner_v1/streamed.py
|
StreamedResultSet._consume_next
|
def _consume_next(self):
"""Consume the next partial result set from the stream.
Parse the result set into new/existing rows in :attr:`_rows`
"""
response = six.next(self._response_iterator)
self._counter += 1
if self._metadata is None: # first response
metadata = self._metadata = response.metadata
source = self._source
if source is not None and source._transaction_id is None:
source._transaction_id = metadata.transaction.id
if response.HasField("stats"): # last response
self._stats = response.stats
values = list(response.values)
if self._pending_chunk is not None:
values[0] = self._merge_chunk(values[0])
if response.chunked_value:
self._pending_chunk = values.pop()
self._merge_values(values)
|
python
|
def _consume_next(self):
"""Consume the next partial result set from the stream.
Parse the result set into new/existing rows in :attr:`_rows`
"""
response = six.next(self._response_iterator)
self._counter += 1
if self._metadata is None: # first response
metadata = self._metadata = response.metadata
source = self._source
if source is not None and source._transaction_id is None:
source._transaction_id = metadata.transaction.id
if response.HasField("stats"): # last response
self._stats = response.stats
values = list(response.values)
if self._pending_chunk is not None:
values[0] = self._merge_chunk(values[0])
if response.chunked_value:
self._pending_chunk = values.pop()
self._merge_values(values)
|
[
"def",
"_consume_next",
"(",
"self",
")",
":",
"response",
"=",
"six",
".",
"next",
"(",
"self",
".",
"_response_iterator",
")",
"self",
".",
"_counter",
"+=",
"1",
"if",
"self",
".",
"_metadata",
"is",
"None",
":",
"# first response",
"metadata",
"=",
"self",
".",
"_metadata",
"=",
"response",
".",
"metadata",
"source",
"=",
"self",
".",
"_source",
"if",
"source",
"is",
"not",
"None",
"and",
"source",
".",
"_transaction_id",
"is",
"None",
":",
"source",
".",
"_transaction_id",
"=",
"metadata",
".",
"transaction",
".",
"id",
"if",
"response",
".",
"HasField",
"(",
"\"stats\"",
")",
":",
"# last response",
"self",
".",
"_stats",
"=",
"response",
".",
"stats",
"values",
"=",
"list",
"(",
"response",
".",
"values",
")",
"if",
"self",
".",
"_pending_chunk",
"is",
"not",
"None",
":",
"values",
"[",
"0",
"]",
"=",
"self",
".",
"_merge_chunk",
"(",
"values",
"[",
"0",
"]",
")",
"if",
"response",
".",
"chunked_value",
":",
"self",
".",
"_pending_chunk",
"=",
"values",
".",
"pop",
"(",
")",
"self",
".",
"_merge_values",
"(",
"values",
")"
] |
Consume the next partial result set from the stream.
Parse the result set into new/existing rows in :attr:`_rows`
|
[
"Consume",
"the",
"next",
"partial",
"result",
"set",
"from",
"the",
"stream",
"."
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/spanner/google/cloud/spanner_v1/streamed.py#L111-L136
|
train
|
googleapis/google-cloud-python
|
spanner/google/cloud/spanner_v1/streamed.py
|
StreamedResultSet.one_or_none
|
def one_or_none(self):
"""Return exactly one result, or None if there are no results.
:raises: :exc:`ValueError`: If there are multiple results.
:raises: :exc:`RuntimeError`: If consumption has already occurred,
in whole or in part.
"""
# Sanity check: Has consumption of this query already started?
# If it has, then this is an exception.
if self._metadata is not None:
raise RuntimeError(
"Can not call `.one` or `.one_or_none` after "
"stream consumption has already started."
)
# Consume the first result of the stream.
# If there is no first result, then return None.
iterator = iter(self)
try:
answer = next(iterator)
except StopIteration:
return None
# Attempt to consume more. This should no-op; if we get additional
# rows, then this is an error case.
try:
next(iterator)
raise ValueError("Expected one result; got more.")
except StopIteration:
return answer
|
python
|
def one_or_none(self):
"""Return exactly one result, or None if there are no results.
:raises: :exc:`ValueError`: If there are multiple results.
:raises: :exc:`RuntimeError`: If consumption has already occurred,
in whole or in part.
"""
# Sanity check: Has consumption of this query already started?
# If it has, then this is an exception.
if self._metadata is not None:
raise RuntimeError(
"Can not call `.one` or `.one_or_none` after "
"stream consumption has already started."
)
# Consume the first result of the stream.
# If there is no first result, then return None.
iterator = iter(self)
try:
answer = next(iterator)
except StopIteration:
return None
# Attempt to consume more. This should no-op; if we get additional
# rows, then this is an error case.
try:
next(iterator)
raise ValueError("Expected one result; got more.")
except StopIteration:
return answer
|
[
"def",
"one_or_none",
"(",
"self",
")",
":",
"# Sanity check: Has consumption of this query already started?",
"# If it has, then this is an exception.",
"if",
"self",
".",
"_metadata",
"is",
"not",
"None",
":",
"raise",
"RuntimeError",
"(",
"\"Can not call `.one` or `.one_or_none` after \"",
"\"stream consumption has already started.\"",
")",
"# Consume the first result of the stream.",
"# If there is no first result, then return None.",
"iterator",
"=",
"iter",
"(",
"self",
")",
"try",
":",
"answer",
"=",
"next",
"(",
"iterator",
")",
"except",
"StopIteration",
":",
"return",
"None",
"# Attempt to consume more. This should no-op; if we get additional",
"# rows, then this is an error case.",
"try",
":",
"next",
"(",
"iterator",
")",
"raise",
"ValueError",
"(",
"\"Expected one result; got more.\"",
")",
"except",
"StopIteration",
":",
"return",
"answer"
] |
Return exactly one result, or None if there are no results.
:raises: :exc:`ValueError`: If there are multiple results.
:raises: :exc:`RuntimeError`: If consumption has already occurred,
in whole or in part.
|
[
"Return",
"exactly",
"one",
"result",
"or",
"None",
"if",
"there",
"are",
"no",
"results",
"."
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/spanner/google/cloud/spanner_v1/streamed.py#L163-L192
|
train
|
googleapis/google-cloud-python
|
vision/google/cloud/vision_v1p3beta1/gapic/product_search_client.py
|
ProductSearchClient.product_set_path
|
def product_set_path(cls, project, location, product_set):
"""Return a fully-qualified product_set string."""
return google.api_core.path_template.expand(
"projects/{project}/locations/{location}/productSets/{product_set}",
project=project,
location=location,
product_set=product_set,
)
|
python
|
def product_set_path(cls, project, location, product_set):
"""Return a fully-qualified product_set string."""
return google.api_core.path_template.expand(
"projects/{project}/locations/{location}/productSets/{product_set}",
project=project,
location=location,
product_set=product_set,
)
|
[
"def",
"product_set_path",
"(",
"cls",
",",
"project",
",",
"location",
",",
"product_set",
")",
":",
"return",
"google",
".",
"api_core",
".",
"path_template",
".",
"expand",
"(",
"\"projects/{project}/locations/{location}/productSets/{product_set}\"",
",",
"project",
"=",
"project",
",",
"location",
"=",
"location",
",",
"product_set",
"=",
"product_set",
",",
")"
] |
Return a fully-qualified product_set string.
|
[
"Return",
"a",
"fully",
"-",
"qualified",
"product_set",
"string",
"."
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/vision/google/cloud/vision_v1p3beta1/gapic/product_search_client.py#L101-L108
|
train
|
googleapis/google-cloud-python
|
vision/google/cloud/vision_v1p3beta1/gapic/product_search_client.py
|
ProductSearchClient.product_path
|
def product_path(cls, project, location, product):
"""Return a fully-qualified product string."""
return google.api_core.path_template.expand(
"projects/{project}/locations/{location}/products/{product}",
project=project,
location=location,
product=product,
)
|
python
|
def product_path(cls, project, location, product):
"""Return a fully-qualified product string."""
return google.api_core.path_template.expand(
"projects/{project}/locations/{location}/products/{product}",
project=project,
location=location,
product=product,
)
|
[
"def",
"product_path",
"(",
"cls",
",",
"project",
",",
"location",
",",
"product",
")",
":",
"return",
"google",
".",
"api_core",
".",
"path_template",
".",
"expand",
"(",
"\"projects/{project}/locations/{location}/products/{product}\"",
",",
"project",
"=",
"project",
",",
"location",
"=",
"location",
",",
"product",
"=",
"product",
",",
")"
] |
Return a fully-qualified product string.
|
[
"Return",
"a",
"fully",
"-",
"qualified",
"product",
"string",
"."
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/vision/google/cloud/vision_v1p3beta1/gapic/product_search_client.py#L111-L118
|
train
|
googleapis/google-cloud-python
|
vision/google/cloud/vision_v1p3beta1/gapic/product_search_client.py
|
ProductSearchClient.reference_image_path
|
def reference_image_path(cls, project, location, product, reference_image):
"""Return a fully-qualified reference_image string."""
return google.api_core.path_template.expand(
"projects/{project}/locations/{location}/products/{product}/referenceImages/{reference_image}",
project=project,
location=location,
product=product,
reference_image=reference_image,
)
|
python
|
def reference_image_path(cls, project, location, product, reference_image):
"""Return a fully-qualified reference_image string."""
return google.api_core.path_template.expand(
"projects/{project}/locations/{location}/products/{product}/referenceImages/{reference_image}",
project=project,
location=location,
product=product,
reference_image=reference_image,
)
|
[
"def",
"reference_image_path",
"(",
"cls",
",",
"project",
",",
"location",
",",
"product",
",",
"reference_image",
")",
":",
"return",
"google",
".",
"api_core",
".",
"path_template",
".",
"expand",
"(",
"\"projects/{project}/locations/{location}/products/{product}/referenceImages/{reference_image}\"",
",",
"project",
"=",
"project",
",",
"location",
"=",
"location",
",",
"product",
"=",
"product",
",",
"reference_image",
"=",
"reference_image",
",",
")"
] |
Return a fully-qualified reference_image string.
|
[
"Return",
"a",
"fully",
"-",
"qualified",
"reference_image",
"string",
"."
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/vision/google/cloud/vision_v1p3beta1/gapic/product_search_client.py#L121-L129
|
train
|
googleapis/google-cloud-python
|
vision/google/cloud/vision_v1p3beta1/gapic/product_search_client.py
|
ProductSearchClient.create_product_set
|
def create_product_set(
self,
parent,
product_set,
product_set_id,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Creates and returns a new ProductSet resource.
Possible errors:
- Returns INVALID\_ARGUMENT if display\_name is missing, or is longer
than 4096 characters.
Example:
>>> from google.cloud import vision_v1p3beta1
>>>
>>> client = vision_v1p3beta1.ProductSearchClient()
>>>
>>> parent = client.location_path('[PROJECT]', '[LOCATION]')
>>>
>>> # TODO: Initialize `product_set`:
>>> product_set = {}
>>>
>>> # TODO: Initialize `product_set_id`:
>>> product_set_id = ''
>>>
>>> response = client.create_product_set(parent, product_set, product_set_id)
Args:
parent (str): The project in which the ProductSet should be created.
Format is ``projects/PROJECT_ID/locations/LOC_ID``.
product_set (Union[dict, ~google.cloud.vision_v1p3beta1.types.ProductSet]): The ProductSet to create.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.vision_v1p3beta1.types.ProductSet`
product_set_id (str): A user-supplied resource id for this ProductSet. If set, the server will
attempt to use this value as the resource id. If it is already in use,
an error is returned with code ALREADY\_EXISTS. Must be at most 128
characters long. It cannot contain the character ``/``.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.vision_v1p3beta1.types.ProductSet` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "create_product_set" not in self._inner_api_calls:
self._inner_api_calls[
"create_product_set"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.create_product_set,
default_retry=self._method_configs["CreateProductSet"].retry,
default_timeout=self._method_configs["CreateProductSet"].timeout,
client_info=self._client_info,
)
request = product_search_service_pb2.CreateProductSetRequest(
parent=parent, product_set=product_set, product_set_id=product_set_id
)
if metadata is None:
metadata = []
metadata = list(metadata)
try:
routing_header = [("parent", parent)]
except AttributeError:
pass
else:
routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
routing_header
)
metadata.append(routing_metadata)
return self._inner_api_calls["create_product_set"](
request, retry=retry, timeout=timeout, metadata=metadata
)
|
python
|
def create_product_set(
self,
parent,
product_set,
product_set_id,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Creates and returns a new ProductSet resource.
Possible errors:
- Returns INVALID\_ARGUMENT if display\_name is missing, or is longer
than 4096 characters.
Example:
>>> from google.cloud import vision_v1p3beta1
>>>
>>> client = vision_v1p3beta1.ProductSearchClient()
>>>
>>> parent = client.location_path('[PROJECT]', '[LOCATION]')
>>>
>>> # TODO: Initialize `product_set`:
>>> product_set = {}
>>>
>>> # TODO: Initialize `product_set_id`:
>>> product_set_id = ''
>>>
>>> response = client.create_product_set(parent, product_set, product_set_id)
Args:
parent (str): The project in which the ProductSet should be created.
Format is ``projects/PROJECT_ID/locations/LOC_ID``.
product_set (Union[dict, ~google.cloud.vision_v1p3beta1.types.ProductSet]): The ProductSet to create.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.vision_v1p3beta1.types.ProductSet`
product_set_id (str): A user-supplied resource id for this ProductSet. If set, the server will
attempt to use this value as the resource id. If it is already in use,
an error is returned with code ALREADY\_EXISTS. Must be at most 128
characters long. It cannot contain the character ``/``.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.vision_v1p3beta1.types.ProductSet` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "create_product_set" not in self._inner_api_calls:
self._inner_api_calls[
"create_product_set"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.create_product_set,
default_retry=self._method_configs["CreateProductSet"].retry,
default_timeout=self._method_configs["CreateProductSet"].timeout,
client_info=self._client_info,
)
request = product_search_service_pb2.CreateProductSetRequest(
parent=parent, product_set=product_set, product_set_id=product_set_id
)
if metadata is None:
metadata = []
metadata = list(metadata)
try:
routing_header = [("parent", parent)]
except AttributeError:
pass
else:
routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
routing_header
)
metadata.append(routing_metadata)
return self._inner_api_calls["create_product_set"](
request, retry=retry, timeout=timeout, metadata=metadata
)
|
[
"def",
"create_product_set",
"(",
"self",
",",
"parent",
",",
"product_set",
",",
"product_set_id",
",",
"retry",
"=",
"google",
".",
"api_core",
".",
"gapic_v1",
".",
"method",
".",
"DEFAULT",
",",
"timeout",
"=",
"google",
".",
"api_core",
".",
"gapic_v1",
".",
"method",
".",
"DEFAULT",
",",
"metadata",
"=",
"None",
",",
")",
":",
"# Wrap the transport method to add retry and timeout logic.",
"if",
"\"create_product_set\"",
"not",
"in",
"self",
".",
"_inner_api_calls",
":",
"self",
".",
"_inner_api_calls",
"[",
"\"create_product_set\"",
"]",
"=",
"google",
".",
"api_core",
".",
"gapic_v1",
".",
"method",
".",
"wrap_method",
"(",
"self",
".",
"transport",
".",
"create_product_set",
",",
"default_retry",
"=",
"self",
".",
"_method_configs",
"[",
"\"CreateProductSet\"",
"]",
".",
"retry",
",",
"default_timeout",
"=",
"self",
".",
"_method_configs",
"[",
"\"CreateProductSet\"",
"]",
".",
"timeout",
",",
"client_info",
"=",
"self",
".",
"_client_info",
",",
")",
"request",
"=",
"product_search_service_pb2",
".",
"CreateProductSetRequest",
"(",
"parent",
"=",
"parent",
",",
"product_set",
"=",
"product_set",
",",
"product_set_id",
"=",
"product_set_id",
")",
"if",
"metadata",
"is",
"None",
":",
"metadata",
"=",
"[",
"]",
"metadata",
"=",
"list",
"(",
"metadata",
")",
"try",
":",
"routing_header",
"=",
"[",
"(",
"\"parent\"",
",",
"parent",
")",
"]",
"except",
"AttributeError",
":",
"pass",
"else",
":",
"routing_metadata",
"=",
"google",
".",
"api_core",
".",
"gapic_v1",
".",
"routing_header",
".",
"to_grpc_metadata",
"(",
"routing_header",
")",
"metadata",
".",
"append",
"(",
"routing_metadata",
")",
"return",
"self",
".",
"_inner_api_calls",
"[",
"\"create_product_set\"",
"]",
"(",
"request",
",",
"retry",
"=",
"retry",
",",
"timeout",
"=",
"timeout",
",",
"metadata",
"=",
"metadata",
")"
] |
Creates and returns a new ProductSet resource.
Possible errors:
- Returns INVALID\_ARGUMENT if display\_name is missing, or is longer
than 4096 characters.
Example:
>>> from google.cloud import vision_v1p3beta1
>>>
>>> client = vision_v1p3beta1.ProductSearchClient()
>>>
>>> parent = client.location_path('[PROJECT]', '[LOCATION]')
>>>
>>> # TODO: Initialize `product_set`:
>>> product_set = {}
>>>
>>> # TODO: Initialize `product_set_id`:
>>> product_set_id = ''
>>>
>>> response = client.create_product_set(parent, product_set, product_set_id)
Args:
parent (str): The project in which the ProductSet should be created.
Format is ``projects/PROJECT_ID/locations/LOC_ID``.
product_set (Union[dict, ~google.cloud.vision_v1p3beta1.types.ProductSet]): The ProductSet to create.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.vision_v1p3beta1.types.ProductSet`
product_set_id (str): A user-supplied resource id for this ProductSet. If set, the server will
attempt to use this value as the resource id. If it is already in use,
an error is returned with code ALREADY\_EXISTS. Must be at most 128
characters long. It cannot contain the character ``/``.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.vision_v1p3beta1.types.ProductSet` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
|
[
"Creates",
"and",
"returns",
"a",
"new",
"ProductSet",
"resource",
"."
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/vision/google/cloud/vision_v1p3beta1/gapic/product_search_client.py#L230-L322
|
train
|
googleapis/google-cloud-python
|
vision/google/cloud/vision_v1p3beta1/gapic/product_search_client.py
|
ProductSearchClient.create_reference_image
|
def create_reference_image(
self,
parent,
reference_image,
reference_image_id,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Creates and returns a new ReferenceImage resource.
The ``bounding_poly`` field is optional. If ``bounding_poly`` is not
specified, the system will try to detect regions of interest in the
image that are compatible with the product\_category on the parent
product. If it is specified, detection is ALWAYS skipped. The system
converts polygons into non-rotated rectangles.
Note that the pipeline will resize the image if the image resolution is
too large to process (above 50MP).
Possible errors:
- Returns INVALID\_ARGUMENT if the image\_uri is missing or longer than
4096 characters.
- Returns INVALID\_ARGUMENT if the product does not exist.
- Returns INVALID\_ARGUMENT if bounding\_poly is not provided, and
nothing compatible with the parent product's product\_category is
detected.
- Returns INVALID\_ARGUMENT if bounding\_poly contains more than 10
polygons.
Example:
>>> from google.cloud import vision_v1p3beta1
>>>
>>> client = vision_v1p3beta1.ProductSearchClient()
>>>
>>> parent = client.product_path('[PROJECT]', '[LOCATION]', '[PRODUCT]')
>>>
>>> # TODO: Initialize `reference_image`:
>>> reference_image = {}
>>>
>>> # TODO: Initialize `reference_image_id`:
>>> reference_image_id = ''
>>>
>>> response = client.create_reference_image(parent, reference_image, reference_image_id)
Args:
parent (str): Resource name of the product in which to create the reference image.
Format is ``projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID``.
reference_image (Union[dict, ~google.cloud.vision_v1p3beta1.types.ReferenceImage]): The reference image to create.
If an image ID is specified, it is ignored.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.vision_v1p3beta1.types.ReferenceImage`
reference_image_id (str): A user-supplied resource id for the ReferenceImage to be added. If set,
the server will attempt to use this value as the resource id. If it is
already in use, an error is returned with code ALREADY\_EXISTS. Must be
at most 128 characters long. It cannot contain the character ``/``.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.vision_v1p3beta1.types.ReferenceImage` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "create_reference_image" not in self._inner_api_calls:
self._inner_api_calls[
"create_reference_image"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.create_reference_image,
default_retry=self._method_configs["CreateReferenceImage"].retry,
default_timeout=self._method_configs["CreateReferenceImage"].timeout,
client_info=self._client_info,
)
request = product_search_service_pb2.CreateReferenceImageRequest(
parent=parent,
reference_image=reference_image,
reference_image_id=reference_image_id,
)
if metadata is None:
metadata = []
metadata = list(metadata)
try:
routing_header = [("parent", parent)]
except AttributeError:
pass
else:
routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
routing_header
)
metadata.append(routing_metadata)
return self._inner_api_calls["create_reference_image"](
request, retry=retry, timeout=timeout, metadata=metadata
)
|
python
|
def create_reference_image(
self,
parent,
reference_image,
reference_image_id,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Creates and returns a new ReferenceImage resource.
The ``bounding_poly`` field is optional. If ``bounding_poly`` is not
specified, the system will try to detect regions of interest in the
image that are compatible with the product\_category on the parent
product. If it is specified, detection is ALWAYS skipped. The system
converts polygons into non-rotated rectangles.
Note that the pipeline will resize the image if the image resolution is
too large to process (above 50MP).
Possible errors:
- Returns INVALID\_ARGUMENT if the image\_uri is missing or longer than
4096 characters.
- Returns INVALID\_ARGUMENT if the product does not exist.
- Returns INVALID\_ARGUMENT if bounding\_poly is not provided, and
nothing compatible with the parent product's product\_category is
detected.
- Returns INVALID\_ARGUMENT if bounding\_poly contains more than 10
polygons.
Example:
>>> from google.cloud import vision_v1p3beta1
>>>
>>> client = vision_v1p3beta1.ProductSearchClient()
>>>
>>> parent = client.product_path('[PROJECT]', '[LOCATION]', '[PRODUCT]')
>>>
>>> # TODO: Initialize `reference_image`:
>>> reference_image = {}
>>>
>>> # TODO: Initialize `reference_image_id`:
>>> reference_image_id = ''
>>>
>>> response = client.create_reference_image(parent, reference_image, reference_image_id)
Args:
parent (str): Resource name of the product in which to create the reference image.
Format is ``projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID``.
reference_image (Union[dict, ~google.cloud.vision_v1p3beta1.types.ReferenceImage]): The reference image to create.
If an image ID is specified, it is ignored.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.vision_v1p3beta1.types.ReferenceImage`
reference_image_id (str): A user-supplied resource id for the ReferenceImage to be added. If set,
the server will attempt to use this value as the resource id. If it is
already in use, an error is returned with code ALREADY\_EXISTS. Must be
at most 128 characters long. It cannot contain the character ``/``.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.vision_v1p3beta1.types.ReferenceImage` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "create_reference_image" not in self._inner_api_calls:
self._inner_api_calls[
"create_reference_image"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.create_reference_image,
default_retry=self._method_configs["CreateReferenceImage"].retry,
default_timeout=self._method_configs["CreateReferenceImage"].timeout,
client_info=self._client_info,
)
request = product_search_service_pb2.CreateReferenceImageRequest(
parent=parent,
reference_image=reference_image,
reference_image_id=reference_image_id,
)
if metadata is None:
metadata = []
metadata = list(metadata)
try:
routing_header = [("parent", parent)]
except AttributeError:
pass
else:
routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
routing_header
)
metadata.append(routing_metadata)
return self._inner_api_calls["create_reference_image"](
request, retry=retry, timeout=timeout, metadata=metadata
)
|
[
"def",
"create_reference_image",
"(",
"self",
",",
"parent",
",",
"reference_image",
",",
"reference_image_id",
",",
"retry",
"=",
"google",
".",
"api_core",
".",
"gapic_v1",
".",
"method",
".",
"DEFAULT",
",",
"timeout",
"=",
"google",
".",
"api_core",
".",
"gapic_v1",
".",
"method",
".",
"DEFAULT",
",",
"metadata",
"=",
"None",
",",
")",
":",
"# Wrap the transport method to add retry and timeout logic.",
"if",
"\"create_reference_image\"",
"not",
"in",
"self",
".",
"_inner_api_calls",
":",
"self",
".",
"_inner_api_calls",
"[",
"\"create_reference_image\"",
"]",
"=",
"google",
".",
"api_core",
".",
"gapic_v1",
".",
"method",
".",
"wrap_method",
"(",
"self",
".",
"transport",
".",
"create_reference_image",
",",
"default_retry",
"=",
"self",
".",
"_method_configs",
"[",
"\"CreateReferenceImage\"",
"]",
".",
"retry",
",",
"default_timeout",
"=",
"self",
".",
"_method_configs",
"[",
"\"CreateReferenceImage\"",
"]",
".",
"timeout",
",",
"client_info",
"=",
"self",
".",
"_client_info",
",",
")",
"request",
"=",
"product_search_service_pb2",
".",
"CreateReferenceImageRequest",
"(",
"parent",
"=",
"parent",
",",
"reference_image",
"=",
"reference_image",
",",
"reference_image_id",
"=",
"reference_image_id",
",",
")",
"if",
"metadata",
"is",
"None",
":",
"metadata",
"=",
"[",
"]",
"metadata",
"=",
"list",
"(",
"metadata",
")",
"try",
":",
"routing_header",
"=",
"[",
"(",
"\"parent\"",
",",
"parent",
")",
"]",
"except",
"AttributeError",
":",
"pass",
"else",
":",
"routing_metadata",
"=",
"google",
".",
"api_core",
".",
"gapic_v1",
".",
"routing_header",
".",
"to_grpc_metadata",
"(",
"routing_header",
")",
"metadata",
".",
"append",
"(",
"routing_metadata",
")",
"return",
"self",
".",
"_inner_api_calls",
"[",
"\"create_reference_image\"",
"]",
"(",
"request",
",",
"retry",
"=",
"retry",
",",
"timeout",
"=",
"timeout",
",",
"metadata",
"=",
"metadata",
")"
] |
Creates and returns a new ReferenceImage resource.
The ``bounding_poly`` field is optional. If ``bounding_poly`` is not
specified, the system will try to detect regions of interest in the
image that are compatible with the product\_category on the parent
product. If it is specified, detection is ALWAYS skipped. The system
converts polygons into non-rotated rectangles.
Note that the pipeline will resize the image if the image resolution is
too large to process (above 50MP).
Possible errors:
- Returns INVALID\_ARGUMENT if the image\_uri is missing or longer than
4096 characters.
- Returns INVALID\_ARGUMENT if the product does not exist.
- Returns INVALID\_ARGUMENT if bounding\_poly is not provided, and
nothing compatible with the parent product's product\_category is
detected.
- Returns INVALID\_ARGUMENT if bounding\_poly contains more than 10
polygons.
Example:
>>> from google.cloud import vision_v1p3beta1
>>>
>>> client = vision_v1p3beta1.ProductSearchClient()
>>>
>>> parent = client.product_path('[PROJECT]', '[LOCATION]', '[PRODUCT]')
>>>
>>> # TODO: Initialize `reference_image`:
>>> reference_image = {}
>>>
>>> # TODO: Initialize `reference_image_id`:
>>> reference_image_id = ''
>>>
>>> response = client.create_reference_image(parent, reference_image, reference_image_id)
Args:
parent (str): Resource name of the product in which to create the reference image.
Format is ``projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID``.
reference_image (Union[dict, ~google.cloud.vision_v1p3beta1.types.ReferenceImage]): The reference image to create.
If an image ID is specified, it is ignored.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.vision_v1p3beta1.types.ReferenceImage`
reference_image_id (str): A user-supplied resource id for the ReferenceImage to be added. If set,
the server will attempt to use this value as the resource id. If it is
already in use, an error is returned with code ALREADY\_EXISTS. Must be
at most 128 characters long. It cannot contain the character ``/``.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.vision_v1p3beta1.types.ReferenceImage` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
|
[
"Creates",
"and",
"returns",
"a",
"new",
"ReferenceImage",
"resource",
"."
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/vision/google/cloud/vision_v1p3beta1/gapic/product_search_client.py#L1136-L1246
|
train
|
googleapis/google-cloud-python
|
vision/google/cloud/vision_v1p3beta1/gapic/product_search_client.py
|
ProductSearchClient.import_product_sets
|
def import_product_sets(
self,
parent,
input_config,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Asynchronous API that imports a list of reference images to specified
product sets based on a list of image information.
The ``google.longrunning.Operation`` API can be used to keep track of
the progress and results of the request. ``Operation.metadata`` contains
``BatchOperationMetadata``. (progress) ``Operation.response`` contains
``ImportProductSetsResponse``. (results)
The input source of this method is a csv file on Google Cloud Storage.
For the format of the csv file please see
``ImportProductSetsGcsSource.csv_file_uri``.
Example:
>>> from google.cloud import vision_v1p3beta1
>>>
>>> client = vision_v1p3beta1.ProductSearchClient()
>>>
>>> parent = client.location_path('[PROJECT]', '[LOCATION]')
>>>
>>> # TODO: Initialize `input_config`:
>>> input_config = {}
>>>
>>> response = client.import_product_sets(parent, input_config)
>>>
>>> def callback(operation_future):
... # Handle result.
... result = operation_future.result()
>>>
>>> response.add_done_callback(callback)
>>>
>>> # Handle metadata.
>>> metadata = response.metadata()
Args:
parent (str): The project in which the ProductSets should be imported.
Format is ``projects/PROJECT_ID/locations/LOC_ID``.
input_config (Union[dict, ~google.cloud.vision_v1p3beta1.types.ImportProductSetsInputConfig]): The input content for the list of requests.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.vision_v1p3beta1.types.ImportProductSetsInputConfig`
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.vision_v1p3beta1.types._OperationFuture` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "import_product_sets" not in self._inner_api_calls:
self._inner_api_calls[
"import_product_sets"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.import_product_sets,
default_retry=self._method_configs["ImportProductSets"].retry,
default_timeout=self._method_configs["ImportProductSets"].timeout,
client_info=self._client_info,
)
request = product_search_service_pb2.ImportProductSetsRequest(
parent=parent, input_config=input_config
)
if metadata is None:
metadata = []
metadata = list(metadata)
try:
routing_header = [("parent", parent)]
except AttributeError:
pass
else:
routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
routing_header
)
metadata.append(routing_metadata)
operation = self._inner_api_calls["import_product_sets"](
request, retry=retry, timeout=timeout, metadata=metadata
)
return google.api_core.operation.from_gapic(
operation,
self.transport._operations_client,
product_search_service_pb2.ImportProductSetsResponse,
metadata_type=product_search_service_pb2.BatchOperationMetadata,
)
|
python
|
def import_product_sets(
self,
parent,
input_config,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Asynchronous API that imports a list of reference images to specified
product sets based on a list of image information.
The ``google.longrunning.Operation`` API can be used to keep track of
the progress and results of the request. ``Operation.metadata`` contains
``BatchOperationMetadata``. (progress) ``Operation.response`` contains
``ImportProductSetsResponse``. (results)
The input source of this method is a csv file on Google Cloud Storage.
For the format of the csv file please see
``ImportProductSetsGcsSource.csv_file_uri``.
Example:
>>> from google.cloud import vision_v1p3beta1
>>>
>>> client = vision_v1p3beta1.ProductSearchClient()
>>>
>>> parent = client.location_path('[PROJECT]', '[LOCATION]')
>>>
>>> # TODO: Initialize `input_config`:
>>> input_config = {}
>>>
>>> response = client.import_product_sets(parent, input_config)
>>>
>>> def callback(operation_future):
... # Handle result.
... result = operation_future.result()
>>>
>>> response.add_done_callback(callback)
>>>
>>> # Handle metadata.
>>> metadata = response.metadata()
Args:
parent (str): The project in which the ProductSets should be imported.
Format is ``projects/PROJECT_ID/locations/LOC_ID``.
input_config (Union[dict, ~google.cloud.vision_v1p3beta1.types.ImportProductSetsInputConfig]): The input content for the list of requests.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.vision_v1p3beta1.types.ImportProductSetsInputConfig`
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.vision_v1p3beta1.types._OperationFuture` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "import_product_sets" not in self._inner_api_calls:
self._inner_api_calls[
"import_product_sets"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.import_product_sets,
default_retry=self._method_configs["ImportProductSets"].retry,
default_timeout=self._method_configs["ImportProductSets"].timeout,
client_info=self._client_info,
)
request = product_search_service_pb2.ImportProductSetsRequest(
parent=parent, input_config=input_config
)
if metadata is None:
metadata = []
metadata = list(metadata)
try:
routing_header = [("parent", parent)]
except AttributeError:
pass
else:
routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
routing_header
)
metadata.append(routing_metadata)
operation = self._inner_api_calls["import_product_sets"](
request, retry=retry, timeout=timeout, metadata=metadata
)
return google.api_core.operation.from_gapic(
operation,
self.transport._operations_client,
product_search_service_pb2.ImportProductSetsResponse,
metadata_type=product_search_service_pb2.BatchOperationMetadata,
)
|
[
"def",
"import_product_sets",
"(",
"self",
",",
"parent",
",",
"input_config",
",",
"retry",
"=",
"google",
".",
"api_core",
".",
"gapic_v1",
".",
"method",
".",
"DEFAULT",
",",
"timeout",
"=",
"google",
".",
"api_core",
".",
"gapic_v1",
".",
"method",
".",
"DEFAULT",
",",
"metadata",
"=",
"None",
",",
")",
":",
"# Wrap the transport method to add retry and timeout logic.",
"if",
"\"import_product_sets\"",
"not",
"in",
"self",
".",
"_inner_api_calls",
":",
"self",
".",
"_inner_api_calls",
"[",
"\"import_product_sets\"",
"]",
"=",
"google",
".",
"api_core",
".",
"gapic_v1",
".",
"method",
".",
"wrap_method",
"(",
"self",
".",
"transport",
".",
"import_product_sets",
",",
"default_retry",
"=",
"self",
".",
"_method_configs",
"[",
"\"ImportProductSets\"",
"]",
".",
"retry",
",",
"default_timeout",
"=",
"self",
".",
"_method_configs",
"[",
"\"ImportProductSets\"",
"]",
".",
"timeout",
",",
"client_info",
"=",
"self",
".",
"_client_info",
",",
")",
"request",
"=",
"product_search_service_pb2",
".",
"ImportProductSetsRequest",
"(",
"parent",
"=",
"parent",
",",
"input_config",
"=",
"input_config",
")",
"if",
"metadata",
"is",
"None",
":",
"metadata",
"=",
"[",
"]",
"metadata",
"=",
"list",
"(",
"metadata",
")",
"try",
":",
"routing_header",
"=",
"[",
"(",
"\"parent\"",
",",
"parent",
")",
"]",
"except",
"AttributeError",
":",
"pass",
"else",
":",
"routing_metadata",
"=",
"google",
".",
"api_core",
".",
"gapic_v1",
".",
"routing_header",
".",
"to_grpc_metadata",
"(",
"routing_header",
")",
"metadata",
".",
"append",
"(",
"routing_metadata",
")",
"operation",
"=",
"self",
".",
"_inner_api_calls",
"[",
"\"import_product_sets\"",
"]",
"(",
"request",
",",
"retry",
"=",
"retry",
",",
"timeout",
"=",
"timeout",
",",
"metadata",
"=",
"metadata",
")",
"return",
"google",
".",
"api_core",
".",
"operation",
".",
"from_gapic",
"(",
"operation",
",",
"self",
".",
"transport",
".",
"_operations_client",
",",
"product_search_service_pb2",
".",
"ImportProductSetsResponse",
",",
"metadata_type",
"=",
"product_search_service_pb2",
".",
"BatchOperationMetadata",
",",
")"
] |
Asynchronous API that imports a list of reference images to specified
product sets based on a list of image information.
The ``google.longrunning.Operation`` API can be used to keep track of
the progress and results of the request. ``Operation.metadata`` contains
``BatchOperationMetadata``. (progress) ``Operation.response`` contains
``ImportProductSetsResponse``. (results)
The input source of this method is a csv file on Google Cloud Storage.
For the format of the csv file please see
``ImportProductSetsGcsSource.csv_file_uri``.
Example:
>>> from google.cloud import vision_v1p3beta1
>>>
>>> client = vision_v1p3beta1.ProductSearchClient()
>>>
>>> parent = client.location_path('[PROJECT]', '[LOCATION]')
>>>
>>> # TODO: Initialize `input_config`:
>>> input_config = {}
>>>
>>> response = client.import_product_sets(parent, input_config)
>>>
>>> def callback(operation_future):
... # Handle result.
... result = operation_future.result()
>>>
>>> response.add_done_callback(callback)
>>>
>>> # Handle metadata.
>>> metadata = response.metadata()
Args:
parent (str): The project in which the ProductSets should be imported.
Format is ``projects/PROJECT_ID/locations/LOC_ID``.
input_config (Union[dict, ~google.cloud.vision_v1p3beta1.types.ImportProductSetsInputConfig]): The input content for the list of requests.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.vision_v1p3beta1.types.ImportProductSetsInputConfig`
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.vision_v1p3beta1.types._OperationFuture` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
|
[
"Asynchronous",
"API",
"that",
"imports",
"a",
"list",
"of",
"reference",
"images",
"to",
"specified",
"product",
"sets",
"based",
"on",
"a",
"list",
"of",
"image",
"information",
"."
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/vision/google/cloud/vision_v1p3beta1/gapic/product_search_client.py#L1800-L1904
|
train
|
googleapis/google-cloud-python
|
logging/google/cloud/logging/_gapic.py
|
_parse_log_entry
|
def _parse_log_entry(entry_pb):
"""Special helper to parse ``LogEntry`` protobuf into a dictionary.
The ``proto_payload`` field in ``LogEntry`` is of type ``Any``. This
can be problematic if the type URL in the payload isn't in the
``google.protobuf`` registry. To help with parsing unregistered types,
this function will remove ``proto_payload`` before parsing.
:type entry_pb: :class:`.log_entry_pb2.LogEntry`
:param entry_pb: Log entry protobuf.
:rtype: dict
:returns: The parsed log entry. The ``protoPayload`` key may contain
the raw ``Any`` protobuf from ``entry_pb.proto_payload`` if
it could not be parsed.
"""
try:
return MessageToDict(entry_pb)
except TypeError:
if entry_pb.HasField("proto_payload"):
proto_payload = entry_pb.proto_payload
entry_pb.ClearField("proto_payload")
entry_mapping = MessageToDict(entry_pb)
entry_mapping["protoPayload"] = proto_payload
return entry_mapping
else:
raise
|
python
|
def _parse_log_entry(entry_pb):
"""Special helper to parse ``LogEntry`` protobuf into a dictionary.
The ``proto_payload`` field in ``LogEntry`` is of type ``Any``. This
can be problematic if the type URL in the payload isn't in the
``google.protobuf`` registry. To help with parsing unregistered types,
this function will remove ``proto_payload`` before parsing.
:type entry_pb: :class:`.log_entry_pb2.LogEntry`
:param entry_pb: Log entry protobuf.
:rtype: dict
:returns: The parsed log entry. The ``protoPayload`` key may contain
the raw ``Any`` protobuf from ``entry_pb.proto_payload`` if
it could not be parsed.
"""
try:
return MessageToDict(entry_pb)
except TypeError:
if entry_pb.HasField("proto_payload"):
proto_payload = entry_pb.proto_payload
entry_pb.ClearField("proto_payload")
entry_mapping = MessageToDict(entry_pb)
entry_mapping["protoPayload"] = proto_payload
return entry_mapping
else:
raise
|
[
"def",
"_parse_log_entry",
"(",
"entry_pb",
")",
":",
"try",
":",
"return",
"MessageToDict",
"(",
"entry_pb",
")",
"except",
"TypeError",
":",
"if",
"entry_pb",
".",
"HasField",
"(",
"\"proto_payload\"",
")",
":",
"proto_payload",
"=",
"entry_pb",
".",
"proto_payload",
"entry_pb",
".",
"ClearField",
"(",
"\"proto_payload\"",
")",
"entry_mapping",
"=",
"MessageToDict",
"(",
"entry_pb",
")",
"entry_mapping",
"[",
"\"protoPayload\"",
"]",
"=",
"proto_payload",
"return",
"entry_mapping",
"else",
":",
"raise"
] |
Special helper to parse ``LogEntry`` protobuf into a dictionary.
The ``proto_payload`` field in ``LogEntry`` is of type ``Any``. This
can be problematic if the type URL in the payload isn't in the
``google.protobuf`` registry. To help with parsing unregistered types,
this function will remove ``proto_payload`` before parsing.
:type entry_pb: :class:`.log_entry_pb2.LogEntry`
:param entry_pb: Log entry protobuf.
:rtype: dict
:returns: The parsed log entry. The ``protoPayload`` key may contain
the raw ``Any`` protobuf from ``entry_pb.proto_payload`` if
it could not be parsed.
|
[
"Special",
"helper",
"to",
"parse",
"LogEntry",
"protobuf",
"into",
"a",
"dictionary",
"."
] |
85e80125a59cb10f8cb105f25ecc099e4b940b50
|
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/logging/google/cloud/logging/_gapic.py#L421-L447
|
train
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.