repo stringlengths 7 90 | file_url stringlengths 81 315 | file_path stringlengths 4 228 | content stringlengths 0 32.8k | language stringclasses 1
value | license stringclasses 7
values | commit_sha stringlengths 40 40 | retrieved_at stringdate 2026-01-04 14:38:15 2026-01-05 02:33:18 | truncated bool 2
classes |
|---|---|---|---|---|---|---|---|---|
pyet-org/pyet | https://github.com/pyet-org/pyet/blob/2e857fe5dcc7cfe8d1ef990156dd6f153769de7b/docs/conf.py | docs/conf.py | # Configuration file for the Sphinx documentation builder.
#
# This file only contains a selection of the most common options. For a full
# list see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
from datetime import date
import requests

# Current year, interpolated into the copyright notice below.
year = date.today().strftime("%Y")
sys.path.insert(0, os.path.abspath("."))

# Get a Bibtex reference file from the Zotero group for referencing.
# NOTE(review): this runs a live network request at every docs build, with no
# timeout and no r.raise_for_status() -- a failed request would silently write
# an error body into references.bib. Confirm this is acceptable for CI.
url = "https://api.zotero.org/groups/4846265/collections/M9ZRDX2U/items/"
params = {"format": "bibtex", "style": "apa", "limit": 100}
r = requests.get(url=url, params=params)
with open("references.bib", mode="w") as file:
    file.write(r.text)

# Get a Bibtex reference file from the Zotero group for the publications list
# (same caveat as above regarding network failures).
url = "https://api.zotero.org/groups/4846265/collections/UR7PHVDK/items/"
params = {"format": "bibtex", "style": "apa", "limit": 100}
r = requests.get(url=url, params=params)
with open("publications.bib", mode="w") as file:
    file.write(r.text)

# -- Project information -----------------------------------------------------
project = "pyet"
copyright = "{}, M. Vremec, R.A. Collenteur".format(year)
author = "M. Vremec, R.A. Collenteur"
# The full version, including alpha/beta/rc tags
release = "2020"

# -- General configuration ---------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
    "sphinx.ext.autodoc",
    "sphinx.ext.autosummary",
    "sphinx.ext.napoleon",
    "sphinx.ext.doctest",
    "sphinx.ext.intersphinx",
    "sphinx.ext.todo",
    "sphinx.ext.mathjax",
    "sphinx.ext.ifconfig",
    "sphinx.ext.viewcode",
    "IPython.sphinxext.ipython_console_highlighting",  # lowercase didn't work
    "sphinx.ext.autosectionlabel",
    "sphinxcontrib.bibtex",
    "myst_nb",
    "numpydoc",
    "sphinx_design",
]

# Create custom bracket style with round brackets.
# From https://sphinxcontrib-bibtex.readthedocs.io/en/latest/usage.html
from dataclasses import dataclass, field
import sphinxcontrib.bibtex.plugin
from sphinxcontrib.bibtex.style.referencing import BracketStyle
from sphinxcontrib.bibtex.style.referencing.author_year import AuthorYearReferenceStyle
def bracket_style() -> BracketStyle:
    """Return a ``BracketStyle`` that uses round parentheses."""
    round_brackets = BracketStyle(left="(", right=")")
    return round_brackets
@dataclass
class MyReferenceStyle(AuthorYearReferenceStyle):
    """Author-year reference style whose brackets are all round parentheses.

    Each bracket slot inherited from ``AuthorYearReferenceStyle`` is
    overridden with the round-bracket style built by ``bracket_style``.
    """

    # default_factory (rather than a plain default) because BracketStyle
    # instances are mutable and must not be shared between fields.
    bracket_parenthetical: BracketStyle = field(default_factory=bracket_style)
    bracket_textual: BracketStyle = field(default_factory=bracket_style)
    bracket_author: BracketStyle = field(default_factory=bracket_style)
    bracket_label: BracketStyle = field(default_factory=bracket_style)
    bracket_year: BracketStyle = field(default_factory=bracket_style)
# Register the round-bracket author-year style under a name that
# ``bibtex_reference_style`` below can refer to.
sphinxcontrib.bibtex.plugin.register_plugin(
    "sphinxcontrib.bibtex.style.referencing", "author_year_round", MyReferenceStyle
)
bibtex_bibfiles = ["references.bib", "publications.bib"]
bibtex_reference_style = "author_year_round"

# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]
source_suffix = ".rst"

# The master toctree document.
master_doc = "index"

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = ["_build", "**.ipynb_checkpoints"]

# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = "pydata_sphinx_theme"

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ["_static"]
html_logo = "_static/logo.png"
html_use_smartypants = True
html_show_sourcelink = True
html_theme_options = {
    "use_edit_page_button": True,
    "header_links_before_dropdown": 6,
    "icon_links": [
        {
            "name": "GitHub",  # Label for this link
            "url": "https://github.com/pyet-org/pyet",  # required
            "icon": "fab fa-github-square",
            "type": "fontawesome",  # Default is fontawesome
        }
    ],
}
autosummary_generate = True

# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {
    "numpy": ("https://numpy.org/doc/stable/", None),
    "pandas": ("https://pandas.pydata.org/pandas-docs/stable/", None),
    "python": ("https://docs.python.org/3/", None),
    "xarray": ("https://docs.xarray.dev/en/stable/", None),
}

# -- myst_nb options ------------------------------------------------------------------
nb_execution_allow_errors = True  # Allow errors in notebooks, to see the error online
nb_execution_mode = "auto"

# Enable specific MyST extensions, such as "dollarmath" for math rendering
myst_enable_extensions = [
    "dollarmath",
]

# -- Numpydoc settings ----------------------------------------------------------------
numpydoc_class_members_toctree = True
# Assigned exactly once here: this setting was previously duplicated (set to
# the same value in two different places in this file), which was redundant.
numpydoc_show_class_members = False
| python | MIT | 2e857fe5dcc7cfe8d1ef990156dd6f153769de7b | 2026-01-05T07:11:19.888488Z | false |
pyet-org/pyet | https://github.com/pyet-org/pyet/blob/2e857fe5dcc7cfe8d1ef990156dd6f153769de7b/docs/examples/utils.py | docs/examples/utils.py | import numpy as np
import pandas as pd
from spotpy.objectivefunctions import kge, rsquared, bias
# Unit-conversion and journal figure-width constants used by the plots below.
cm1 = 1 / 2.54  # centimeters in inches
figw_1c = 8.5 * cm1  # maximum width for 1 column
figw_2c = 17.5 * cm1  # maximum width for 2 columns
def scatter_1(ax, x, y, label="treatment", xlabel="obs", ylabel="sim",
              best_fit=True, veg_ws=None):
    """Draw an observed-vs-simulated scatter plot with goodness-of-fit text.

    Plots (x, y) pairs on *ax* with a 1:1 line, an optional linear best-fit
    line, and a text box with Bias, R^2 and KGE (computed with spotpy's
    objective functions, sim first / obs second). Axes are fixed to [-0.1, 10].

    Parameters
    ----------
    ax : matplotlib Axes to draw on (modified in place and also returned).
    x, y : observed and simulated series (anything pd.DataFrame accepts).
    label : text placed in the upper-left corner of the axes.
    xlabel, ylabel : NOTE(review) -- accepted but currently unused in the
        body; either apply them via ax.set_xlabel/set_ylabel or drop them.
    best_fit : if True, overlay a degree-1 polynomial fit as a red dashed line.
    veg_ws : optional mask series; rows where ``veg_ws == 0`` are blanked to
        NaN and then dropped before plotting.

    Returns
    -------
    The same *ax*, for call chaining.
    """
    compare = pd.DataFrame({"x": x, "y": y})
    if veg_ws is not None:
        # Blank out rows where the mask is zero (e.g. no vegetation/water).
        compare[veg_ws == 0] = np.nan
    # Drop NaN rows so polyfit and the metrics see finite values only.
    compare = compare.dropna()
    ax.plot(compare["x"], compare["y"], marker="o",
            linestyle="None", markersize=2, color="k", fillstyle="none")
    # 1:1 reference line spanning the fixed axis range.
    ax.plot([-0.1, 10], [-0.1, 10], color="dodgerblue", alpha=0.7,
            linewidth="0.8")
    ax.axes.set_xticks(np.arange(0, 10 + 2, 2))
    ax.axes.set_yticks(np.arange(0, 10 + 2, 2))
    ax.set_xlim(-0.1, 10)
    ax.set_ylim(-0.1, 10)
    if best_fit:
        # Degree-1 least-squares fit of sim on obs.
        p = np.polyfit(compare["x"], compare["y"], 1)
        f = np.poly1d(p)
        # Calculating new x's and y's
        x_new = np.linspace(0, 10, y.size)
        y_new = f(x_new)
        # Plotting the best fit line with the equation as a legend in latex
        ax.plot(x_new, y_new, "r--", linewidth="0.8")
    ax.text(0.02, 0.9, f"{label}", color="k", zorder=10,
            transform=ax.transAxes)
    # Goodness-of-fit block; spotpy's signatures are (evaluation, simulation),
    # here called as (y, x) -- i.e. simulated first. TODO confirm intended order.
    ax.text(0.6, 0.04, "$Bias$ = " + str(
        round(bias(np.asarray(compare["y"]), np.asarray(compare["x"])), 2)) +
        "\n" + "$R^2$ = " + str(
        round(rsquared(np.asarray(compare["y"]), np.asarray(compare["x"])),
              2)) +
        "\n" + "KGE = " + str(
        round(kge(np.asarray(compare["y"]), np.asarray(compare["x"])), 2)),
        color="k", zorder=10, transform=ax.transAxes)
    return ax
| python | MIT | 2e857fe5dcc7cfe8d1ef990156dd6f153769de7b | 2026-01-05T07:11:19.888488Z | false |
google/apitools | https://github.com/google/apitools/blob/1b14f58b3c895f542c724028dd0fb8ae0d816510/setup.py | setup.py | #!/usr/bin/env python
#
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Setup configuration."""
import platform
try:
import setuptools
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
import setuptools
# Configure the required packages and scripts to install, depending on
# Python version and OS.
REQUIRED_PACKAGES = [
'httplib2>=0.8',
'fasteners>=0.14',
'oauth2client>=1.4.12',
'six>=1.12.0',
]
CLI_PACKAGES = [
'python-gflags>=3.0.6',
]
TESTING_PACKAGES = [
'mock>=1.0.1',
]
CONSOLE_SCRIPTS = [
'gen_client = apitools.gen.gen_client:main',
]
py_version = platform.python_version()
_APITOOLS_VERSION = '0.5.35'
with open('README.rst') as fileobj:
README = fileobj.read()
setuptools.setup(
name='google-apitools',
version=_APITOOLS_VERSION,
description='client libraries for humans',
long_description=README,
url='http://github.com/google/apitools',
author='Craig Citro',
author_email='craigcitro@google.com',
python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*',
# Contained modules and scripts.
packages=setuptools.find_packages(include=['apitools']),
entry_points={'console_scripts': CONSOLE_SCRIPTS},
install_requires=REQUIRED_PACKAGES,
tests_require=REQUIRED_PACKAGES + CLI_PACKAGES + TESTING_PACKAGES,
extras_require={
'cli': CLI_PACKAGES,
'testing': TESTING_PACKAGES,
},
# Add in any packaged data.
include_package_data=True,
package_data={
'apitools.data': ['*'],
},
exclude_package_data={
'': [
'*_test.py',
'*/testing/*',
'*/testdata/*',
'base/protorpclite/test_util.py',
'gen/test_utils.py',
],
},
# PyPI package information.
classifiers=[
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Libraries :: Python Modules',
],
license='Apache 2.0',
keywords='apitools',
)
| python | Apache-2.0 | 1b14f58b3c895f542c724028dd0fb8ae0d816510 | 2026-01-05T07:12:01.390580Z | false |
google/apitools | https://github.com/google/apitools/blob/1b14f58b3c895f542c724028dd0fb8ae0d816510/ez_setup.py | ez_setup.py | #!/usr/bin/env python
#
# Copyright 2015 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Bootstrap setuptools installation
If you want to use setuptools in your package's setup.py, just include this
file in the same directory with it, and add this to the top of your setup.py::
from ez_setup import use_setuptools
use_setuptools()
If you want to require a specific version of setuptools, set a download
mirror, or use an alternate download directory, you can do so by supplying
the appropriate options to ``use_setuptools()``.
This file can also be run as a script to install or upgrade setuptools.
"""
import sys

# Legacy Python 2 bootstrap script (uses 'print >>' syntax and urllib2).
# Default setuptools egg version and the PyPI URL for the running
# major.minor Python version.
DEFAULT_VERSION = "0.6c11"
DEFAULT_URL = "http://pypi.python.org/packages/%s/s/setuptools/" % sys.version[:3]

# Known-good MD5 digests for the downloadable setuptools eggs; keyed by egg
# file name. Maintained via update_md5() below.
md5_data = {
    'setuptools-0.6c10-py2.3.egg': 'ce1e2ab5d3a0256456d9fc13800a7090',
    'setuptools-0.6c10-py2.4.egg': '57d6d9d6e9b80772c59a53a8433a5dd4',
    'setuptools-0.6c10-py2.5.egg': 'de46ac8b1c97c895572e5e8596aeb8c7',
    'setuptools-0.6c10-py2.6.egg': '58ea40aef06da02ce641495523a0b7f5',
    'setuptools-0.6c11-py2.3.egg': '2baeac6e13d414a9d28e7ba5b5a596de',
    'setuptools-0.6c11-py2.4.egg': 'bd639f9b0eac4c42497034dec2ec0c2b',
    'setuptools-0.6c11-py2.5.egg': '64c94f3bf7a72a13ec83e0b24f2749b2',
    'setuptools-0.6c11-py2.6.egg': 'bfa92100bd772d5a213eedd356d64086',
    'setuptools-0.6c8-py2.3.egg': '50759d29b349db8cfd807ba8303f1902',
    'setuptools-0.6c8-py2.4.egg': 'cba38d74f7d483c06e9daa6070cce6de',
    'setuptools-0.6c8-py2.5.egg': '1721747ee329dc150590a58b3e1ac95b',
    'setuptools-0.6c9-py2.3.egg': 'a83c4020414807b496e4cfbe08507c03',
    'setuptools-0.6c9-py2.4.egg': '260a2be2e5388d66bdaee06abec6342a',
    'setuptools-0.6c9-py2.5.egg': 'fe67c3e5a17b12c0e7c541b7ea43a8e6',
    'setuptools-0.6c9-py2.6.egg': 'ca37b1ff16fa2ede6e19383e7b59245a',
}

import sys, os
# hashlib replaced the md5 module in Python 2.5; fall back for older runtimes.
try: from hashlib import md5
except ImportError: from md5 import md5
def _validate_md5(egg_name, data):
    """Check *data* against the registry digest for *egg_name*; return data.

    Eggs not present in md5_data are passed through unverified. On a digest
    mismatch the script aborts with exit code 2 (Python 2 print syntax).
    """
    if egg_name in md5_data:
        digest = md5(data).hexdigest()
        if digest != md5_data[egg_name]:
            print >>sys.stderr, (
                "md5 validation of %s failed! (Possible download problem?)"
                % egg_name
            )
            sys.exit(2)
    return data
def use_setuptools(
    version=DEFAULT_VERSION, download_base=DEFAULT_URL, to_dir=os.curdir,
    download_delay=15
):
    """Automatically find/download setuptools and make it available on sys.path
    `version` should be a valid setuptools version number that is available
    as an egg for download under the `download_base` URL (which should end with
    a '/'). `to_dir` is the directory where setuptools will be downloaded, if
    it is not already available. If `download_delay` is specified, it should
    be the number of seconds that will be paused before initiating a download,
    should one be required. If an older version of setuptools is installed,
    this routine will print a message to ``sys.stderr`` and raise SystemExit in
    an attempt to abort the calling script.
    """
    # Whether setuptools machinery was imported before we got a chance to
    # bootstrap -- if so, an in-place upgrade is unsafe.
    was_imported = 'pkg_resources' in sys.modules or 'setuptools' in sys.modules
    def do_download():
        # Fetch the egg, put it first on sys.path, and record where the
        # bootstrap came from so setuptools can self-install later.
        egg = download_setuptools(version, download_base, to_dir, download_delay)
        sys.path.insert(0, egg)
        import setuptools; setuptools.bootstrap_install_from = egg
    try:
        import pkg_resources
    except ImportError:
        # No setuptools at all: download is the only option.
        return do_download()
    try:
        pkg_resources.require("setuptools>="+version); return
    except pkg_resources.VersionConflict as e:
        if was_imported:
            # Too late to swap versions in this process; tell the user and bail.
            print >>sys.stderr, (
                "The required version of setuptools (>=%s) is not available, and\n"
                "can't be installed while this script is running. Please install\n"
                " a more recent version first, using 'easy_install -U setuptools'."
                "\n\n(Currently using %r)"
            ) % (version, e.args[0])
            sys.exit(2)
    except pkg_resources.DistributionNotFound:
        pass
    # Older/absent distribution and nothing imported yet: drop the stale
    # module and download a fresh egg.
    del pkg_resources, sys.modules['pkg_resources'] # reload ok
    return do_download()
def download_setuptools(
    version=DEFAULT_VERSION, download_base=DEFAULT_URL, to_dir=os.curdir,
    delay = 15
):
    """Download setuptools from a specified location and return its filename
    `version` should be a valid setuptools version number that is available
    as an egg for download under the `download_base` URL (which should end
    with a '/'). `to_dir` is the directory where the egg will be downloaded.
    `delay` is the number of seconds to pause before an actual download attempt.
    """
    import urllib2, shutil
    egg_name = "setuptools-%s-py%s.egg" % (version,sys.version[:3])
    url = download_base + egg_name
    saveto = os.path.join(to_dir, egg_name)
    src = dst = None
    if not os.path.exists(saveto): # Avoid repeated downloads
        try:
            from distutils import log
            if delay:
                log.warn("""
---------------------------------------------------------------------------
This script requires setuptools version %s to run (even to display
help). I will attempt to download it for you (from
%s), but
you may need to enable firewall access for this script first.
I will start the download in %d seconds.
(Note: if this machine does not have network access, please obtain the file
%s
and place it in this directory before rerunning this script.)
---------------------------------------------------------------------------""",
                    version, download_base, delay, url
                ); from time import sleep; sleep(delay)
            log.warn("Downloading %s", url)
            src = urllib2.urlopen(url)
            # Read/write all in one block, so we don't create a corrupt file
            # if the download is interrupted.
            data = _validate_md5(egg_name, src.read())
            dst = open(saveto,"wb"); dst.write(data)
        finally:
            # Always release both handles, even on download/validation failure.
            if src: src.close()
            if dst: dst.close()
    return os.path.realpath(saveto)
def main(argv, version=DEFAULT_VERSION):
    """Install or upgrade setuptools and EasyInstall"""
    try:
        import setuptools
    except ImportError:
        # No setuptools: download a temporary egg, use it to install itself,
        # then delete the egg regardless of outcome.
        egg = None
        try:
            egg = download_setuptools(version, delay=0)
            sys.path.insert(0,egg)
            from setuptools.command.easy_install import main
            return main(list(argv)+[egg]) # we're done here
        finally:
            if egg and os.path.exists(egg):
                os.unlink(egg)
    else:
        if setuptools.__version__ == '0.0.1':
            # Sentinel version shipped by some broken distributions.
            print >>sys.stderr, (
                "You have an obsolete version of setuptools installed. Please\n"
                "remove it from your system entirely before rerunning this script."
            )
            sys.exit(2)
    req = "setuptools>="+version
    import pkg_resources
    try:
        pkg_resources.require(req)
    except pkg_resources.VersionConflict:
        # Installed version is too old: upgrade in place via easy_install.
        try:
            from setuptools.command.easy_install import main
        except ImportError:
            from easy_install import main
        main(list(argv)+[download_setuptools(delay=0)])
        sys.exit(0) # try to force an exit
    else:
        if argv:
            from setuptools.command.easy_install import main
            main(argv)
        else:
            # Nothing to install; report success (Python 2 print statements).
            print "Setuptools version",version,"or greater has been installed."
            print '(Run "ez_setup.py -U setuptools" to reinstall or upgrade.)'
def update_md5(filenames):
    """Update our built-in md5 registry"""
    import re
    # Digest each given egg file and record it under its base name.
    for name in filenames:
        base = os.path.basename(name)
        f = open(name,'rb')
        md5_data[base] = md5(f.read()).hexdigest()
        f.close()
    data = [" %r: %r,\n" % it for it in md5_data.items()]
    data.sort()
    repl = "".join(data)
    import inspect
    # Rewrite THIS source file in place, replacing the body of the
    # md5_data = {...} literal with the refreshed, sorted entries.
    srcfile = inspect.getsourcefile(sys.modules[__name__])
    f = open(srcfile, 'rb'); src = f.read(); f.close()
    match = re.search("\nmd5_data = {\n([^}]+)}", src)
    if not match:
        print >>sys.stderr, "Internal error!"
        sys.exit(2)
    src = src[:match.start(1)] + repl + src[match.end(1):]
    f = open(srcfile,'w')
    f.write(src)
    f.close()
if __name__=='__main__':
    # '--md5update FILE...' refreshes the embedded digest registry;
    # anything else is forwarded to main() as easy_install arguments.
    if len(sys.argv)>2 and sys.argv[1]=='--md5update':
        update_md5(sys.argv[2:])
    else:
        main(sys.argv[1:])
| python | Apache-2.0 | 1b14f58b3c895f542c724028dd0fb8ae0d816510 | 2026-01-05T07:12:01.390580Z | false |
google/apitools | https://github.com/google/apitools/blob/1b14f58b3c895f542c724028dd0fb8ae0d816510/samples/regenerate_samples.py | samples/regenerate_samples.py | # Copyright 2015 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Script to regenerate samples with latest client generator.
To run:
python samples/regenerate_samples.py
"""
import os
import subprocess
import sys
# Discovery documents (paths relative to samples/) whose generated client
# code is checked in and regenerated by this script.
_SAMPLES = [
    'bigquery_sample/bigquery_v2.json',
    'dns_sample/dns_v1.json',
    'iam_sample/iam_v1.json',
    'fusiontables_sample/fusiontables_v1.json',
    'servicemanagement_sample/servicemanagement_v1.json',
    'storage_sample/storage_v1.json',
]
def _Generate(samples):
# insert $PWD onto PYTHONPATH
insert_python_dir = os.getcwd()
python_path = os.environ.get('PYTHONPATH')
if python_path:
python_path = os.pathsep.join([insert_python_dir, python_path])
else:
python_path = insert_python_dir
os.environ['PYTHONPATH'] = python_path
for sample in samples:
sample_dir, sample_doc = os.path.split(sample)
sample_dir = 'samples/' + sample_dir
name, ext = os.path.splitext(sample_doc)
if ext != '.json':
raise RuntimeError('Expected .json discovery doc [{0}]'
.format(sample))
api_name, api_version = name.split('_')
args = [
'python',
'apitools/gen/gen_client.py',
'--infile', 'samples/' + sample,
'--init-file', 'empty',
'--outdir={0}'.format(os.path.join(sample_dir, name)),
'--overwrite',
'--root_package',
'samples.{0}_sample.{0}_{1}'.format(api_name, api_version),
'client',
]
sys.stderr.write('Running: {}\n'.format(' '.join(args)))
subprocess.check_call(args)
if __name__ == '__main__':
    # Regenerate every checked-in sample client.
    _Generate(_SAMPLES)
| python | Apache-2.0 | 1b14f58b3c895f542c724028dd0fb8ae0d816510 | 2026-01-05T07:12:01.390580Z | false |
google/apitools | https://github.com/google/apitools/blob/1b14f58b3c895f542c724028dd0fb8ae0d816510/samples/__init__.py | samples/__init__.py | # Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| python | Apache-2.0 | 1b14f58b3c895f542c724028dd0fb8ae0d816510 | 2026-01-05T07:12:01.390580Z | false |
google/apitools | https://github.com/google/apitools/blob/1b14f58b3c895f542c724028dd0fb8ae0d816510/samples/uptodate_check_test.py | samples/uptodate_check_test.py | # Copyright 2015 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import difflib
import unittest
import six
from apitools.gen import gen_client
from apitools.gen import test_utils
def GetSampleClientPath(api_name, *path):
    """Return the path of *path components inside the <api_name>_sample dir."""
    sample_dir = api_name + '_sample'
    return os.path.join(os.path.dirname(__file__), sample_dir, *path)
def _GetContent(file_path):
with open(file_path) as f:
return f.read()
class ClientGenCliTest(unittest.TestCase):
def AssertDiffEqual(self, expected, actual):
"""Like unittest.assertEqual with a diff in the exception message."""
if expected != actual:
unified_diff = difflib.unified_diff(
expected.splitlines(), actual.splitlines())
raise AssertionError('\n'.join(unified_diff))
def _CheckGeneratedFiles(self, api_name, api_version):
prefix = api_name + '_' + api_version
with test_utils.TempDir() as tmp_dir_path:
gen_client.main([
gen_client.__file__,
'--init-file', 'empty',
'--infile',
GetSampleClientPath(api_name, prefix + '.json'),
'--outdir', tmp_dir_path,
'--overwrite',
'--root_package',
'samples.{0}_sample.{0}_{1}'.format(api_name, api_version),
'client'
])
expected_files = (
set([prefix + '_client.py',
prefix + '_messages.py',
'__init__.py']))
self.assertEqual(expected_files, set(os.listdir(tmp_dir_path)))
for expected_file in expected_files:
self.AssertDiffEqual(
_GetContent(GetSampleClientPath(
api_name, prefix, expected_file)),
_GetContent(os.path.join(tmp_dir_path, expected_file)))
def testGenClient_BigqueryDoc(self):
self._CheckGeneratedFiles('bigquery', 'v2')
def testGenClient_DnsDoc(self):
self._CheckGeneratedFiles('dns', 'v1')
def testGenClient_FusiontablesDoc(self):
self._CheckGeneratedFiles('fusiontables', 'v1')
def testGenClient_IamDoc(self):
self._CheckGeneratedFiles('iam', 'v1')
def testGenClient_ServicemanagementDoc(self):
self._CheckGeneratedFiles('servicemanagement', 'v1')
def testGenClient_StorageDoc(self):
self._CheckGeneratedFiles('storage', 'v1')
| python | Apache-2.0 | 1b14f58b3c895f542c724028dd0fb8ae0d816510 | 2026-01-05T07:12:01.390580Z | false |
google/apitools | https://github.com/google/apitools/blob/1b14f58b3c895f542c724028dd0fb8ae0d816510/samples/dns_sample/gen_dns_client_test.py | samples/dns_sample/gen_dns_client_test.py | #
# Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test for generated sample module."""
import unittest
import six
from apitools.base.py import list_pager
from apitools.base.py.testing import mock
from samples.dns_sample.dns_v1 import dns_v1_client
from samples.dns_sample.dns_v1 import dns_v1_messages
class DnsGenClientSanityTest(unittest.TestCase):
    """Sanity checks on the generated DnsV1 client class itself."""

    def testBaseUrl(self):
        self.assertEqual(u'https://www.googleapis.com/dns/v1/',
                         dns_v1_client.DnsV1.BASE_URL)

    def testMessagesModule(self):
        self.assertEqual(dns_v1_messages, dns_v1_client.DnsV1.MESSAGES_MODULE)

    def testAttributes(self):
        # Collect the names of all classes defined directly on DnsV1; these
        # are the per-collection service classes.
        inner_classes = {
            name for name, attr in dns_v1_client.DnsV1.__dict__.items()
            if isinstance(attr, six.class_types)}
        self.assertEqual(set([
            'ChangesService',
            'ProjectsService',
            'ManagedZonesService',
            'ResourceRecordSetsService']), inner_classes)
class DnsGenClientTest(unittest.TestCase):
    """Behavior tests for the generated DNS client over a mocked transport."""

    def setUp(self):
        # Swap the real HTTP layer for apitools' mock client; Unmock is
        # registered as cleanup so each test is isolated.
        self.mocked_dns_v1 = mock.Client(dns_v1_client.DnsV1)
        self.mocked_dns_v1.Mock()
        self.addCleanup(self.mocked_dns_v1.Unmock)

    def testFlatPath(self):
        # The v1 discovery doc defines no flat path for projects.get.
        get_method_config = self.mocked_dns_v1.projects.GetMethodConfig('Get')
        self.assertIsNone(get_method_config.flat_path)
        self.assertEqual('projects/{project}',
                         get_method_config.relative_path)

    def testRecordSetList(self):
        response_record_set = dns_v1_messages.ResourceRecordSet(
            kind=u"dns#resourceRecordSet",
            name=u"zone.com.",
            rrdatas=[u"1.2.3.4"],
            ttl=21600,
            type=u"A")
        # Expect exactly one List call; maxResults=100 mirrors the limit
        # passed to YieldFromList below.
        self.mocked_dns_v1.resourceRecordSets.List.Expect(
            dns_v1_messages.DnsResourceRecordSetsListRequest(
                project=u'my-project',
                managedZone=u'test_zone_name',
                type=u'green',
                maxResults=100),
            dns_v1_messages.ResourceRecordSetsListResponse(
                rrsets=[response_record_set]))
        results = list(list_pager.YieldFromList(
            self.mocked_dns_v1.resourceRecordSets,
            dns_v1_messages.DnsResourceRecordSetsListRequest(
                project='my-project',
                managedZone='test_zone_name',
                type='green'),
            limit=100, field='rrsets'))
        self.assertEqual([response_record_set], results)
| python | Apache-2.0 | 1b14f58b3c895f542c724028dd0fb8ae0d816510 | 2026-01-05T07:12:01.390580Z | false |
google/apitools | https://github.com/google/apitools/blob/1b14f58b3c895f542c724028dd0fb8ae0d816510/samples/dns_sample/__init__.py | samples/dns_sample/__init__.py | # Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| python | Apache-2.0 | 1b14f58b3c895f542c724028dd0fb8ae0d816510 | 2026-01-05T07:12:01.390580Z | false |
google/apitools | https://github.com/google/apitools/blob/1b14f58b3c895f542c724028dd0fb8ae0d816510/samples/dns_sample/dns_v1/dns_v1_client.py | samples/dns_sample/dns_v1/dns_v1_client.py | """Generated client library for dns version v1."""
# NOTE: This file is autogenerated and should not be edited by hand.
from __future__ import absolute_import
from apitools.base.py import base_api
from samples.dns_sample.dns_v1 import dns_v1_messages as messages
class DnsV1(base_api.BaseApiClient):
    """Generated client library for service dns version v1."""

    # Module holding the request/response message classes for this API.
    MESSAGES_MODULE = messages
    BASE_URL = 'https://www.googleapis.com/dns/v1/'
    MTLS_BASE_URL = ''

    _PACKAGE = 'dns'
    # OAuth scopes this client may request.
    _SCOPES = ['https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/cloud-platform.read-only', 'https://www.googleapis.com/auth/ndev.clouddns.readonly', 'https://www.googleapis.com/auth/ndev.clouddns.readwrite']
    _VERSION = 'v1'
    # NOTE(review): these look like placeholder OAuth credentials meant to be
    # overridden by callers -- confirm before relying on them.
    _CLIENT_ID = 'CLIENT_ID'
    _CLIENT_SECRET = 'CLIENT_SECRET'
    _USER_AGENT = 'x_Tw5K8nnjoRAqULM9PFAC2b'
    _CLIENT_CLASS_NAME = 'DnsV1'
    _URL_VERSION = 'v1'
    _API_KEY = None
def __init__(self, url='', credentials=None,
             get_credentials=True, http=None, model=None,
             log_request=False, log_response=False,
             credentials_args=None, default_global_params=None,
             additional_http_headers=None, response_encoding=None):
    """Create a new dns handle."""
    # Fall back to the generated BASE_URL when no endpoint override is given.
    url = url or self.BASE_URL
    super(DnsV1, self).__init__(
        url, credentials=credentials,
        get_credentials=get_credentials, http=http, model=model,
        log_request=log_request, log_response=log_response,
        credentials_args=credentials_args,
        default_global_params=default_global_params,
        additional_http_headers=additional_http_headers,
        response_encoding=response_encoding)
    # One service object per API resource collection.
    self.changes = self.ChangesService(self)
    self.managedZones = self.ManagedZonesService(self)
    self.projects = self.ProjectsService(self)
    self.resourceRecordSets = self.ResourceRecordSetsService(self)
class ChangesService(base_api.BaseApiService):
    """Service class for the changes resource."""

    _NAME = 'changes'

    def __init__(self, client):
        super(DnsV1.ChangesService, self).__init__(client)
        # No media-upload configurations for this service.
        self._upload_configs = {
            }

    def Create(self, request, global_params=None):
        r"""Atomically update the ResourceRecordSet collection.

        Args:
          request: (DnsChangesCreateRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (Change) The response message.
        """
        config = self.GetMethodConfig('Create')
        return self._RunMethod(
            config, request, global_params=global_params)

    # Method metadata is attached lazily via a zero-argument lambda.
    Create.method_config = lambda: base_api.ApiMethodInfo(
        http_method='POST',
        method_id='dns.changes.create',
        ordered_params=['project', 'managedZone'],
        path_params=['managedZone', 'project'],
        query_params=[],
        relative_path='projects/{project}/managedZones/{managedZone}/changes',
        request_field='change',
        request_type_name='DnsChangesCreateRequest',
        response_type_name='Change',
        supports_download=False,
    )

    def Get(self, request, global_params=None):
        r"""Fetch the representation of an existing Change.

        Args:
          request: (DnsChangesGetRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (Change) The response message.
        """
        config = self.GetMethodConfig('Get')
        return self._RunMethod(
            config, request, global_params=global_params)

    Get.method_config = lambda: base_api.ApiMethodInfo(
        http_method='GET',
        method_id='dns.changes.get',
        ordered_params=['project', 'managedZone', 'changeId'],
        path_params=['changeId', 'managedZone', 'project'],
        query_params=[],
        relative_path='projects/{project}/managedZones/{managedZone}/changes/{changeId}',
        request_field='',
        request_type_name='DnsChangesGetRequest',
        response_type_name='Change',
        supports_download=False,
    )

    def List(self, request, global_params=None):
        r"""Enumerate Changes to a ResourceRecordSet collection.

        Args:
          request: (DnsChangesListRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (ChangesListResponse) The response message.
        """
        config = self.GetMethodConfig('List')
        return self._RunMethod(
            config, request, global_params=global_params)

    List.method_config = lambda: base_api.ApiMethodInfo(
        http_method='GET',
        method_id='dns.changes.list',
        ordered_params=['project', 'managedZone'],
        path_params=['managedZone', 'project'],
        query_params=['maxResults', 'pageToken', 'sortBy', 'sortOrder'],
        relative_path='projects/{project}/managedZones/{managedZone}/changes',
        request_field='',
        request_type_name='DnsChangesListRequest',
        response_type_name='ChangesListResponse',
        supports_download=False,
    )
class ManagedZonesService(base_api.BaseApiService):
"""Service class for the managedZones resource."""
_NAME = 'managedZones'
def __init__(self, client):
super(DnsV1.ManagedZonesService, self).__init__(client)
self._upload_configs = {
}
def Create(self, request, global_params=None):
r"""Create a new ManagedZone.
Args:
request: (DnsManagedZonesCreateRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(ManagedZone) The response message.
"""
config = self.GetMethodConfig('Create')
return self._RunMethod(
config, request, global_params=global_params)
Create.method_config = lambda: base_api.ApiMethodInfo(
http_method='POST',
method_id='dns.managedZones.create',
ordered_params=['project'],
path_params=['project'],
query_params=[],
relative_path='projects/{project}/managedZones',
request_field='managedZone',
request_type_name='DnsManagedZonesCreateRequest',
response_type_name='ManagedZone',
supports_download=False,
)
def Delete(self, request, global_params=None):
r"""Delete a previously created ManagedZone.
Args:
request: (DnsManagedZonesDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(DnsManagedZonesDeleteResponse) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
http_method='DELETE',
method_id='dns.managedZones.delete',
ordered_params=['project', 'managedZone'],
path_params=['managedZone', 'project'],
query_params=[],
relative_path='projects/{project}/managedZones/{managedZone}',
request_field='',
request_type_name='DnsManagedZonesDeleteRequest',
response_type_name='DnsManagedZonesDeleteResponse',
supports_download=False,
)
def Get(self, request, global_params=None):
r"""Fetch the representation of an existing ManagedZone.
Args:
request: (DnsManagedZonesGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(ManagedZone) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
http_method='GET',
method_id='dns.managedZones.get',
ordered_params=['project', 'managedZone'],
path_params=['managedZone', 'project'],
query_params=[],
relative_path='projects/{project}/managedZones/{managedZone}',
request_field='',
request_type_name='DnsManagedZonesGetRequest',
response_type_name='ManagedZone',
supports_download=False,
)
def List(self, request, global_params=None):
r"""Enumerate ManagedZones that have been created but not yet deleted.
Args:
request: (DnsManagedZonesListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(ManagedZonesListResponse) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
http_method='GET',
method_id='dns.managedZones.list',
ordered_params=['project'],
path_params=['project'],
query_params=['dnsName', 'maxResults', 'pageToken'],
relative_path='projects/{project}/managedZones',
request_field='',
request_type_name='DnsManagedZonesListRequest',
response_type_name='ManagedZonesListResponse',
supports_download=False,
)
class ProjectsService(base_api.BaseApiService):
"""Service class for the projects resource."""
_NAME = 'projects'
def __init__(self, client):
super(DnsV1.ProjectsService, self).__init__(client)
self._upload_configs = {
}
def Get(self, request, global_params=None):
r"""Fetch the representation of an existing Project.
Args:
request: (DnsProjectsGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Project) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
http_method='GET',
method_id='dns.projects.get',
ordered_params=['project'],
path_params=['project'],
query_params=[],
relative_path='projects/{project}',
request_field='',
request_type_name='DnsProjectsGetRequest',
response_type_name='Project',
supports_download=False,
)
class ResourceRecordSetsService(base_api.BaseApiService):
"""Service class for the resourceRecordSets resource."""
_NAME = 'resourceRecordSets'
def __init__(self, client):
super(DnsV1.ResourceRecordSetsService, self).__init__(client)
self._upload_configs = {
}
def List(self, request, global_params=None):
r"""Enumerate ResourceRecordSets that have been created but not yet deleted.
Args:
request: (DnsResourceRecordSetsListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(ResourceRecordSetsListResponse) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
http_method='GET',
method_id='dns.resourceRecordSets.list',
ordered_params=['project', 'managedZone'],
path_params=['managedZone', 'project'],
query_params=['maxResults', 'name', 'pageToken', 'type'],
relative_path='projects/{project}/managedZones/{managedZone}/rrsets',
request_field='',
request_type_name='DnsResourceRecordSetsListRequest',
response_type_name='ResourceRecordSetsListResponse',
supports_download=False,
)
| python | Apache-2.0 | 1b14f58b3c895f542c724028dd0fb8ae0d816510 | 2026-01-05T07:12:01.390580Z | false |
google/apitools | https://github.com/google/apitools/blob/1b14f58b3c895f542c724028dd0fb8ae0d816510/samples/dns_sample/dns_v1/dns_v1_messages.py | samples/dns_sample/dns_v1/dns_v1_messages.py | """Generated message classes for dns version v1.
The Google Cloud DNS API provides services for configuring and serving
authoritative DNS records.
"""
# NOTE: This file is autogenerated and should not be edited by hand.
from __future__ import absolute_import
from apitools.base.protorpclite import messages as _messages
package = 'dns'
class Change(_messages.Message):
r"""An atomic update to a collection of ResourceRecordSets.
Enums:
StatusValueValuesEnum: Status of the operation (output only).
Fields:
additions: Which ResourceRecordSets to add?
deletions: Which ResourceRecordSets to remove? Must match existing data
exactly.
id: Unique identifier for the resource; defined by the server (output
only).
kind: Identifies what kind of resource this is. Value: the fixed string
"dns#change".
startTime: The time that this operation was started by the server. This is
in RFC3339 text format.
status: Status of the operation (output only).
"""
class StatusValueValuesEnum(_messages.Enum):
r"""Status of the operation (output only).
Values:
done: <no description>
pending: <no description>
"""
done = 0
pending = 1
additions = _messages.MessageField('ResourceRecordSet', 1, repeated=True)
deletions = _messages.MessageField('ResourceRecordSet', 2, repeated=True)
id = _messages.StringField(3)
kind = _messages.StringField(4, default='dns#change')
startTime = _messages.StringField(5)
status = _messages.EnumField('StatusValueValuesEnum', 6)
class ChangesListResponse(_messages.Message):
r"""The response to a request to enumerate Changes to a ResourceRecordSets
collection.
Fields:
changes: The requested changes.
kind: Type of resource.
nextPageToken: The presence of this field indicates that there exist more
results following your last page of results in pagination order. To
fetch them, make another list request using this value as your
pagination token. In this way you can retrieve the complete contents of
even very large collections one page at a time. However, if the contents
of the collection change between the first and last paginated list
request, the set of all elements returned will be an inconsistent view
of the collection. There is no way to retrieve a "snapshot" of
collections larger than the maximum page size.
"""
changes = _messages.MessageField('Change', 1, repeated=True)
kind = _messages.StringField(2, default='dns#changesListResponse')
nextPageToken = _messages.StringField(3)
class DnsChangesCreateRequest(_messages.Message):
r"""A DnsChangesCreateRequest object.
Fields:
change: A Change resource to be passed as the request body.
managedZone: Identifies the managed zone addressed by this request. Can be
the managed zone name or id.
project: Identifies the project addressed by this request.
"""
change = _messages.MessageField('Change', 1)
managedZone = _messages.StringField(2, required=True)
project = _messages.StringField(3, required=True)
class DnsChangesGetRequest(_messages.Message):
r"""A DnsChangesGetRequest object.
Fields:
changeId: The identifier of the requested change, from a previous
ResourceRecordSetsChangeResponse.
managedZone: Identifies the managed zone addressed by this request. Can be
the managed zone name or id.
project: Identifies the project addressed by this request.
"""
changeId = _messages.StringField(1, required=True)
managedZone = _messages.StringField(2, required=True)
project = _messages.StringField(3, required=True)
class DnsChangesListRequest(_messages.Message):
r"""A DnsChangesListRequest object.
Enums:
SortByValueValuesEnum: Sorting criterion. The only supported value is
change sequence.
Fields:
managedZone: Identifies the managed zone addressed by this request. Can be
the managed zone name or id.
maxResults: Optional. Maximum number of results to be returned. If
unspecified, the server will decide how many results to return.
pageToken: Optional. A tag returned by a previous list request that was
truncated. Use this parameter to continue a previous list request.
project: Identifies the project addressed by this request.
sortBy: Sorting criterion. The only supported value is change sequence.
sortOrder: Sorting order direction: 'ascending' or 'descending'.
"""
class SortByValueValuesEnum(_messages.Enum):
r"""Sorting criterion. The only supported value is change sequence.
Values:
changeSequence: <no description>
"""
changeSequence = 0
managedZone = _messages.StringField(1, required=True)
maxResults = _messages.IntegerField(2, variant=_messages.Variant.INT32)
pageToken = _messages.StringField(3)
project = _messages.StringField(4, required=True)
sortBy = _messages.EnumField('SortByValueValuesEnum', 5, default='changeSequence')
sortOrder = _messages.StringField(6)
class DnsManagedZonesCreateRequest(_messages.Message):
r"""A DnsManagedZonesCreateRequest object.
Fields:
managedZone: A ManagedZone resource to be passed as the request body.
project: Identifies the project addressed by this request.
"""
managedZone = _messages.MessageField('ManagedZone', 1)
project = _messages.StringField(2, required=True)
class DnsManagedZonesDeleteRequest(_messages.Message):
r"""A DnsManagedZonesDeleteRequest object.
Fields:
managedZone: Identifies the managed zone addressed by this request. Can be
the managed zone name or id.
project: Identifies the project addressed by this request.
"""
managedZone = _messages.StringField(1, required=True)
project = _messages.StringField(2, required=True)
class DnsManagedZonesDeleteResponse(_messages.Message):
r"""An empty DnsManagedZonesDelete response."""
class DnsManagedZonesGetRequest(_messages.Message):
r"""A DnsManagedZonesGetRequest object.
Fields:
managedZone: Identifies the managed zone addressed by this request. Can be
the managed zone name or id.
project: Identifies the project addressed by this request.
"""
managedZone = _messages.StringField(1, required=True)
project = _messages.StringField(2, required=True)
class DnsManagedZonesListRequest(_messages.Message):
r"""A DnsManagedZonesListRequest object.
Fields:
dnsName: Restricts the list to return only zones with this domain name.
maxResults: Optional. Maximum number of results to be returned. If
unspecified, the server will decide how many results to return.
pageToken: Optional. A tag returned by a previous list request that was
truncated. Use this parameter to continue a previous list request.
project: Identifies the project addressed by this request.
"""
dnsName = _messages.StringField(1)
maxResults = _messages.IntegerField(2, variant=_messages.Variant.INT32)
pageToken = _messages.StringField(3)
project = _messages.StringField(4, required=True)
class DnsProjectsGetRequest(_messages.Message):
r"""A DnsProjectsGetRequest object.
Fields:
project: Identifies the project addressed by this request.
"""
project = _messages.StringField(1, required=True)
class DnsResourceRecordSetsListRequest(_messages.Message):
r"""A DnsResourceRecordSetsListRequest object.
Fields:
managedZone: Identifies the managed zone addressed by this request. Can be
the managed zone name or id.
maxResults: Optional. Maximum number of results to be returned. If
unspecified, the server will decide how many results to return.
name: Restricts the list to return only records with this fully qualified
domain name.
pageToken: Optional. A tag returned by a previous list request that was
truncated. Use this parameter to continue a previous list request.
project: Identifies the project addressed by this request.
type: Restricts the list to return only records of this type. If present,
the "name" parameter must also be present.
"""
managedZone = _messages.StringField(1, required=True)
maxResults = _messages.IntegerField(2, variant=_messages.Variant.INT32)
name = _messages.StringField(3)
pageToken = _messages.StringField(4)
project = _messages.StringField(5, required=True)
type = _messages.StringField(6)
class ManagedZone(_messages.Message):
r"""A zone is a subtree of the DNS namespace under one administrative
responsibility. A ManagedZone is a resource that represents a DNS zone
hosted by the Cloud DNS service.
Fields:
creationTime: The time that this resource was created on the server. This
is in RFC3339 text format. Output only.
description: A mutable string of at most 1024 characters associated with
this resource for the user's convenience. Has no effect on the managed
zone's function.
dnsName: The DNS name of this managed zone, for instance "example.com.".
id: Unique identifier for the resource; defined by the server (output
only)
kind: Identifies what kind of resource this is. Value: the fixed string
"dns#managedZone".
name: User assigned name for this resource. Must be unique within the
project. The name must be 1-32 characters long, must begin with a
letter, end with a letter or digit, and only contain lowercase letters,
digits or dashes.
nameServerSet: Optionally specifies the NameServerSet for this
ManagedZone. A NameServerSet is a set of DNS name servers that all host
the same ManagedZones. Most users will leave this field unset.
nameServers: Delegate your managed_zone to these virtual name servers;
defined by the server (output only)
"""
creationTime = _messages.StringField(1)
description = _messages.StringField(2)
dnsName = _messages.StringField(3)
id = _messages.IntegerField(4, variant=_messages.Variant.UINT64)
kind = _messages.StringField(5, default='dns#managedZone')
name = _messages.StringField(6)
nameServerSet = _messages.StringField(7)
nameServers = _messages.StringField(8, repeated=True)
class ManagedZonesListResponse(_messages.Message):
r"""A ManagedZonesListResponse object.
Fields:
kind: Type of resource.
managedZones: The managed zone resources.
nextPageToken: The presence of this field indicates that there exist more
results following your last page of results in pagination order. To
fetch them, make another list request using this value as your page
token. In this way you can retrieve the complete contents of even very
large collections one page at a time. However, if the contents of the
collection change between the first and last paginated list request, the
set of all elements returned will be an inconsistent view of the
collection. There is no way to retrieve a consistent snapshot of a
collection larger than the maximum page size.
"""
kind = _messages.StringField(1, default='dns#managedZonesListResponse')
managedZones = _messages.MessageField('ManagedZone', 2, repeated=True)
nextPageToken = _messages.StringField(3)
class Project(_messages.Message):
r"""A project resource. The project is a top level container for resources
including Cloud DNS ManagedZones. Projects can be created only in the APIs
console.
Fields:
id: User assigned unique identifier for the resource (output only).
kind: Identifies what kind of resource this is. Value: the fixed string
"dns#project".
number: Unique numeric identifier for the resource; defined by the server
(output only).
quota: Quotas assigned to this project (output only).
"""
id = _messages.StringField(1)
kind = _messages.StringField(2, default='dns#project')
number = _messages.IntegerField(3, variant=_messages.Variant.UINT64)
quota = _messages.MessageField('Quota', 4)
class Quota(_messages.Message):
r"""Limits associated with a Project.
Fields:
kind: Identifies what kind of resource this is. Value: the fixed string
"dns#quota".
managedZones: Maximum allowed number of managed zones in the project.
resourceRecordsPerRrset: Maximum allowed number of ResourceRecords per
ResourceRecordSet.
rrsetAdditionsPerChange: Maximum allowed number of ResourceRecordSets to
add per ChangesCreateRequest.
rrsetDeletionsPerChange: Maximum allowed number of ResourceRecordSets to
delete per ChangesCreateRequest.
rrsetsPerManagedZone: Maximum allowed number of ResourceRecordSets per
zone in the project.
totalRrdataSizePerChange: Maximum allowed size for total rrdata in one
ChangesCreateRequest in bytes.
"""
kind = _messages.StringField(1, default='dns#quota')
managedZones = _messages.IntegerField(2, variant=_messages.Variant.INT32)
resourceRecordsPerRrset = _messages.IntegerField(3, variant=_messages.Variant.INT32)
rrsetAdditionsPerChange = _messages.IntegerField(4, variant=_messages.Variant.INT32)
rrsetDeletionsPerChange = _messages.IntegerField(5, variant=_messages.Variant.INT32)
rrsetsPerManagedZone = _messages.IntegerField(6, variant=_messages.Variant.INT32)
totalRrdataSizePerChange = _messages.IntegerField(7, variant=_messages.Variant.INT32)
class ResourceRecordSet(_messages.Message):
r"""A unit of data that will be returned by the DNS servers.
Fields:
kind: Identifies what kind of resource this is. Value: the fixed string
"dns#resourceRecordSet".
name: For example, www.example.com.
rrdatas: As defined in RFC 1035 (section 5) and RFC 1034 (section 3.6.1).
ttl: Number of seconds that this ResourceRecordSet can be cached by
resolvers.
type: The identifier of a supported record type, for example, A, AAAA, MX,
TXT, and so on.
"""
kind = _messages.StringField(1, default='dns#resourceRecordSet')
name = _messages.StringField(2)
rrdatas = _messages.StringField(3, repeated=True)
ttl = _messages.IntegerField(4, variant=_messages.Variant.INT32)
type = _messages.StringField(5)
class ResourceRecordSetsListResponse(_messages.Message):
r"""A ResourceRecordSetsListResponse object.
Fields:
kind: Type of resource.
nextPageToken: The presence of this field indicates that there exist more
results following your last page of results in pagination order. To
fetch them, make another list request using this value as your
pagination token. In this way you can retrieve the complete contents of
even very large collections one page at a time. However, if the contents
of the collection change between the first and last paginated list
request, the set of all elements returned will be an inconsistent view
of the collection. There is no way to retrieve a consistent snapshot of
a collection larger than the maximum page size.
rrsets: The resource record set resources.
"""
kind = _messages.StringField(1, default='dns#resourceRecordSetsListResponse')
nextPageToken = _messages.StringField(2)
rrsets = _messages.MessageField('ResourceRecordSet', 3, repeated=True)
class StandardQueryParameters(_messages.Message):
r"""Query parameters accepted by all methods.
Enums:
AltValueValuesEnum: Data format for the response.
Fields:
alt: Data format for the response.
fields: Selector specifying which fields to include in a partial response.
key: API key. Your API key identifies your project and provides you with
API access, quota, and reports. Required unless you provide an OAuth 2.0
token.
oauth_token: OAuth 2.0 token for the current user.
prettyPrint: Returns response with indentations and line breaks.
quotaUser: Available to use for quota purposes for server-side
applications. Can be any arbitrary string assigned to a user, but should
not exceed 40 characters. Overrides userIp if both are provided.
trace: A tracing token of the form "token:<tokenid>" to include in api
requests.
userIp: IP address of the site where the request originates. Use this if
you want to enforce per-user limits.
"""
class AltValueValuesEnum(_messages.Enum):
r"""Data format for the response.
Values:
json: Responses with Content-Type of application/json
"""
json = 0
alt = _messages.EnumField('AltValueValuesEnum', 1, default='json')
fields = _messages.StringField(2)
key = _messages.StringField(3)
oauth_token = _messages.StringField(4)
prettyPrint = _messages.BooleanField(5, default=True)
quotaUser = _messages.StringField(6)
trace = _messages.StringField(7)
userIp = _messages.StringField(8)
| python | Apache-2.0 | 1b14f58b3c895f542c724028dd0fb8ae0d816510 | 2026-01-05T07:12:01.390580Z | false |
google/apitools | https://github.com/google/apitools/blob/1b14f58b3c895f542c724028dd0fb8ae0d816510/samples/dns_sample/dns_v1/__init__.py | samples/dns_sample/dns_v1/__init__.py | """Package marker file."""
from __future__ import absolute_import
import pkgutil
__path__ = pkgutil.extend_path(__path__, __name__)
| python | Apache-2.0 | 1b14f58b3c895f542c724028dd0fb8ae0d816510 | 2026-01-05T07:12:01.390580Z | false |
google/apitools | https://github.com/google/apitools/blob/1b14f58b3c895f542c724028dd0fb8ae0d816510/samples/servicemanagement_sample/messages_test.py | samples/servicemanagement_sample/messages_test.py | #
# Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test for generated servicemanagement messages module."""
import unittest
from apitools.base.py import extra_types
from samples.servicemanagement_sample.servicemanagement_v1 \
import servicemanagement_v1_messages as messages # nopep8
class MessagesTest(unittest.TestCase):
def testInstantiateMessageWithAdditionalProperties(self):
PROJECT_NAME = 'test-project'
SERVICE_NAME = 'test-service'
SERVICE_VERSION = '1.0'
prop = messages.Operation.ResponseValue.AdditionalProperty
messages.Operation(
name='operation-12345-67890',
done=False,
response=messages.Operation.ResponseValue(
additionalProperties=[
prop(key='producerProjectId',
value=extra_types.JsonValue(
string_value=PROJECT_NAME)),
prop(key='serviceName',
value=extra_types.JsonValue(
string_value=SERVICE_NAME)),
prop(key='serviceConfig',
value=extra_types.JsonValue(
object_value=extra_types.JsonObject(
properties=[
extra_types.JsonObject.Property(
key='id',
value=extra_types.JsonValue(
string_value=SERVICE_VERSION)
)
])
))
]))
| python | Apache-2.0 | 1b14f58b3c895f542c724028dd0fb8ae0d816510 | 2026-01-05T07:12:01.390580Z | false |
google/apitools | https://github.com/google/apitools/blob/1b14f58b3c895f542c724028dd0fb8ae0d816510/samples/servicemanagement_sample/__init__.py | samples/servicemanagement_sample/__init__.py | # Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| python | Apache-2.0 | 1b14f58b3c895f542c724028dd0fb8ae0d816510 | 2026-01-05T07:12:01.390580Z | false |
google/apitools | https://github.com/google/apitools/blob/1b14f58b3c895f542c724028dd0fb8ae0d816510/samples/servicemanagement_sample/servicemanagement_v1/__init__.py | samples/servicemanagement_sample/servicemanagement_v1/__init__.py | """Package marker file."""
from __future__ import absolute_import
import pkgutil
__path__ = pkgutil.extend_path(__path__, __name__)
| python | Apache-2.0 | 1b14f58b3c895f542c724028dd0fb8ae0d816510 | 2026-01-05T07:12:01.390580Z | false |
google/apitools | https://github.com/google/apitools/blob/1b14f58b3c895f542c724028dd0fb8ae0d816510/samples/servicemanagement_sample/servicemanagement_v1/servicemanagement_v1_messages.py | samples/servicemanagement_sample/servicemanagement_v1/servicemanagement_v1_messages.py | """Generated message classes for servicemanagement version v1.
The service management API for Google Cloud Platform
"""
# NOTE: This file is autogenerated and should not be edited by hand.
from __future__ import absolute_import
from apitools.base.protorpclite import messages as _messages
from apitools.base.py import encoding
from apitools.base.py import extra_types
package = 'servicemanagement'
class Api(_messages.Message):
r"""Api is a light-weight descriptor for a protocol buffer service.
Enums:
SyntaxValueValuesEnum: The source syntax of the service.
Fields:
methods: The methods of this api, in unspecified order.
mixins: Included APIs. See Mixin.
name: The fully qualified name of this api, including package name
followed by the api's simple name.
options: Any metadata attached to the API.
sourceContext: Source context for the protocol buffer service represented
by this message.
syntax: The source syntax of the service.
version: A version string for this api. If specified, must have the form
`major-version.minor-version`, as in `1.10`. If the minor version is
omitted, it defaults to zero. If the entire version field is empty, the
major version is derived from the package name, as outlined below. If
the field is not empty, the version in the package name will be verified
to be consistent with what is provided here. The versioning schema uses
[semantic versioning](http://semver.org) where the major version number
indicates a breaking change and the minor version an additive, non-
breaking change. Both version numbers are signals to users what to
expect from different versions, and should be carefully chosen based on
the product plan. The major version is also reflected in the package
name of the API, which must end in `v<major-version>`, as in
`google.feature.v1`. For major versions 0 and 1, the suffix can be
omitted. Zero major versions must only be used for experimental, none-GA
apis.
"""
class SyntaxValueValuesEnum(_messages.Enum):
r"""The source syntax of the service.
Values:
SYNTAX_PROTO2: Syntax `proto2`.
SYNTAX_PROTO3: Syntax `proto3`.
"""
SYNTAX_PROTO2 = 0
SYNTAX_PROTO3 = 1
methods = _messages.MessageField('Method', 1, repeated=True)
mixins = _messages.MessageField('Mixin', 2, repeated=True)
name = _messages.StringField(3)
options = _messages.MessageField('Option', 4, repeated=True)
sourceContext = _messages.MessageField('SourceContext', 5)
syntax = _messages.EnumField('SyntaxValueValuesEnum', 6)
version = _messages.StringField(7)
class AreaUnderCurveParams(_messages.Message):
r"""AreaUnderCurveParams groups the metrics relevant to generating duration
based metric from base (snapshot) metric and delta (change) metric. The
generated metric has two dimensions: resource usage metric and the
duration the metric applies. Essentially the generated metric is the Area
Under Curve(AUC) of the "duration - resource" usage curve. This AUC metric
is readily appliable to billing since "billable resource usage" depends on
resource usage and duration of the resource used. A service config may
contain multiple resources and corresponding metrics. AreaUnderCurveParams
groups the relevant ones: which snapshot_metric and change_metric are used
to produce which generated_metric.
Fields:
changeMetric: Change of resource usage at a particular timestamp. This
should a DELTA metric.
generatedMetric: Metric generated from snapshot_metric and change_metric.
This is also a DELTA metric.
snapshotMetric: Total usage of a resource at a particular timestamp. This
should be a GAUGE metric.
"""
changeMetric = _messages.StringField(1)
generatedMetric = _messages.StringField(2)
snapshotMetric = _messages.StringField(3)
class AuthProvider(_messages.Message):
r"""Configuration for an anthentication provider, including support for
[JSON Web Token (JWT)](https://tools.ietf.org/html/draft-ietf-oauth-json-
web-token-32).
Fields:
id: The unique identifier of the auth provider. It will be referred to by
`AuthRequirement.provider_id`. Example: "bookstore_auth".
issuer: Identifies the principal that issued the JWT. See
https://tools.ietf.org/html/draft-ietf-oauth-json-web-
token-32#section-4.1.1 Usually a URL or an email address. Example:
https://securetoken.google.com Example:
1234567-compute@developer.gserviceaccount.com
jwksUri: URL of the provider's public key set to validate signature of the
JWT. See [OpenID Discovery](https://openid.net/specs/openid-connect-
discovery-1_0.html#ProviderMetadata). Optional if the key set document:
- can be retrieved from [OpenID
Discovery](https://openid.net/specs/openid-connect-discovery-1_0.html
of the issuer. - can be inferred from the email domain of the issuer
(e.g. a Google service account). Example:
https://www.googleapis.com/oauth2/v1/certs
"""
id = _messages.StringField(1)
issuer = _messages.StringField(2)
jwksUri = _messages.StringField(3)
class AuthRequirement(_messages.Message):
r"""User-defined authentication requirements, including support for [JSON
Web Token (JWT)](https://tools.ietf.org/html/draft-ietf-oauth-json-web-
token-32).
Fields:
audiences: The list of JWT [audiences](https://tools.ietf.org/html/draft-
ietf-oauth-json-web-token-32#section-4.1.3). that are allowed to access.
A JWT containing any of these audiences will be accepted. When this
setting is absent, only JWTs with audience
"https://Service_name/API_name" will be accepted. For example, if no
audiences are in the setting, LibraryService API will only accept JWTs
with the following audience "https://library-
example.googleapis.com/google.example.library.v1.LibraryService".
Example: audiences: bookstore_android.apps.googleusercontent.com,
bookstore_web.apps.googleusercontent.com
providerId: id from authentication provider. Example: provider_id:
bookstore_auth
"""
audiences = _messages.StringField(1)
providerId = _messages.StringField(2)
class Authentication(_messages.Message):
r"""`Authentication` defines the authentication configuration for an API.
Example for an API targeted for external use: name:
calendar.googleapis.com authentication: rules: - selector:
"*" oauth: canonical_scopes:
https://www.googleapis.com/auth/calendar - selector:
google.calendar.Delegate oauth: canonical_scopes:
https://www.googleapis.com/auth/calendar.read
Fields:
providers: Defines a set of authentication providers that a service
supports.
rules: Individual rules for authentication.
"""
providers = _messages.MessageField('AuthProvider', 1, repeated=True)
rules = _messages.MessageField('AuthenticationRule', 2, repeated=True)
class AuthenticationRule(_messages.Message):
  r"""Authentication rules for the service.  By default, if a method has any
  authentication requirements, every request must include a valid credential
  matching one of the requirements. It's an error to include more than one
  kind of credential in a single request.  If a method doesn't have any auth
  requirements, request credentials will be ignored.

  Fields:
    allowWithoutCredential: Whether to allow requests without a credential.
      If quota is enabled, an API key is required for such request to pass the
      quota check.
    oauth: The requirements for OAuth credentials.
    requirements: Requirements for additional authentication providers.
    selector: Selects the methods to which this rule applies.  Refer to
      selector for syntax details.
  """
  allowWithoutCredential = _messages.BooleanField(1)
  oauth = _messages.MessageField('OAuthRequirements', 2)
  requirements = _messages.MessageField('AuthRequirement', 3, repeated=True)
  selector = _messages.StringField(4)
class Backend(_messages.Message):
  r"""`Backend` defines the backend configuration for a service.

  Fields:
    rules: A list of backend rules providing configuration for individual API
      elements.
  """
  rules = _messages.MessageField('BackendRule', 1, repeated=True)
class BackendRule(_messages.Message):
  r"""A backend rule provides configuration for an individual API element.

  Fields:
    address: The address of the API backend.
    deadline: The number of seconds to wait for a response from a request.
      The default depends on the deployment context.
    selector: Selects the methods to which this rule applies.  Refer to
      selector for syntax details.
  """
  address = _messages.StringField(1)
  deadline = _messages.FloatField(2)
  selector = _messages.StringField(3)
class Billing(_messages.Message):
  r"""Billing related configuration of the service.  The following example
  shows how to configure metrics for billing:      metrics:     - name:
  library.googleapis.com/read_calls       metric_kind: DELTA       value_type:
  INT64     - name: library.googleapis.com/write_calls       metric_kind:
  DELTA       value_type: INT64     billing:       metrics:       -
  library.googleapis.com/read_calls       - library.googleapis.com/write_calls
  The next example shows how to enable billing status check and customize the
  check behavior. It makes sure billing status check is included in the
  `Check` method of [Service Control API](https://cloud.google.com/service-
  control/). In the example, "google.storage.Get" method can be served when
  the billing status is either `current` or `delinquent`, while
  "google.storage.Write" method can only be served when the billing status is
  `current`:      billing:       rules:       - selector: google.storage.Get
  allowed_statuses:         - current         - delinquent       - selector:
  google.storage.Write         allowed_statuses: current  Mostly services
  should only allow `current` status when serving requests. In addition,
  services can choose to allow both `current` and `delinquent` statuses when
  serving read-only requests to resources. If there's no matching selector for
  operation, no billing status check will be performed.

  Fields:
    areaUnderCurveParams: Per resource grouping for delta billing based
      resource configs.
    metrics: Names of the metrics to report to billing. Each name must be
      defined in Service.metrics section.
    rules: A list of billing status rules for configuring billing status
      check.
  """
  areaUnderCurveParams = _messages.MessageField('AreaUnderCurveParams', 1, repeated=True)
  metrics = _messages.StringField(2, repeated=True)
  rules = _messages.MessageField('BillingStatusRule', 3, repeated=True)
class BillingStatusRule(_messages.Message):
  r"""Defines the billing status requirements for operations.  When used with
  [Service Control API](https://cloud.google.com/service-control/), the
  following statuses are supported:  - **current**: the associated billing
  account is up to date and capable of                paying for resource
  usages. - **delinquent**: the associated billing account has a correctable
  problem,                   such as late payment.  Mostly services should
  only allow `current` status when serving requests. In addition, services can
  choose to allow both `current` and `delinquent` statuses when serving read-
  only requests to resources. If the list of allowed_statuses is empty, it
  means no billing requirement.

  Fields:
    allowedStatuses: Allowed billing statuses. The billing status check passes
      if the actual billing status matches any of the provided values here.
    selector: Selects the operation names to which this rule applies. Refer to
      selector for syntax details.
  """
  allowedStatuses = _messages.StringField(1, repeated=True)
  selector = _messages.StringField(2)
class CompositeOperationMetadata(_messages.Message):
  r"""Metadata for composite operations.

  Messages:
    OriginalRequestValue: Original request that triggered this operation.
    ResponseFieldMasksValue: Defines which part of the response a child
      operation will contribute. Each key of the map is the name of a child
      operation. Each value is a field mask that identifies what that child
      operation contributes to the response, for example, "quota_settings",
      "visibility_settings", etc.

  Fields:
    childOperations: The child operations. The details of the asynchronous
      child operations are stored in a separate row and not in this metadata.
      Only the operation name is stored here.
    originalRequest: Original request that triggered this operation.
    persisted: Indicates whether the requested state change has been
      persisted. Once this field is set, it is guaranteed to propagate to all
      backends eventually, but it may not be visible immediately. Clients that
      are not concerned with waiting on propagation can stop polling the
      operation once the persisted field is set.
    responseFieldMasks: Defines which part of the response a child operation
      will contribute. Each key of the map is the name of a child operation.
      Each value is a field mask that identifies what that child operation
      contributes to the response, for example, "quota_settings",
      "visibility_settings", etc.
  """

  # The decorator routes JSON keys that are not declared proto fields into the
  # repeated `additionalProperties` list, so arbitrary map entries round-trip.
  @encoding.MapUnrecognizedFields('additionalProperties')
  class OriginalRequestValue(_messages.Message):
    r"""Original request that triggered this operation.

    Messages:
      AdditionalProperty: An additional property for a OriginalRequestValue
        object.

    Fields:
      additionalProperties: Properties of the object. Contains field @type
        with type URL.
    """

    class AdditionalProperty(_messages.Message):
      r"""An additional property for a OriginalRequestValue object.

      Fields:
        key: Name of the additional property.
        value: A extra_types.JsonValue attribute.
      """
      key = _messages.StringField(1)
      value = _messages.MessageField('extra_types.JsonValue', 2)

    additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)

  @encoding.MapUnrecognizedFields('additionalProperties')
  class ResponseFieldMasksValue(_messages.Message):
    r"""Defines which part of the response a child operation will contribute.
    Each key of the map is the name of a child operation. Each value is a
    field mask that identifies what that child operation contributes to the
    response, for example, "quota_settings", "visibility_settings", etc.

    Messages:
      AdditionalProperty: An additional property for a ResponseFieldMasksValue
        object.

    Fields:
      additionalProperties: Additional properties of type
        ResponseFieldMasksValue
    """

    class AdditionalProperty(_messages.Message):
      r"""An additional property for a ResponseFieldMasksValue object.

      Fields:
        key: Name of the additional property.
        value: A string attribute.
      """
      key = _messages.StringField(1)
      value = _messages.StringField(2)

    additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)

  childOperations = _messages.MessageField('Operation', 1, repeated=True)
  originalRequest = _messages.MessageField('OriginalRequestValue', 2)
  persisted = _messages.BooleanField(3)
  responseFieldMasks = _messages.MessageField('ResponseFieldMasksValue', 4)
class ConfigFile(_messages.Message):
  r"""Generic specification of a source configuration file

  Enums:
    FileTypeValueValuesEnum: The kind of configuration file represented. This
      is used to determine the method for generating `google.api.Service`
      using this file.

  Fields:
    contents: DEPRECATED. The contents of the configuration file. Use
      file_contents moving forward.
    fileContents: The bytes that constitute the file.
    filePath: The file name of the configuration file (full or relative path).
    fileType: The kind of configuration file represented. This is used to
      determine the method for generating `google.api.Service` using this
      file.
  """

  class FileTypeValueValuesEnum(_messages.Enum):
    r"""The kind of configuration file represented. This is used to determine
    the method for generating `google.api.Service` using this file.

    Values:
      FILE_TYPE_UNSPECIFIED: Unknown file type.
      SERVICE_CONFIG_YAML: YAML-specification of service.
      OPEN_API_JSON: OpenAPI specification, serialized in JSON.
      OPEN_API_YAML: OpenAPI specification, serialized in YAML.
      FILE_DESCRIPTOR_SET_PROTO: FileDescriptorSet, generated by protoc. To
        generate, use protoc with imports and source info included. For an
        example test.proto file, the following command would put the value in
        a new file named out.pb.  $protoc --include_imports
        --include_source_info test.proto -o out.pb
    """
    FILE_TYPE_UNSPECIFIED = 0
    SERVICE_CONFIG_YAML = 1
    OPEN_API_JSON = 2
    OPEN_API_YAML = 3
    FILE_DESCRIPTOR_SET_PROTO = 4

  contents = _messages.StringField(1)
  fileContents = _messages.BytesField(2)
  filePath = _messages.StringField(3)
  fileType = _messages.EnumField('FileTypeValueValuesEnum', 4)
class ConfigOptions(_messages.Message):
  r"""A set of options to cover use of source config within `ServiceManager`
  and related tools.
  """
  # Intentionally empty: this message currently declares no fields.
class ConfigSource(_messages.Message):
  r"""Represents a user-specified configuration for a service (as opposed to
  the generated service config form provided by `google.api.Service`).
  This is meant to encode service config as manipulated directly by customers,
  rather than the config form resulting from toolchain generation and
  normalization.

  Fields:
    files: Set of source configuration files that are used to generate a
      service config (`google.api.Service`).
    id: A unique ID for a specific instance of this message, typically
      assigned by the client for tracking purpose. If empty, the server may
      choose to generate one instead.
    openApiSpec: OpenAPI specification
    options: Options to cover use of source config within ServiceManager and
      tools
    protoSpec: Protocol buffer API specification
  """
  files = _messages.MessageField('ConfigFile', 1, repeated=True)
  id = _messages.StringField(2)
  openApiSpec = _messages.MessageField('OpenApiSpec', 3)
  options = _messages.MessageField('ConfigOptions', 4)
  protoSpec = _messages.MessageField('ProtoSpec', 5)
class Context(_messages.Message):
  r"""`Context` defines which contexts an API requests.  Example:
  context:       rules:       - selector: "*"         requested:         -
  google.rpc.context.ProjectContext         - google.rpc.context.OriginContext
  The above specifies that all methods in the API request
  `google.rpc.context.ProjectContext` and `google.rpc.context.OriginContext`.
  Available context types are defined in package `google.rpc.context`.

  Fields:
    rules: List of rules for context, applicable to methods.
  """
  rules = _messages.MessageField('ContextRule', 1, repeated=True)
class ContextRule(_messages.Message):
  r"""A context rule provides information about the context for an individual
  API element.

  Fields:
    provided: A list of full type names of provided contexts.
    requested: A list of full type names of requested contexts.
    selector: Selects the methods to which this rule applies.  Refer to
      selector for syntax details.
  """
  provided = _messages.StringField(1, repeated=True)
  requested = _messages.StringField(2, repeated=True)
  selector = _messages.StringField(3)
class Control(_messages.Message):
  r"""Selects and configures the service controller used by the service.  The
  service controller handles features like abuse, quota, billing, logging,
  monitoring, etc.

  Fields:
    environment: The service control environment to use. If empty, no control
      plane feature (like quota and billing) will be enabled.
  """
  environment = _messages.StringField(1)
class ConvertConfigRequest(_messages.Message):
  r"""Request message for `ConvertConfig` method.

  Messages:
    ConfigSpecValue: Input configuration For this version of API, the
      supported type is OpenApiSpec

  Fields:
    configSpec: Input configuration For this version of API, the supported
      type is OpenApiSpec
    openApiSpec: The OpenAPI specification for an API.
    serviceName: The service name to use for constructing the normalized
      service configuration equivalent of the provided configuration
      specification.
    swaggerSpec: The swagger specification for an API.
  """

  # Routes undeclared JSON keys into `additionalProperties` so the free-form
  # config spec map survives (de)serialization.
  @encoding.MapUnrecognizedFields('additionalProperties')
  class ConfigSpecValue(_messages.Message):
    r"""Input configuration For this version of API, the supported type is
    OpenApiSpec

    Messages:
      AdditionalProperty: An additional property for a ConfigSpecValue object.

    Fields:
      additionalProperties: Properties of the object. Contains field @type
        with type URL.
    """

    class AdditionalProperty(_messages.Message):
      r"""An additional property for a ConfigSpecValue object.

      Fields:
        key: Name of the additional property.
        value: A extra_types.JsonValue attribute.
      """
      key = _messages.StringField(1)
      value = _messages.MessageField('extra_types.JsonValue', 2)

    additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)

  configSpec = _messages.MessageField('ConfigSpecValue', 1)
  openApiSpec = _messages.MessageField('OpenApiSpec', 2)
  serviceName = _messages.StringField(3)
  swaggerSpec = _messages.MessageField('SwaggerSpec', 4)
class ConvertConfigResponse(_messages.Message):
  r"""Response message for `ConvertConfig` method.

  Fields:
    diagnostics: Any errors or warnings that occurred during config
      conversion.
    serviceConfig: The service configuration. Not set if errors occurred
      during conversion.
  """
  diagnostics = _messages.MessageField('Diagnostic', 1, repeated=True)
  serviceConfig = _messages.MessageField('Service', 2)
class CustomError(_messages.Message):
  r"""Customize service error responses.  For example, list any service
  specific protobuf types that can appear in error detail lists of error
  responses.  Example:      custom_error:       types:       -
  google.foo.v1.CustomError       - google.foo.v1.AnotherError

  Fields:
    rules: The list of custom error rules to select to which messages this
      should apply.
    types: The list of custom error detail types, e.g.
      'google.foo.v1.CustomError'.
  """
  rules = _messages.MessageField('CustomErrorRule', 1, repeated=True)
  types = _messages.StringField(2, repeated=True)
class CustomErrorRule(_messages.Message):
  r"""A custom error rule.

  Fields:
    isErrorType: Mark this message as possible payload in error response.
      Otherwise, objects of this type will be filtered when they appear in
      error payload.
    selector: Selects messages to which this rule applies.  Refer to selector
      for syntax details.
  """
  isErrorType = _messages.BooleanField(1)
  selector = _messages.StringField(2)
class CustomHttpPattern(_messages.Message):
  r"""A custom pattern is used for defining custom HTTP verb.

  Fields:
    kind: The name of this custom HTTP verb.
    path: The path matched by this custom verb.
  """
  kind = _messages.StringField(1)
  path = _messages.StringField(2)
class CustomerSettings(_messages.Message):
  r"""Settings that control how a customer (identified by a billing account)
  uses a service

  Fields:
    customerId: ID for the customer that consumes the service (see above). The
      supported types of customers are:  1. domain:{domain} A Google Apps
      domain name.    For example, google.com.  2.
      billingAccount:{billing_account_id} A Google Cloud Platform billing
      account.    For Example, 123456-7890ab-cdef12.
    quotaSettings: Settings that control how much or how fast the service can
      be used by the consumer projects owned by the customer collectively.
    serviceName: The name of the service.  See the `ServiceManager` overview
      for naming requirements.
  """
  customerId = _messages.StringField(1)
  quotaSettings = _messages.MessageField('QuotaSettings', 2)
  serviceName = _messages.StringField(3)
class Diagnostic(_messages.Message):
  r"""A collection that represents a diagnostic message (error or warning)

  Enums:
    KindValueValuesEnum: The kind of diagnostic information provided.

  Fields:
    kind: The kind of diagnostic information provided.
    location: Location of the cause or context of the diagnostic information.
    message: The string message of the diagnostic information.
  """

  class KindValueValuesEnum(_messages.Enum):
    r"""The kind of diagnostic information provided.

    Values:
      WARNING: Warnings and errors
      ERROR: Only errors
    """
    WARNING = 0
    ERROR = 1

  kind = _messages.EnumField('KindValueValuesEnum', 1)
  location = _messages.StringField(2)
  message = _messages.StringField(3)
class DisableServiceRequest(_messages.Message):
  r"""Request message for DisableService method.

  Fields:
    consumerId: The identity of consumer resource which service disablement
      will be applied to.  The Google Service Management implementation
      accepts the following forms: "project:<project_id>",
      "project_number:<project_number>".  Note: this is made compatible with
      google.api.servicecontrol.v1.Operation.consumer_id.
  """
  consumerId = _messages.StringField(1)
class Documentation(_messages.Message):
  r"""`Documentation` provides the information for describing a service.
  Example: <pre><code>documentation:   summary: >     The Google Calendar API
  gives access     to most calendar features.   pages:   - name: Overview
  content: (== include google/foo/overview.md ==)   - name: Tutorial
  content: (== include google/foo/tutorial.md ==)     subpages:     -
  name: Java       content: (== include google/foo/tutorial_java.md
  ==)   rules:   - selector: google.calendar.Calendar.Get     description:
  >       ...   - selector: google.calendar.Calendar.Put     description: >
  ... </code></pre> Documentation is provided in markdown syntax. In addition
  to standard markdown features, definition lists, tables and fenced code
  blocks are supported. Section headers can be provided and are interpreted
  relative to the section nesting of the context where a documentation
  fragment is embedded. Documentation from the IDL is merged with
  documentation defined via the config at normalization time, where
  documentation provided by config rules overrides IDL provided. A number of
  constructs specific to the API platform are supported in documentation text.
  In order to reference a proto element, the following notation can be used:
  <pre><code>[fully.qualified.proto.name][]</code></pre> To override
  the display text used for the link, this can be used:
  <pre><code>[display text][fully.qualified.proto.name]</code></pre>
  Text can be excluded from doc using the following notation:
  <pre><code>(-- internal comment --)</code></pre> Comments can be
  made conditional using a visibility label. The below text will be only
  rendered if the `BETA` label is available: <pre><code>(--BETA: comment
  for BETA users --)</code></pre> A few directives are available in
  documentation. Note that directives must appear on a single line to be
  properly identified. The `include` directive includes a markdown file from
  an external source: <pre><code>(== include path/to/file
  ==)</code></pre> The `resource_for` directive marks a message to be the
  resource of a collection in REST view. If it is not specified, tools attempt
  to infer the resource from the operations in a collection:
  <pre><code>(== resource_for v1.shelves.books ==)</code></pre> The
  directive `suppress_warning` does not directly affect documentation and is
  documented together with service config validation.

  Fields:
    documentationRootUrl: The URL to the root of documentation.
    overview: Declares a single overview page. For example:
      <pre><code>documentation:   summary: ...   overview: (== include
      overview.md ==) </code></pre> This is a shortcut for the following
      declaration (using pages style): <pre><code>documentation:   summary:
      ...   pages:   - name: Overview     content: (== include overview.md
      ==) </code></pre> Note: you cannot specify both `overview` field and
      `pages` field.
    pages: The top level pages for the documentation set.
    rules: Documentation rules for individual elements of the service.
    summary: A short summary of what the service does. Can only be provided by
      plain text.
  """
  documentationRootUrl = _messages.StringField(1)
  overview = _messages.StringField(2)
  pages = _messages.MessageField('Page', 3, repeated=True)
  rules = _messages.MessageField('DocumentationRule', 4, repeated=True)
  summary = _messages.StringField(5)
class DocumentationRule(_messages.Message):
  r"""A documentation rule provides information about individual API elements.

  Fields:
    deprecationDescription: Deprecation description of the selected
      element(s). It can be provided if an element is marked as `deprecated`.
    description: Description of the selected API(s).
    selector: The selector is a comma-separated list of patterns. Each pattern
      is a qualified name of the element which may end in "*", indicating a
      wildcard. Wildcards are only allowed at the end and for a whole
      component of the qualified name, i.e. "foo.*" is ok, but not "foo.b*" or
      "foo.*.bar". To specify a default for all applicable elements, the whole
      pattern "*" is used.
  """
  deprecationDescription = _messages.StringField(1)
  description = _messages.StringField(2)
  selector = _messages.StringField(3)
class EffectiveQuotaGroup(_messages.Message):
  r"""An effective quota group contains both the metadata for a quota group as
  derived from the service config, and the effective limits in that group as
  calculated from producer and consumer overrides together with service
  defaults.

  Enums:
    BillingInteractionValueValuesEnum:

  Fields:
    baseGroup: The service configuration for this quota group, minus the quota
      limits, which are replaced by the effective limits below.
    billingInteraction: A BillingInteractionValueValuesEnum attribute.
    quotas: The usage and limit information for each limit within this quota
      group.
  """

  class BillingInteractionValueValuesEnum(_messages.Enum):
    r"""BillingInteractionValueValuesEnum enum type.

    Values:
      BILLING_INTERACTION_UNSPECIFIED: The interaction between this quota
        group and the project billing status is unspecified.
      NONBILLABLE_ONLY: This quota group is enforced only when the consumer
        project is not billable.
      BILLABLE_ONLY: This quota group is enforced only when the consumer
        project is billable.
      ANY_BILLING_STATUS: This quota group is enforced regardless of the
        consumer project's billing status.
    """
    BILLING_INTERACTION_UNSPECIFIED = 0
    NONBILLABLE_ONLY = 1
    BILLABLE_ONLY = 2
    ANY_BILLING_STATUS = 3

  baseGroup = _messages.MessageField('QuotaGroup', 1)
  billingInteraction = _messages.EnumField('BillingInteractionValueValuesEnum', 2)
  quotas = _messages.MessageField('QuotaInfo', 3, repeated=True)
class EffectiveQuotaLimit(_messages.Message):
r"""An effective quota limit contains the metadata for a quota limit as
derived from the service config, together with fields that describe the
effective limit value and what overrides can be applied to it.
Fields:
baseLimit: The service's configuration for this quota limit.
effectiveLimit: The effective limit value, based on the stored producer
and consumer overrides and the service defaults.
key: The key used to identify this limit when applying overrides. The
consumer_overrides and producer_overrides maps are keyed by strings of
the form "QuotaGroupName/QuotaLimitName".
maxConsumerOverrideAllowed: The maximum override value that a consumer may
specify.
"""
baseLimit = _messages.MessageField('QuotaLimit', 1)
effectiveLimit = _messages.IntegerField(2)
key = _messages.StringField(3)
| python | Apache-2.0 | 1b14f58b3c895f542c724028dd0fb8ae0d816510 | 2026-01-05T07:12:01.390580Z | true |
google/apitools | https://github.com/google/apitools/blob/1b14f58b3c895f542c724028dd0fb8ae0d816510/samples/servicemanagement_sample/servicemanagement_v1/servicemanagement_v1_client.py | samples/servicemanagement_sample/servicemanagement_v1/servicemanagement_v1_client.py | """Generated client library for servicemanagement version v1."""
# NOTE: This file is autogenerated and should not be edited by hand.
from __future__ import absolute_import
from apitools.base.py import base_api
from samples.servicemanagement_sample.servicemanagement_v1 import servicemanagement_v1_messages as messages
class ServicemanagementV1(base_api.BaseApiClient):
"""Generated client library for service servicemanagement version v1."""
MESSAGES_MODULE = messages
BASE_URL = 'https://servicemanagement.googleapis.com/'
MTLS_BASE_URL = ''
_PACKAGE = 'servicemanagement'
_SCOPES = ['https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/service.management']
_VERSION = 'v1'
_CLIENT_ID = 'CLIENT_ID'
_CLIENT_SECRET = 'CLIENT_SECRET'
_USER_AGENT = 'x_Tw5K8nnjoRAqULM9PFAC2b'
_CLIENT_CLASS_NAME = 'ServicemanagementV1'
_URL_VERSION = 'v1'
_API_KEY = None
  def __init__(self, url='', credentials=None,
               get_credentials=True, http=None, model=None,
               log_request=False, log_response=False,
               credentials_args=None, default_global_params=None,
               additional_http_headers=None, response_encoding=None):
    """Create a new servicemanagement handle.

    All arguments are forwarded to base_api.BaseApiClient; `url` falls back
    to BASE_URL when empty.
    """
    url = url or self.BASE_URL
    super(ServicemanagementV1, self).__init__(
        url, credentials=credentials,
        get_credentials=get_credentials, http=http, model=model,
        log_request=log_request, log_response=log_response,
        credentials_args=credentials_args,
        default_global_params=default_global_params,
        additional_http_headers=additional_http_headers,
        response_encoding=response_encoding)
    # One service object per API resource collection exposed by this client.
    self.operations = self.OperationsService(self)
    self.services_accessPolicy = self.ServicesAccessPolicyService(self)
    self.services_configs = self.ServicesConfigsService(self)
    self.services_customerSettings = self.ServicesCustomerSettingsService(self)
    self.services_projectSettings = self.ServicesProjectSettingsService(self)
    self.services = self.ServicesService(self)
    self.v1 = self.V1Service(self)
  class OperationsService(base_api.BaseApiService):
    """Service class for the operations resource."""

    _NAME = 'operations'

    def __init__(self, client):
      super(ServicemanagementV1.OperationsService, self).__init__(client)
      # No media upload endpoints for this resource.
      self._upload_configs = {
          }

    def Get(self, request, global_params=None):
      r"""Gets the latest state of a long-running operation.  Clients can use this.
      method to poll the operation result at intervals as recommended by the API
      service.

      Args:
        request: (ServicemanagementOperationsGetRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (Operation) The response message.
      """
      config = self.GetMethodConfig('Get')
      return self._RunMethod(
          config, request, global_params=global_params)

    # Lambda defers ApiMethodInfo construction until the config is requested.
    Get.method_config = lambda: base_api.ApiMethodInfo(
        http_method='GET',
        method_id='servicemanagement.operations.get',
        ordered_params=['operationsId'],
        path_params=['operationsId'],
        query_params=[],
        relative_path='v1/operations/{operationsId}',
        request_field='',
        request_type_name='ServicemanagementOperationsGetRequest',
        response_type_name='Operation',
        supports_download=False,
    )
  class ServicesAccessPolicyService(base_api.BaseApiService):
    """Service class for the services_accessPolicy resource."""

    _NAME = 'services_accessPolicy'

    def __init__(self, client):
      super(ServicemanagementV1.ServicesAccessPolicyService, self).__init__(client)
      # No media upload endpoints for this resource.
      self._upload_configs = {
          }

    def Query(self, request, global_params=None):
      r"""Method to query the accessibility of a service and any associated.
      visibility labels for a specified user.

      Members of the producer project may call this method and specify any user.

      Any user may call this method, but must specify their own email address.
      In this case the method will return NOT_FOUND if the user has no access to
      the service.

      Args:
        request: (ServicemanagementServicesAccessPolicyQueryRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (QueryUserAccessResponse) The response message.
      """
      config = self.GetMethodConfig('Query')
      return self._RunMethod(
          config, request, global_params=global_params)

    # Lambda defers ApiMethodInfo construction until the config is requested.
    Query.method_config = lambda: base_api.ApiMethodInfo(
        http_method='POST',
        method_id='servicemanagement.services.accessPolicy.query',
        ordered_params=['serviceName'],
        path_params=['serviceName'],
        query_params=['userEmail'],
        relative_path='v1/services/{serviceName}/accessPolicy:query',
        request_field='',
        request_type_name='ServicemanagementServicesAccessPolicyQueryRequest',
        response_type_name='QueryUserAccessResponse',
        supports_download=False,
    )
  class ServicesConfigsService(base_api.BaseApiService):
    """Service class for the services_configs resource."""

    _NAME = 'services_configs'

    def __init__(self, client):
      super(ServicemanagementV1.ServicesConfigsService, self).__init__(client)
      # No media upload endpoints for this resource.
      self._upload_configs = {
          }

    def Create(self, request, global_params=None):
      r"""Creates a new service config (version) for a managed service. This method.
      only stores the service config, but does not apply the service config to
      any backend services.

      Args:
        request: (ServicemanagementServicesConfigsCreateRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (Service) The response message.
      """
      config = self.GetMethodConfig('Create')
      return self._RunMethod(
          config, request, global_params=global_params)

    Create.method_config = lambda: base_api.ApiMethodInfo(
        http_method='POST',
        method_id='servicemanagement.services.configs.create',
        ordered_params=['serviceName'],
        path_params=['serviceName'],
        query_params=[],
        relative_path='v1/services/{serviceName}/configs',
        request_field='service',
        request_type_name='ServicemanagementServicesConfigsCreateRequest',
        response_type_name='Service',
        supports_download=False,
    )

    def Get(self, request, global_params=None):
      r"""Gets a service config (version) for a managed service. If `config_id` is.
      not specified, the latest service config will be returned.

      Args:
        request: (ServicemanagementServicesConfigsGetRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (Service) The response message.
      """
      config = self.GetMethodConfig('Get')
      return self._RunMethod(
          config, request, global_params=global_params)

    Get.method_config = lambda: base_api.ApiMethodInfo(
        http_method='GET',
        method_id='servicemanagement.services.configs.get',
        ordered_params=['serviceName', 'configId'],
        path_params=['configId', 'serviceName'],
        query_params=[],
        relative_path='v1/services/{serviceName}/configs/{configId}',
        request_field='',
        request_type_name='ServicemanagementServicesConfigsGetRequest',
        response_type_name='Service',
        supports_download=False,
    )

    def List(self, request, global_params=None):
      r"""Lists the history of the service config for a managed service,.
      from the newest to the oldest.

      Args:
        request: (ServicemanagementServicesConfigsListRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (ListServiceConfigsResponse) The response message.
      """
      config = self.GetMethodConfig('List')
      return self._RunMethod(
          config, request, global_params=global_params)

    List.method_config = lambda: base_api.ApiMethodInfo(
        http_method='GET',
        method_id='servicemanagement.services.configs.list',
        ordered_params=['serviceName'],
        path_params=['serviceName'],
        query_params=['pageSize', 'pageToken'],
        relative_path='v1/services/{serviceName}/configs',
        request_field='',
        request_type_name='ServicemanagementServicesConfigsListRequest',
        response_type_name='ListServiceConfigsResponse',
        supports_download=False,
    )

    def Submit(self, request, global_params=None):
      r"""Creates a new service config (version) for a managed service based on.
      user-supplied configuration sources files (for example: OpenAPI
      Specification). This method stores the source configurations as well as the
      generated service config. It does NOT apply the service config to any
      backend services.

      Operation<response: SubmitConfigSourceResponse>

      Args:
        request: (ServicemanagementServicesConfigsSubmitRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (Operation) The response message.
      """
      config = self.GetMethodConfig('Submit')
      return self._RunMethod(
          config, request, global_params=global_params)

    Submit.method_config = lambda: base_api.ApiMethodInfo(
        http_method='POST',
        method_id='servicemanagement.services.configs.submit',
        ordered_params=['serviceName'],
        path_params=['serviceName'],
        query_params=[],
        relative_path='v1/services/{serviceName}/configs:submit',
        request_field='submitConfigSourceRequest',
        request_type_name='ServicemanagementServicesConfigsSubmitRequest',
        response_type_name='Operation',
        supports_download=False,
    )
class ServicesCustomerSettingsService(base_api.BaseApiService):
    """Service class for the services_customerSettings resource.

    NOTE: This class is generated from the API discovery document; each RPC
    method delegates to the shared _RunMethod machinery using the lazily
    constructed ApiMethodInfo attached as ``<method>.method_config``.
    """

    _NAME = 'services_customerSettings'

    def __init__(self, client):
        super(ServicemanagementV1.ServicesCustomerSettingsService, self).__init__(client)
        # This resource has no media-upload methods.
        self._upload_configs = {
        }

    def Get(self, request, global_params=None):
        r"""Retrieves the settings that control the specified customer's usage of the
        service.

        Args:
          request: (ServicemanagementServicesCustomerSettingsGetRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (CustomerSettings) The response message.
        """
        config = self.GetMethodConfig('Get')
        return self._RunMethod(
            config, request, global_params=global_params)

    Get.method_config = lambda: base_api.ApiMethodInfo(
        http_method='GET',
        method_id='servicemanagement.services.customerSettings.get',
        ordered_params=['serviceName', 'customerId'],
        path_params=['customerId', 'serviceName'],
        query_params=['expand', 'view'],
        relative_path='v1/services/{serviceName}/customerSettings/{customerId}',
        request_field='',
        request_type_name='ServicemanagementServicesCustomerSettingsGetRequest',
        response_type_name='CustomerSettings',
        supports_download=False,
    )

    def Patch(self, request, global_params=None):
        r"""Updates specified subset of the settings that control the specified
        customer's usage of the service. Attempts to update a field not
        controlled by the caller will result in an access denied error.

        Operation<response: CustomerSettings>

        Args:
          request: (ServicemanagementServicesCustomerSettingsPatchRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (Operation) The response message.
        """
        config = self.GetMethodConfig('Patch')
        return self._RunMethod(
            config, request, global_params=global_params)

    Patch.method_config = lambda: base_api.ApiMethodInfo(
        http_method='PATCH',
        method_id='servicemanagement.services.customerSettings.patch',
        ordered_params=['serviceName', 'customerId'],
        path_params=['customerId', 'serviceName'],
        query_params=['updateMask'],
        relative_path='v1/services/{serviceName}/customerSettings/{customerId}',
        request_field='customerSettings',
        request_type_name='ServicemanagementServicesCustomerSettingsPatchRequest',
        response_type_name='Operation',
        supports_download=False,
    )
class ServicesProjectSettingsService(base_api.BaseApiService):
    """Service class for the services_projectSettings resource.

    NOTE: This class is generated from the API discovery document; each RPC
    method delegates to the shared _RunMethod machinery using the lazily
    constructed ApiMethodInfo attached as ``<method>.method_config``.
    """

    _NAME = 'services_projectSettings'

    def __init__(self, client):
        super(ServicemanagementV1.ServicesProjectSettingsService, self).__init__(client)
        # This resource has no media-upload methods.
        self._upload_configs = {
        }

    def Get(self, request, global_params=None):
        r"""Retrieves the settings that control the specified consumer project's usage
        of the service.

        Args:
          request: (ServicemanagementServicesProjectSettingsGetRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (ProjectSettings) The response message.
        """
        config = self.GetMethodConfig('Get')
        return self._RunMethod(
            config, request, global_params=global_params)

    Get.method_config = lambda: base_api.ApiMethodInfo(
        http_method='GET',
        method_id='servicemanagement.services.projectSettings.get',
        ordered_params=['serviceName', 'consumerProjectId'],
        path_params=['consumerProjectId', 'serviceName'],
        query_params=['expand', 'view'],
        relative_path='v1/services/{serviceName}/projectSettings/{consumerProjectId}',
        request_field='',
        request_type_name='ServicemanagementServicesProjectSettingsGetRequest',
        response_type_name='ProjectSettings',
        supports_download=False,
    )

    def Patch(self, request, global_params=None):
        r"""Updates specified subset of the settings that control the specified
        consumer project's usage of the service. Attempts to update a field not
        controlled by the caller will result in an access denied error.

        Operation<response: ProjectSettings>

        Args:
          request: (ServicemanagementServicesProjectSettingsPatchRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (Operation) The response message.
        """
        config = self.GetMethodConfig('Patch')
        return self._RunMethod(
            config, request, global_params=global_params)

    Patch.method_config = lambda: base_api.ApiMethodInfo(
        http_method='PATCH',
        method_id='servicemanagement.services.projectSettings.patch',
        ordered_params=['serviceName', 'consumerProjectId'],
        path_params=['consumerProjectId', 'serviceName'],
        query_params=['updateMask'],
        relative_path='v1/services/{serviceName}/projectSettings/{consumerProjectId}',
        request_field='projectSettings',
        request_type_name='ServicemanagementServicesProjectSettingsPatchRequest',
        response_type_name='Operation',
        supports_download=False,
    )

    def Update(self, request, global_params=None):
        r"""NOTE: Currently unsupported. Use PatchProjectSettings instead.

        Updates the settings that control the specified consumer project's usage
        of the service. Attempts to update a field not controlled by the caller
        will result in an access denied error.

        Operation<response: ProjectSettings>

        Args:
          request: (ProjectSettings) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (Operation) The response message.
        """
        config = self.GetMethodConfig('Update')
        return self._RunMethod(
            config, request, global_params=global_params)

    # request_field='<request>' means the ProjectSettings message itself is the
    # HTTP request body rather than one of its fields.
    Update.method_config = lambda: base_api.ApiMethodInfo(
        http_method='PUT',
        method_id='servicemanagement.services.projectSettings.update',
        ordered_params=['serviceName', 'consumerProjectId'],
        path_params=['consumerProjectId', 'serviceName'],
        query_params=[],
        relative_path='v1/services/{serviceName}/projectSettings/{consumerProjectId}',
        request_field='<request>',
        request_type_name='ProjectSettings',
        response_type_name='Operation',
        supports_download=False,
    )
class ServicesService(base_api.BaseApiService):
    """Service class for the services resource.

    NOTE: This class is generated from the API discovery document; each RPC
    method delegates to the shared _RunMethod machinery using the lazily
    constructed ApiMethodInfo attached as ``<method>.method_config``.
    """

    _NAME = 'services'

    def __init__(self, client):
        super(ServicemanagementV1.ServicesService, self).__init__(client)
        # This resource has no media-upload methods.
        self._upload_configs = {
        }

    def ConvertConfig(self, request, global_params=None):
        r"""DEPRECATED. `SubmitConfigSource` with `validate_only=true` will provide
        config conversion moving forward.

        Converts an API specification (e.g. Swagger spec) to an
        equivalent `google.api.Service`.

        Args:
          request: (ConvertConfigRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (ConvertConfigResponse) The response message.
        """
        config = self.GetMethodConfig('ConvertConfig')
        return self._RunMethod(
            config, request, global_params=global_params)

    # request_field='<request>' means the request message itself is the HTTP
    # request body rather than one of its fields.
    ConvertConfig.method_config = lambda: base_api.ApiMethodInfo(
        http_method='POST',
        method_id='servicemanagement.services.convertConfig',
        ordered_params=[],
        path_params=[],
        query_params=[],
        relative_path='v1/services:convertConfig',
        request_field='<request>',
        request_type_name='ConvertConfigRequest',
        response_type_name='ConvertConfigResponse',
        supports_download=False,
    )

    def Create(self, request, global_params=None):
        r"""Creates a new managed service.

        Operation<response: ManagedService>

        Args:
          request: (ManagedService) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (Operation) The response message.
        """
        config = self.GetMethodConfig('Create')
        return self._RunMethod(
            config, request, global_params=global_params)

    Create.method_config = lambda: base_api.ApiMethodInfo(
        http_method='POST',
        method_id='servicemanagement.services.create',
        ordered_params=[],
        path_params=[],
        query_params=[],
        relative_path='v1/services',
        request_field='<request>',
        request_type_name='ManagedService',
        response_type_name='Operation',
        supports_download=False,
    )

    def Delete(self, request, global_params=None):
        r"""Deletes a managed service.

        Operation<response: google.protobuf.Empty>

        Args:
          request: (ServicemanagementServicesDeleteRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (Operation) The response message.
        """
        config = self.GetMethodConfig('Delete')
        return self._RunMethod(
            config, request, global_params=global_params)

    Delete.method_config = lambda: base_api.ApiMethodInfo(
        http_method='DELETE',
        method_id='servicemanagement.services.delete',
        ordered_params=['serviceName'],
        path_params=['serviceName'],
        query_params=[],
        relative_path='v1/services/{serviceName}',
        request_field='',
        request_type_name='ServicemanagementServicesDeleteRequest',
        response_type_name='Operation',
        supports_download=False,
    )

    def Disable(self, request, global_params=None):
        r"""Disable a managed service for a project.

        Google Service Management will only disable the managed service even if
        there are other services depend on the managed service.

        Operation<response: DisableServiceResponse>

        Args:
          request: (ServicemanagementServicesDisableRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (Operation) The response message.
        """
        config = self.GetMethodConfig('Disable')
        return self._RunMethod(
            config, request, global_params=global_params)

    Disable.method_config = lambda: base_api.ApiMethodInfo(
        http_method='POST',
        method_id='servicemanagement.services.disable',
        ordered_params=['serviceName'],
        path_params=['serviceName'],
        query_params=[],
        relative_path='v1/services/{serviceName}:disable',
        request_field='disableServiceRequest',
        request_type_name='ServicemanagementServicesDisableRequest',
        response_type_name='Operation',
        supports_download=False,
    )

    def Enable(self, request, global_params=None):
        r"""Enable a managed service for a project with default setting.

        If the managed service has dependencies, they will be enabled as well.

        Operation<response: EnableServiceResponse>

        Args:
          request: (ServicemanagementServicesEnableRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (Operation) The response message.
        """
        config = self.GetMethodConfig('Enable')
        return self._RunMethod(
            config, request, global_params=global_params)

    Enable.method_config = lambda: base_api.ApiMethodInfo(
        http_method='POST',
        method_id='servicemanagement.services.enable',
        ordered_params=['serviceName'],
        path_params=['serviceName'],
        query_params=[],
        relative_path='v1/services/{serviceName}:enable',
        request_field='enableServiceRequest',
        request_type_name='ServicemanagementServicesEnableRequest',
        response_type_name='Operation',
        supports_download=False,
    )

    def Get(self, request, global_params=None):
        r"""Gets a managed service. If the `consumer_project_id` is specified,
        the project's settings for the specified service are also returned.

        Args:
          request: (ServicemanagementServicesGetRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (ManagedService) The response message.
        """
        config = self.GetMethodConfig('Get')
        return self._RunMethod(
            config, request, global_params=global_params)

    Get.method_config = lambda: base_api.ApiMethodInfo(
        http_method='GET',
        method_id='servicemanagement.services.get',
        ordered_params=['serviceName'],
        path_params=['serviceName'],
        query_params=['consumerProjectId', 'expand', 'view'],
        relative_path='v1/services/{serviceName}',
        request_field='',
        request_type_name='ServicemanagementServicesGetRequest',
        response_type_name='ManagedService',
        supports_download=False,
    )

    def GetAccessPolicy(self, request, global_params=None):
        r"""Producer method to retrieve current policy.

        Args:
          request: (ServicemanagementServicesGetAccessPolicyRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (ServiceAccessPolicy) The response message.
        """
        config = self.GetMethodConfig('GetAccessPolicy')
        return self._RunMethod(
            config, request, global_params=global_params)

    GetAccessPolicy.method_config = lambda: base_api.ApiMethodInfo(
        http_method='GET',
        method_id='servicemanagement.services.getAccessPolicy',
        ordered_params=['serviceName'],
        path_params=['serviceName'],
        query_params=[],
        relative_path='v1/services/{serviceName}/accessPolicy',
        request_field='',
        request_type_name='ServicemanagementServicesGetAccessPolicyRequest',
        response_type_name='ServiceAccessPolicy',
        supports_download=False,
    )

    def GetConfig(self, request, global_params=None):
        r"""Gets a service config (version) for a managed service. If `config_id` is
        not specified, the latest service config will be returned.

        Args:
          request: (ServicemanagementServicesGetConfigRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (Service) The response message.
        """
        config = self.GetMethodConfig('GetConfig')
        return self._RunMethod(
            config, request, global_params=global_params)

    GetConfig.method_config = lambda: base_api.ApiMethodInfo(
        http_method='GET',
        method_id='servicemanagement.services.getConfig',
        ordered_params=['serviceName'],
        path_params=['serviceName'],
        query_params=['configId'],
        relative_path='v1/services/{serviceName}/config',
        request_field='',
        request_type_name='ServicemanagementServicesGetConfigRequest',
        response_type_name='Service',
        supports_download=False,
    )

    def List(self, request, global_params=None):
        r"""Lists all managed services. If the `consumer_project_id` is specified,
        the project's settings for the specified service are also returned.

        Args:
          request: (ServicemanagementServicesListRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (ListServicesResponse) The response message.
        """
        config = self.GetMethodConfig('List')
        return self._RunMethod(
            config, request, global_params=global_params)

    List.method_config = lambda: base_api.ApiMethodInfo(
        http_method='GET',
        method_id='servicemanagement.services.list',
        ordered_params=[],
        path_params=[],
        query_params=['category', 'consumerProjectId', 'expand', 'pageSize', 'pageToken', 'producerProjectId'],
        relative_path='v1/services',
        request_field='',
        request_type_name='ServicemanagementServicesListRequest',
        response_type_name='ListServicesResponse',
        supports_download=False,
    )

    def Patch(self, request, global_params=None):
        r"""Updates the specified subset of the configuration. If the specified service
        does not exist the patch operation fails.

        Operation<response: ManagedService>

        Args:
          request: (ServicemanagementServicesPatchRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (Operation) The response message.
        """
        config = self.GetMethodConfig('Patch')
        return self._RunMethod(
            config, request, global_params=global_params)

    Patch.method_config = lambda: base_api.ApiMethodInfo(
        http_method='PATCH',
        method_id='servicemanagement.services.patch',
        ordered_params=['serviceName'],
        path_params=['serviceName'],
        query_params=['updateMask'],
        relative_path='v1/services/{serviceName}',
        request_field='managedService',
        request_type_name='ServicemanagementServicesPatchRequest',
        response_type_name='Operation',
        supports_download=False,
    )

    def PatchConfig(self, request, global_params=None):
        r"""Updates the specified subset of the service resource. Equivalent to
        calling `PatchService` with only the `service_config` field updated.

        Operation<response: google.api.Service>

        Args:
          request: (ServicemanagementServicesPatchConfigRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (Operation) The response message.
        """
        config = self.GetMethodConfig('PatchConfig')
        return self._RunMethod(
            config, request, global_params=global_params)

    PatchConfig.method_config = lambda: base_api.ApiMethodInfo(
        http_method='PATCH',
        method_id='servicemanagement.services.patchConfig',
        ordered_params=['serviceName'],
        path_params=['serviceName'],
        query_params=['updateMask'],
        relative_path='v1/services/{serviceName}/config',
        request_field='service',
        request_type_name='ServicemanagementServicesPatchConfigRequest',
        response_type_name='Operation',
        supports_download=False,
    )

    def Update(self, request, global_params=None):
        r"""Updates the configuration of a service. If the specified service does not
        already exist, then it is created.

        Operation<response: ManagedService>

        Args:
          request: (ServicemanagementServicesUpdateRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (Operation) The response message.
        """
        config = self.GetMethodConfig('Update')
        return self._RunMethod(
            config, request, global_params=global_params)

    Update.method_config = lambda: base_api.ApiMethodInfo(
        http_method='PUT',
        method_id='servicemanagement.services.update',
        ordered_params=['serviceName'],
        path_params=['serviceName'],
        query_params=['updateMask'],
        relative_path='v1/services/{serviceName}',
        request_field='managedService',
        request_type_name='ServicemanagementServicesUpdateRequest',
        response_type_name='Operation',
        supports_download=False,
    )

    def UpdateAccessPolicy(self, request, global_params=None):
        r"""Producer method to update the current policy. This method will return an
        error if the policy is too large (more than 50 entries across all lists).

        Args:
          request: (ServiceAccessPolicy) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (ServiceAccessPolicy) The response message.
        """
        config = self.GetMethodConfig('UpdateAccessPolicy')
        return self._RunMethod(
            config, request, global_params=global_params)

    UpdateAccessPolicy.method_config = lambda: base_api.ApiMethodInfo(
        http_method='PUT',
        method_id='servicemanagement.services.updateAccessPolicy',
        ordered_params=['serviceName'],
        path_params=['serviceName'],
        query_params=[],
        relative_path='v1/services/{serviceName}/accessPolicy',
        request_field='<request>',
        request_type_name='ServiceAccessPolicy',
        response_type_name='ServiceAccessPolicy',
        supports_download=False,
    )

    def UpdateConfig(self, request, global_params=None):
        r"""Updates the specified subset of the service resource. Equivalent to
        calling `UpdateService` with only the `service_config` field updated.

        Operation<response: google.api.Service>

        Args:
          request: (ServicemanagementServicesUpdateConfigRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (Operation) The response message.
        """
        config = self.GetMethodConfig('UpdateConfig')
        return self._RunMethod(
            config, request, global_params=global_params)

    UpdateConfig.method_config = lambda: base_api.ApiMethodInfo(
        http_method='PUT',
        method_id='servicemanagement.services.updateConfig',
        ordered_params=['serviceName'],
        path_params=['serviceName'],
        query_params=['updateMask'],
        relative_path='v1/services/{serviceName}/config',
        request_field='service',
        request_type_name='ServicemanagementServicesUpdateConfigRequest',
        response_type_name='Operation',
        supports_download=False,
    )
class V1Service(base_api.BaseApiService):
"""Service class for the v1 resource."""
_NAME = 'v1'
def __init__(self, client):
super(ServicemanagementV1.V1Service, self).__init__(client)
self._upload_configs = {
}
def ConvertConfig(self, request, global_params=None):
    r"""DEPRECATED. `SubmitConfigSource` with `validate_only=true` will provide
    config conversion moving forward.

    Converts an API specification (e.g. Swagger spec) to an
    equivalent `google.api.Service`.

    Args:
      request: (ConvertConfigRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (ConvertConfigResponse) The response message.
    """
    config = self.GetMethodConfig('ConvertConfig')
    return self._RunMethod(
        config, request, global_params=global_params)
| python | Apache-2.0 | 1b14f58b3c895f542c724028dd0fb8ae0d816510 | 2026-01-05T07:12:01.390580Z | true |
google/apitools | https://github.com/google/apitools/blob/1b14f58b3c895f542c724028dd0fb8ae0d816510/samples/storage_sample/uploads_test.py | samples/storage_sample/uploads_test.py | #
# Copyright 2015 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Integration tests for uploading and downloading to GCS.
These tests exercise most of the corner cases for upload/download of
files in apitools, via GCS. There are no performance tests here yet.
"""
import json
import os
import random
import string
import unittest
import six
from apitools.base.py import transfer
import storage
_CLIENT = None
def _GetClient():
    """Return the shared StorageV1 client, constructing it on first use.

    The client is memoized in the module-level _CLIENT so all tests in this
    module reuse a single connection/credential setup.
    """
    global _CLIENT  # pylint: disable=global-statement
    if _CLIENT is not None:
        return _CLIENT
    _CLIENT = storage.StorageV1()
    return _CLIENT
class UploadsTest(unittest.TestCase):
    """Integration tests for uploads to GCS via the generated storage client.

    Each test stages random ASCII content in an in-memory stream, uploads it
    under the 'uploads/' prefix of the 'apitools' bucket, and cleans up the
    created objects in tearDown.
    """

    _DEFAULT_BUCKET = 'apitools'
    _TESTDATA_PREFIX = 'uploads'

    def setUp(self):
        self.__client = _GetClient()
        # Names of objects created during the test, deleted in tearDown.
        self.__files = []
        self.__content = ''
        self.__buffer = None
        self.__upload = None

    def tearDown(self):
        self.__DeleteFiles()

    def __ResetUpload(self, size, auto_transfer=True):
        """Create `size` bytes of random content and a fresh Upload over it."""
        self.__content = ''.join(
            random.choice(string.ascii_letters) for _ in range(size))
        self.__buffer = six.StringIO(self.__content)
        self.__upload = storage.Upload.FromStream(
            self.__buffer, 'text/plain', auto_transfer=auto_transfer)

    def __DeleteFiles(self):
        """Delete every object recorded in self.__files."""
        for filename in self.__files:
            self.__DeleteFile(filename)

    def __DeleteFile(self, filename):
        # NOTE(review): os.path.join produces backslashes on Windows, which
        # would be wrong for GCS object names — presumably these samples only
        # run on POSIX; confirm before relying on this elsewhere.
        object_name = os.path.join(self._TESTDATA_PREFIX, filename)
        req = storage.StorageObjectsDeleteRequest(
            bucket=self._DEFAULT_BUCKET, object=object_name)
        self.__client.objects.Delete(req)

    def __InsertRequest(self, filename):
        """Build an Insert request for `filename` under the test prefix."""
        object_name = os.path.join(self._TESTDATA_PREFIX, filename)
        return storage.StorageObjectsInsertRequest(
            name=object_name, bucket=self._DEFAULT_BUCKET)

    def __GetRequest(self, filename):
        """Build a Get request for `filename` under the test prefix."""
        object_name = os.path.join(self._TESTDATA_PREFIX, filename)
        return storage.StorageObjectsGetRequest(
            object=object_name, bucket=self._DEFAULT_BUCKET)

    def __InsertFile(self, filename, request=None):
        """Upload the staged content as `filename` and record it for cleanup."""
        if request is None:
            request = self.__InsertRequest(filename)
        response = self.__client.objects.Insert(request, upload=self.__upload)
        self.assertIsNotNone(response)
        self.__files.append(filename)
        return response

    def testZeroBytes(self):
        filename = 'zero_byte_file'
        self.__ResetUpload(0)
        response = self.__InsertFile(filename)
        self.assertEqual(0, response.size)

    def testSimpleUpload(self):
        filename = 'fifteen_byte_file'
        self.__ResetUpload(15)
        response = self.__InsertFile(filename)
        self.assertEqual(15, response.size)

    def testMultipartUpload(self):
        # Supplying object metadata alongside media forces a multipart upload.
        filename = 'fifteen_byte_file'
        self.__ResetUpload(15)
        request = self.__InsertRequest(filename)
        request.object = storage.Object(contentLanguage='en')
        response = self.__InsertFile(filename, request=request)
        self.assertEqual(15, response.size)
        self.assertEqual('en', response.contentLanguage)

    def testAutoUpload(self):
        filename = 'ten_meg_file'
        size = 10 << 20
        self.__ResetUpload(size)
        request = self.__InsertRequest(filename)
        response = self.__InsertFile(filename, request=request)
        self.assertEqual(size, response.size)

    def testStreamMedia(self):
        # With auto_transfer off, Insert only initiates the resumable session;
        # the bytes are only consumed when StreamMedia is called explicitly.
        filename = 'ten_meg_file'
        size = 10 << 20
        self.__ResetUpload(size, auto_transfer=False)
        self.__upload.strategy = 'resumable'
        self.__upload.total_size = size
        request = self.__InsertRequest(filename)
        initial_response = self.__client.objects.Insert(
            request, upload=self.__upload)
        self.assertIsNotNone(initial_response)
        self.assertEqual(0, self.__buffer.tell())
        self.__upload.StreamMedia()
        self.assertEqual(size, self.__buffer.tell())

    def testBreakAndResumeUpload(self):
        # Simulates a process dying mid-upload and later attempts resuming
        # from the serialized upload state.
        filename = ('ten_meg_file_' +
                    ''.join(random.sample(string.ascii_letters, 5)))
        size = 10 << 20
        self.__ResetUpload(size, auto_transfer=False)
        self.__upload.strategy = 'resumable'
        self.__upload.total_size = size
        # Start the upload
        request = self.__InsertRequest(filename)
        initial_response = self.__client.objects.Insert(
            request, upload=self.__upload)
        self.assertIsNotNone(initial_response)
        self.assertEqual(0, self.__buffer.tell())
        # Pretend the process died, and resume with a new attempt at the
        # same upload.
        upload_data = json.dumps(self.__upload.serialization_data)
        second_upload_attempt = transfer.Upload.FromData(
            self.__buffer, upload_data, self.__upload.http)
        second_upload_attempt._Upload__SendChunk(0)
        self.assertEqual(second_upload_attempt.chunksize, self.__buffer.tell())
        # Simulate a third try, and stream from there.
        final_upload_attempt = transfer.Upload.FromData(
            self.__buffer, upload_data, self.__upload.http)
        final_upload_attempt.StreamInChunks()
        self.assertEqual(size, self.__buffer.tell())
        # Verify the upload
        object_info = self.__client.objects.Get(self.__GetRequest(filename))
        self.assertEqual(size, object_info.size)
        # Confirm that a new attempt successfully does nothing.
        completed_upload_attempt = transfer.Upload.FromData(
            self.__buffer, upload_data, self.__upload.http)
        self.assertTrue(completed_upload_attempt.complete)
        completed_upload_attempt.StreamInChunks()
        # Verify the upload didn't pick up extra bytes.
        object_info = self.__client.objects.Get(self.__GetRequest(filename))
        self.assertEqual(size, object_info.size)
# TODO(craigcitro): Add tests for callbacks (especially around
# finish callback).
# Allow running this module directly as a script: python uploads_test.py
if __name__ == '__main__':
    unittest.main()
| python | Apache-2.0 | 1b14f58b3c895f542c724028dd0fb8ae0d816510 | 2026-01-05T07:12:01.390580Z | false |
google/apitools | https://github.com/google/apitools/blob/1b14f58b3c895f542c724028dd0fb8ae0d816510/samples/storage_sample/downloads_test.py | samples/storage_sample/downloads_test.py | #
# Copyright 2015 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Integration tests for uploading and downloading to GCS.
These tests exercise most of the corner cases for upload/download of
files in apitools, via GCS. There are no performance tests here yet.
"""
import json
import os
import unittest
import six
from apitools.base.py import exceptions
import storage
_CLIENT = None
def _GetClient():
    """Return the shared StorageV1 client, constructing it on first use.

    The client is memoized in the module-level _CLIENT so all tests in this
    module reuse a single connection/credential setup.
    """
    global _CLIENT  # pylint: disable=global-statement
    if _CLIENT is not None:
        return _CLIENT
    _CLIENT = storage.StorageV1()
    return _CLIENT
class DownloadsTest(unittest.TestCase):
    """Integration tests for downloads from GCS via the generated client.

    Each test downloads fixed fixtures under the 'testdata/' prefix of the
    'apitools' bucket into an in-memory stream and checks the bytes received.
    """

    _DEFAULT_BUCKET = 'apitools'
    _TESTDATA_PREFIX = 'testdata'

    def setUp(self):
        self.__client = _GetClient()
        self.__ResetDownload()

    def __ResetDownload(self, auto_transfer=False):
        """Replace the buffer and Download object with fresh instances."""
        self.__buffer = six.StringIO()
        self.__download = storage.Download.FromStream(
            self.__buffer, auto_transfer=auto_transfer)

    def __GetTestdataFileContents(self, filename):
        """Read the local copy of a testdata fixture for comparison.

        Args:
          filename: basename of a file in the local testdata directory.
        Returns:
          The file's contents as a string.
        """
        file_path = os.path.join(
            os.path.dirname(__file__), self._TESTDATA_PREFIX, filename)
        # BUG FIX: the original used open(file_path).read(), leaking the file
        # handle; a context manager guarantees it is closed.
        with open(file_path) as testdata_file:
            file_contents = testdata_file.read()
        self.assertIsNotNone(
            file_contents, msg=('Could not read file %s' % filename))
        return file_contents

    @classmethod
    def __GetRequest(cls, filename):
        """Build a Get request for `filename` under the testdata prefix."""
        object_name = os.path.join(cls._TESTDATA_PREFIX, filename)
        return storage.StorageObjectsGetRequest(
            bucket=cls._DEFAULT_BUCKET, object=object_name)

    def __GetFile(self, request):
        """Issue the Get with our download attached; media Gets return None."""
        response = self.__client.objects.Get(request, download=self.__download)
        self.assertIsNone(response, msg=(
            'Unexpected nonempty response for file download: %s' % response))

    def __GetAndStream(self, request):
        self.__GetFile(request)
        self.__download.StreamInChunks()

    def testZeroBytes(self):
        request = self.__GetRequest('zero_byte_file')
        self.__GetAndStream(request)
        self.assertEqual(0, self.__buffer.tell())

    def testObjectDoesNotExist(self):
        self.__ResetDownload(auto_transfer=True)
        with self.assertRaises(exceptions.HttpError):
            self.__GetFile(self.__GetRequest('nonexistent_file'))

    def testAutoTransfer(self):
        self.__ResetDownload(auto_transfer=True)
        self.__GetFile(self.__GetRequest('fifteen_byte_file'))
        file_contents = self.__GetTestdataFileContents('fifteen_byte_file')
        self.assertEqual(15, self.__buffer.tell())
        self.__buffer.seek(0)
        self.assertEqual(file_contents, self.__buffer.read())

    def testFilenameWithSpaces(self):
        self.__ResetDownload(auto_transfer=True)
        self.__GetFile(self.__GetRequest('filename with spaces'))
        # NOTE(craigcitro): We add _ here to make this play nice with blaze.
        file_contents = self.__GetTestdataFileContents('filename_with_spaces')
        self.assertEqual(15, self.__buffer.tell())
        self.__buffer.seek(0)
        self.assertEqual(file_contents, self.__buffer.read())

    def testGetRange(self):
        # TODO(craigcitro): Test about a thousand more corner cases.
        file_contents = self.__GetTestdataFileContents('fifteen_byte_file')
        self.__GetFile(self.__GetRequest('fifteen_byte_file'))
        # GetRange(5, 10) is inclusive on both ends: bytes 5..10, six bytes.
        self.__download.GetRange(5, 10)
        self.assertEqual(6, self.__buffer.tell())
        self.__buffer.seek(0)
        self.assertEqual(file_contents[5:11], self.__buffer.read())

    def testGetRangeWithNegativeStart(self):
        # A negative start means "the last N bytes", like a Range suffix.
        file_contents = self.__GetTestdataFileContents('fifteen_byte_file')
        self.__GetFile(self.__GetRequest('fifteen_byte_file'))
        self.__download.GetRange(-3)
        self.assertEqual(3, self.__buffer.tell())
        self.__buffer.seek(0)
        self.assertEqual(file_contents[-3:], self.__buffer.read())

    def testGetRangeWithPositiveStart(self):
        file_contents = self.__GetTestdataFileContents('fifteen_byte_file')
        self.__GetFile(self.__GetRequest('fifteen_byte_file'))
        self.__download.GetRange(2)
        self.assertEqual(13, self.__buffer.tell())
        self.__buffer.seek(0)
        self.assertEqual(file_contents[2:15], self.__buffer.read())

    def testSmallChunksizes(self):
        file_contents = self.__GetTestdataFileContents('fifteen_byte_file')
        request = self.__GetRequest('fifteen_byte_file')
        for chunksize in (2, 3, 15, 100):
            self.__ResetDownload()
            self.__download.chunksize = chunksize
            self.__GetAndStream(request)
            self.assertEqual(15, self.__buffer.tell())
            self.__buffer.seek(0)
            self.assertEqual(file_contents, self.__buffer.read(15))

    def testLargeFileChunksizes(self):
        request = self.__GetRequest('thirty_meg_file')
        for chunksize in (1048576, 40 * 1048576):
            self.__ResetDownload()
            self.__download.chunksize = chunksize
            self.__GetAndStream(request)
            self.__buffer.seek(0)

    def testAutoGzipObject(self):
        # TODO(craigcitro): Move this to a new object once we have a more
        # permanent one, see: http://b/12250275
        request = storage.StorageObjectsGetRequest(
            bucket='ottenl-gzip', object='50K.txt')
        # First, try without auto-transfer.
        self.__GetFile(request)
        self.assertEqual(0, self.__buffer.tell())
        self.__download.StreamInChunks()
        self.assertEqual(50000, self.__buffer.tell())
        # Next, try with auto-transfer.
        self.__ResetDownload(auto_transfer=True)
        self.__GetFile(request)
        self.assertEqual(50000, self.__buffer.tell())

    def testSmallGzipObject(self):
        request = self.__GetRequest('zero-gzipd.html')
        self.__GetFile(request)
        self.assertEqual(0, self.__buffer.tell())
        additional_headers = {'accept-encoding': 'gzip, deflate'}
        self.__download.StreamInChunks(additional_headers=additional_headers)
        self.assertEqual(0, self.__buffer.tell())

    def testSerializedDownload(self):
        """Rebuild a Download from serialized state and stream it."""

        def _ProgressCallback(unused_response, download_object):
            print('Progress %s' % download_object.progress)

        file_contents = self.__GetTestdataFileContents('fifteen_byte_file')
        object_name = os.path.join(self._TESTDATA_PREFIX, 'fifteen_byte_file')
        request = storage.StorageObjectsGetRequest(
            bucket=self._DEFAULT_BUCKET, object=object_name)
        response = self.__client.objects.Get(request)
        # pylint: disable=attribute-defined-outside-init
        self.__buffer = six.StringIO()
        download_data = json.dumps({
            'auto_transfer': False,
            'progress': 0,
            'total_size': response.size,
            'url': response.mediaLink,
        })
        self.__download = storage.Download.FromData(
            self.__buffer, download_data, http=self.__client.http)
        self.__download.StreamInChunks(callback=_ProgressCallback)
        self.assertEqual(15, self.__buffer.tell())
        self.__buffer.seek(0)
        self.assertEqual(file_contents, self.__buffer.read(15))
# Allow running this module directly as a script: python downloads_test.py
if __name__ == '__main__':
    unittest.main()
| python | Apache-2.0 | 1b14f58b3c895f542c724028dd0fb8ae0d816510 | 2026-01-05T07:12:01.390580Z | false |
google/apitools | https://github.com/google/apitools/blob/1b14f58b3c895f542c724028dd0fb8ae0d816510/samples/storage_sample/storage_v1/storage_v1_messages.py | samples/storage_sample/storage_v1/storage_v1_messages.py | """Generated message classes for storage version v1.
Stores and retrieves potentially large, immutable data objects.
"""
# NOTE: This file is autogenerated and should not be edited by hand.
from __future__ import absolute_import
from apitools.base.protorpclite import message_types as _message_types
from apitools.base.protorpclite import messages as _messages
from apitools.base.py import encoding
from apitools.base.py import extra_types
package = 'storage'
class Bucket(_messages.Message):
r"""A bucket.
Messages:
CorsValueListEntry: A CorsValueListEntry object.
LifecycleValue: The bucket's lifecycle configuration. See lifecycle
management for more information.
LoggingValue: The bucket's logging configuration, which defines the
destination bucket and optional name prefix for the current bucket's
logs.
OwnerValue: The owner of the bucket. This is always the project team's
owner group.
VersioningValue: The bucket's versioning configuration.
WebsiteValue: The bucket's website configuration, controlling how the
service behaves when accessing bucket contents as a web site. See the
Static Website Examples for more information.
Fields:
acl: Access controls on the bucket.
cors: The bucket's Cross-Origin Resource Sharing (CORS) configuration.
defaultObjectAcl: Default access controls to apply to new objects when no
ACL is provided.
etag: HTTP 1.1 Entity tag for the bucket.
id: The ID of the bucket.
kind: The kind of item this is. For buckets, this is always
storage#bucket.
lifecycle: The bucket's lifecycle configuration. See lifecycle management
for more information.
location: The location of the bucket. Object data for objects in the
bucket resides in physical storage within this region. Defaults to US.
See the developer's guide for the authoritative list.
logging: The bucket's logging configuration, which defines the destination
bucket and optional name prefix for the current bucket's logs.
metageneration: The metadata generation of this bucket.
name: The name of the bucket.
owner: The owner of the bucket. This is always the project team's owner
group.
projectNumber: The project number of the project the bucket belongs to.
selfLink: The URI of this bucket.
storageClass: The bucket's storage class. This defines how objects in the
bucket are stored and determines the SLA and the cost of storage. Values
include STANDARD, NEARLINE and DURABLE_REDUCED_AVAILABILITY. Defaults to
STANDARD. For more information, see storage classes.
timeCreated: The creation time of the bucket in RFC 3339 format.
updated: The modification time of the bucket in RFC 3339 format.
versioning: The bucket's versioning configuration.
website: The bucket's website configuration, controlling how the service
behaves when accessing bucket contents as a web site. See the Static
Website Examples for more information.
"""
class CorsValueListEntry(_messages.Message):
r"""A CorsValueListEntry object.
Fields:
maxAgeSeconds: The value, in seconds, to return in the Access-Control-
Max-Age header used in preflight responses.
method: The list of HTTP methods on which to include CORS response
headers, (GET, OPTIONS, POST, etc) Note: "*" is permitted in the list
of methods, and means "any method".
origin: The list of Origins eligible to receive CORS response headers.
Note: "*" is permitted in the list of origins, and means "any Origin".
responseHeader: The list of HTTP headers other than the simple response
headers to give permission for the user-agent to share across domains.
"""
maxAgeSeconds = _messages.IntegerField(1, variant=_messages.Variant.INT32)
method = _messages.StringField(2, repeated=True)
origin = _messages.StringField(3, repeated=True)
responseHeader = _messages.StringField(4, repeated=True)
class LifecycleValue(_messages.Message):
r"""The bucket's lifecycle configuration. See lifecycle management for
more information.
Messages:
RuleValueListEntry: A RuleValueListEntry object.
Fields:
rule: A lifecycle management rule, which is made of an action to take
and the condition(s) under which the action will be taken.
"""
class RuleValueListEntry(_messages.Message):
r"""A RuleValueListEntry object.
Messages:
ActionValue: The action to take.
ConditionValue: The condition(s) under which the action will be taken.
Fields:
action: The action to take.
condition: The condition(s) under which the action will be taken.
"""
class ActionValue(_messages.Message):
r"""The action to take.
Fields:
type: Type of the action. Currently, only Delete is supported.
"""
type = _messages.StringField(1)
class ConditionValue(_messages.Message):
r"""The condition(s) under which the action will be taken.
Fields:
age: Age of an object (in days). This condition is satisfied when an
object reaches the specified age.
createdBefore: A date in RFC 3339 format with only the date part
(for instance, "2013-01-15"). This condition is satisfied when an
object is created before midnight of the specified date in UTC.
isLive: Relevant only for versioned objects. If the value is true,
this condition matches live objects; if the value is false, it
matches archived objects.
numNewerVersions: Relevant only for versioned objects. If the value
is N, this condition is satisfied when there are at least N
versions (including the live version) newer than this version of
the object.
"""
age = _messages.IntegerField(1, variant=_messages.Variant.INT32)
createdBefore = extra_types.DateField(2)
isLive = _messages.BooleanField(3)
numNewerVersions = _messages.IntegerField(4, variant=_messages.Variant.INT32)
action = _messages.MessageField('ActionValue', 1)
condition = _messages.MessageField('ConditionValue', 2)
rule = _messages.MessageField('RuleValueListEntry', 1, repeated=True)
class LoggingValue(_messages.Message):
r"""The bucket's logging configuration, which defines the destination
bucket and optional name prefix for the current bucket's logs.
Fields:
logBucket: The destination bucket where the current bucket's logs should
be placed.
logObjectPrefix: A prefix for log object names.
"""
logBucket = _messages.StringField(1)
logObjectPrefix = _messages.StringField(2)
class OwnerValue(_messages.Message):
r"""The owner of the bucket. This is always the project team's owner
group.
Fields:
entity: The entity, in the form project-owner-projectId.
entityId: The ID for the entity.
"""
entity = _messages.StringField(1)
entityId = _messages.StringField(2)
class VersioningValue(_messages.Message):
r"""The bucket's versioning configuration.
Fields:
enabled: While set to true, versioning is fully enabled for this bucket.
"""
enabled = _messages.BooleanField(1)
class WebsiteValue(_messages.Message):
r"""The bucket's website configuration, controlling how the service
behaves when accessing bucket contents as a web site. See the Static
Website Examples for more information.
Fields:
mainPageSuffix: If the requested object path is missing, the service
will ensure the path has a trailing '/', append this suffix, and
attempt to retrieve the resulting object. This allows the creation of
index.html objects to represent directory pages.
notFoundPage: If the requested object path is missing, and any
mainPageSuffix object is missing, if applicable, the service will
return the named object from this bucket as the content for a 404 Not
Found result.
"""
mainPageSuffix = _messages.StringField(1)
notFoundPage = _messages.StringField(2)
acl = _messages.MessageField('BucketAccessControl', 1, repeated=True)
cors = _messages.MessageField('CorsValueListEntry', 2, repeated=True)
defaultObjectAcl = _messages.MessageField('ObjectAccessControl', 3, repeated=True)
etag = _messages.StringField(4)
id = _messages.StringField(5)
kind = _messages.StringField(6, default='storage#bucket')
lifecycle = _messages.MessageField('LifecycleValue', 7)
location = _messages.StringField(8)
logging = _messages.MessageField('LoggingValue', 9)
metageneration = _messages.IntegerField(10)
name = _messages.StringField(11)
owner = _messages.MessageField('OwnerValue', 12)
projectNumber = _messages.IntegerField(13, variant=_messages.Variant.UINT64)
selfLink = _messages.StringField(14)
storageClass = _messages.StringField(15)
timeCreated = _message_types.DateTimeField(16)
updated = _message_types.DateTimeField(17)
versioning = _messages.MessageField('VersioningValue', 18)
website = _messages.MessageField('WebsiteValue', 19)
class BucketAccessControl(_messages.Message):
r"""An access-control entry.
Messages:
ProjectTeamValue: The project team associated with the entity, if any.
Fields:
bucket: The name of the bucket.
domain: The domain associated with the entity, if any.
email: The email address associated with the entity, if any.
entity: The entity holding the permission, in one of the following forms:
- user-userId - user-email - group-groupId - group-email - domain-
domain - project-team-projectId - allUsers - allAuthenticatedUsers
Examples: - The user liz@example.com would be user-liz@example.com. -
The group example@googlegroups.com would be group-
example@googlegroups.com. - To refer to all members of the Google Apps
for Business domain example.com, the entity would be domain-example.com.
entityId: The ID for the entity, if any.
etag: HTTP 1.1 Entity tag for the access-control entry.
id: The ID of the access-control entry.
kind: The kind of item this is. For bucket access control entries, this is
always storage#bucketAccessControl.
projectTeam: The project team associated with the entity, if any.
role: The access permission for the entity. Can be READER, WRITER, or
OWNER.
selfLink: The link to this access-control entry.
"""
class ProjectTeamValue(_messages.Message):
r"""The project team associated with the entity, if any.
Fields:
projectNumber: The project number.
team: The team. Can be owners, editors, or viewers.
"""
projectNumber = _messages.StringField(1)
team = _messages.StringField(2)
bucket = _messages.StringField(1)
domain = _messages.StringField(2)
email = _messages.StringField(3)
entity = _messages.StringField(4)
entityId = _messages.StringField(5)
etag = _messages.StringField(6)
id = _messages.StringField(7)
kind = _messages.StringField(8, default='storage#bucketAccessControl')
projectTeam = _messages.MessageField('ProjectTeamValue', 9)
role = _messages.StringField(10)
selfLink = _messages.StringField(11)
class BucketAccessControls(_messages.Message):
r"""An access-control list.
Fields:
items: The list of items.
kind: The kind of item this is. For lists of bucket access control
entries, this is always storage#bucketAccessControls.
"""
items = _messages.MessageField('BucketAccessControl', 1, repeated=True)
kind = _messages.StringField(2, default='storage#bucketAccessControls')
class Buckets(_messages.Message):
r"""A list of buckets.
Fields:
items: The list of items.
kind: The kind of item this is. For lists of buckets, this is always
storage#buckets.
nextPageToken: The continuation token, used to page through large result
sets. Provide this value in a subsequent request to return the next page
of results.
"""
items = _messages.MessageField('Bucket', 1, repeated=True)
kind = _messages.StringField(2, default='storage#buckets')
nextPageToken = _messages.StringField(3)
class Channel(_messages.Message):
r"""An notification channel used to watch for resource changes.
Messages:
ParamsValue: Additional parameters controlling delivery channel behavior.
Optional.
Fields:
address: The address where notifications are delivered for this channel.
expiration: Date and time of notification channel expiration, expressed as
a Unix timestamp, in milliseconds. Optional.
id: A UUID or similar unique string that identifies this channel.
kind: Identifies this as a notification channel used to watch for changes
to a resource. Value: the fixed string "api#channel".
params: Additional parameters controlling delivery channel behavior.
Optional.
payload: A Boolean value to indicate whether payload is wanted. Optional.
resourceId: An opaque ID that identifies the resource being watched on
this channel. Stable across different API versions.
resourceUri: A version-specific identifier for the watched resource.
token: An arbitrary string delivered to the target address with each
notification delivered over this channel. Optional.
type: The type of delivery mechanism used for this channel.
"""
@encoding.MapUnrecognizedFields('additionalProperties')
class ParamsValue(_messages.Message):
r"""Additional parameters controlling delivery channel behavior. Optional.
Messages:
AdditionalProperty: An additional property for a ParamsValue object.
Fields:
additionalProperties: Declares a new parameter by name.
"""
class AdditionalProperty(_messages.Message):
r"""An additional property for a ParamsValue object.
Fields:
key: Name of the additional property.
value: A string attribute.
"""
key = _messages.StringField(1)
value = _messages.StringField(2)
additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)
address = _messages.StringField(1)
expiration = _messages.IntegerField(2)
id = _messages.StringField(3)
kind = _messages.StringField(4, default='api#channel')
params = _messages.MessageField('ParamsValue', 5)
payload = _messages.BooleanField(6)
resourceId = _messages.StringField(7)
resourceUri = _messages.StringField(8)
token = _messages.StringField(9)
type = _messages.StringField(10)
class ComposeRequest(_messages.Message):
r"""A Compose request.
Messages:
SourceObjectsValueListEntry: A SourceObjectsValueListEntry object.
Fields:
destination: Properties of the resulting object.
kind: The kind of item this is.
sourceObjects: The list of source objects that will be concatenated into a
single object.
"""
class SourceObjectsValueListEntry(_messages.Message):
r"""A SourceObjectsValueListEntry object.
Messages:
ObjectPreconditionsValue: Conditions that must be met for this operation
to execute.
Fields:
generation: The generation of this object to use as the source.
name: The source object's name. The source object's bucket is implicitly
the destination bucket.
objectPreconditions: Conditions that must be met for this operation to
execute.
"""
class ObjectPreconditionsValue(_messages.Message):
r"""Conditions that must be met for this operation to execute.
Fields:
ifGenerationMatch: Only perform the composition if the generation of
the source object that would be used matches this value. If this
value and a generation are both specified, they must be the same
value or the call will fail.
"""
ifGenerationMatch = _messages.IntegerField(1)
generation = _messages.IntegerField(1)
name = _messages.StringField(2)
objectPreconditions = _messages.MessageField('ObjectPreconditionsValue', 3)
destination = _messages.MessageField('Object', 1)
kind = _messages.StringField(2, default='storage#composeRequest')
sourceObjects = _messages.MessageField('SourceObjectsValueListEntry', 3, repeated=True)
class Notification(_messages.Message):
r"""A subscription to receive Google PubSub notifications.
Messages:
CustomAttributesValue: An optional list of additional attributes to attach
to each Cloud PubSub message published for this notification
subscription.
Fields:
bucket: The name of the bucket this subscription is particular to.
custom_attributes: An optional list of additional attributes to attach to
each Cloud PubSub message published for this notification subscription.
etag: HTTP 1.1 Entity tag for this subscription notification.
event_types: If present, only send notifications about listed event types.
If empty, sent notifications for all event types.
id: The ID of the notification.
kind: The kind of item this is. For notifications, this is always
storage#notification.
object_metadata_format: If payload_content is OBJECT_METADATA, controls
the format of that metadata. Otherwise, must not be set.
object_name_prefix: If present, only apply this notification configuration
to object names that begin with this prefix.
payload_content: The desired content of the Payload. Defaults to
OBJECT_METADATA.
selfLink: The canonical URL of this notification.
topic: The Cloud PubSub topic to which this subscription publishes.
Formatted as: '//pubsub.googleapis.com/projects/{project-
identifier}/topics/{my-topic}'
"""
@encoding.MapUnrecognizedFields('additionalProperties')
class CustomAttributesValue(_messages.Message):
r"""An optional list of additional attributes to attach to each Cloud
PubSub message published for this notification subscription.
Messages:
AdditionalProperty: An additional property for a CustomAttributesValue
object.
Fields:
additionalProperties: Additional properties of type
CustomAttributesValue
"""
class AdditionalProperty(_messages.Message):
r"""An additional property for a CustomAttributesValue object.
Fields:
key: Name of the additional property.
value: A string attribute.
"""
key = _messages.StringField(1)
value = _messages.StringField(2)
additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)
bucket = _messages.StringField(1)
custom_attributes = _messages.MessageField('CustomAttributesValue', 2)
etag = _messages.StringField(3)
event_types = _messages.StringField(4, repeated=True)
id = _messages.StringField(5)
kind = _messages.StringField(6, default='storage#notification')
object_metadata_format = _messages.StringField(7, default='JSON_API_V1')
object_name_prefix = _messages.StringField(8)
payload_content = _messages.StringField(9, default='OBJECT_METADATA')
selfLink = _messages.StringField(10)
topic = _messages.StringField(11)
class Notifications(_messages.Message):
r"""A list of notification subscriptions.
Fields:
items: The list of items.
kind: The kind of item this is. For lists of notifications, this is always
storage#notifications.
"""
items = _messages.MessageField('Notification', 1, repeated=True)
kind = _messages.StringField(2, default='storage#notifications')
class Object(_messages.Message):
r"""An object.
Messages:
CustomerEncryptionValue: Metadata of customer-supplied encryption key, if
the object is encrypted by such a key.
MetadataValue: User-provided metadata, in key/value pairs.
OwnerValue: The owner of the object. This will always be the uploader of
the object.
Fields:
acl: Access controls on the object.
bucket: The name of the bucket containing this object.
cacheControl: Cache-Control directive for the object data.
componentCount: Number of underlying components that make up this object.
Components are accumulated by compose operations.
contentDisposition: Content-Disposition of the object data.
contentEncoding: Content-Encoding of the object data.
contentLanguage: Content-Language of the object data.
contentType: Content-Type of the object data. If contentType is not
specified, object downloads will be served as application/octet-stream.
crc32c: CRC32c checksum, as described in RFC 4960, Appendix B; encoded
using base64 in big-endian byte order. For more information about using
the CRC32c checksum, see Hashes and ETags: Best Practices.
customerEncryption: Metadata of customer-supplied encryption key, if the
object is encrypted by such a key.
etag: HTTP 1.1 Entity tag for the object.
generation: The content generation of this object. Used for object
versioning.
id: The ID of the object.
kind: The kind of item this is. For objects, this is always
storage#object.
md5Hash: MD5 hash of the data; encoded using base64. For more information
about using the MD5 hash, see Hashes and ETags: Best Practices.
mediaLink: Media download link.
metadata: User-provided metadata, in key/value pairs.
metageneration: The version of the metadata for this object at this
generation. Used for preconditions and for detecting changes in
metadata. A metageneration number is only meaningful in the context of a
particular generation of a particular object.
name: The name of this object. Required if not specified by URL parameter.
owner: The owner of the object. This will always be the uploader of the
object.
selfLink: The link to this object.
size: Content-Length of the data in bytes.
storageClass: Storage class of the object.
timeCreated: The creation time of the object in RFC 3339 format.
timeDeleted: The deletion time of the object in RFC 3339 format. Will be
returned if and only if this version of the object has been deleted.
updated: The modification time of the object metadata in RFC 3339 format.
"""
class CustomerEncryptionValue(_messages.Message):
r"""Metadata of customer-supplied encryption key, if the object is
encrypted by such a key.
Fields:
encryptionAlgorithm: The encryption algorithm.
keySha256: SHA256 hash value of the encryption key.
"""
encryptionAlgorithm = _messages.StringField(1)
keySha256 = _messages.StringField(2)
@encoding.MapUnrecognizedFields('additionalProperties')
class MetadataValue(_messages.Message):
r"""User-provided metadata, in key/value pairs.
Messages:
AdditionalProperty: An additional property for a MetadataValue object.
Fields:
additionalProperties: An individual metadata entry.
"""
class AdditionalProperty(_messages.Message):
r"""An additional property for a MetadataValue object.
Fields:
key: Name of the additional property.
value: A string attribute.
"""
key = _messages.StringField(1)
value = _messages.StringField(2)
additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)
class OwnerValue(_messages.Message):
r"""The owner of the object. This will always be the uploader of the
object.
Fields:
entity: The entity, in the form user-userId.
entityId: The ID for the entity.
"""
entity = _messages.StringField(1)
entityId = _messages.StringField(2)
acl = _messages.MessageField('ObjectAccessControl', 1, repeated=True)
bucket = _messages.StringField(2)
cacheControl = _messages.StringField(3)
componentCount = _messages.IntegerField(4, variant=_messages.Variant.INT32)
contentDisposition = _messages.StringField(5)
contentEncoding = _messages.StringField(6)
contentLanguage = _messages.StringField(7)
contentType = _messages.StringField(8)
crc32c = _messages.StringField(9)
customerEncryption = _messages.MessageField('CustomerEncryptionValue', 10)
etag = _messages.StringField(11)
generation = _messages.IntegerField(12)
id = _messages.StringField(13)
kind = _messages.StringField(14, default='storage#object')
md5Hash = _messages.StringField(15)
mediaLink = _messages.StringField(16)
metadata = _messages.MessageField('MetadataValue', 17)
metageneration = _messages.IntegerField(18)
name = _messages.StringField(19)
owner = _messages.MessageField('OwnerValue', 20)
selfLink = _messages.StringField(21)
size = _messages.IntegerField(22, variant=_messages.Variant.UINT64)
storageClass = _messages.StringField(23)
timeCreated = _message_types.DateTimeField(24)
timeDeleted = _message_types.DateTimeField(25)
updated = _message_types.DateTimeField(26)
class ObjectAccessControl(_messages.Message):
r"""An access-control entry.
Messages:
ProjectTeamValue: The project team associated with the entity, if any.
Fields:
bucket: The name of the bucket.
domain: The domain associated with the entity, if any.
email: The email address associated with the entity, if any.
entity: The entity holding the permission, in one of the following forms:
- user-userId - user-email - group-groupId - group-email - domain-
domain - project-team-projectId - allUsers - allAuthenticatedUsers
Examples: - The user liz@example.com would be user-liz@example.com. -
The group example@googlegroups.com would be group-
example@googlegroups.com. - To refer to all members of the Google Apps
for Business domain example.com, the entity would be domain-example.com.
entityId: The ID for the entity, if any.
etag: HTTP 1.1 Entity tag for the access-control entry.
generation: The content generation of the object.
id: The ID of the access-control entry.
kind: The kind of item this is. For object access control entries, this is
always storage#objectAccessControl.
object: The name of the object.
projectTeam: The project team associated with the entity, if any.
role: The access permission for the entity. Can be READER or OWNER.
selfLink: The link to this access-control entry.
"""
class ProjectTeamValue(_messages.Message):
r"""The project team associated with the entity, if any.
Fields:
projectNumber: The project number.
team: The team. Can be owners, editors, or viewers.
"""
projectNumber = _messages.StringField(1)
team = _messages.StringField(2)
bucket = _messages.StringField(1)
domain = _messages.StringField(2)
email = _messages.StringField(3)
entity = _messages.StringField(4)
entityId = _messages.StringField(5)
etag = _messages.StringField(6)
generation = _messages.IntegerField(7)
id = _messages.StringField(8)
kind = _messages.StringField(9, default='storage#objectAccessControl')
object = _messages.StringField(10)
projectTeam = _messages.MessageField('ProjectTeamValue', 11)
role = _messages.StringField(12)
selfLink = _messages.StringField(13)
class ObjectAccessControls(_messages.Message):
r"""An access-control list.
Fields:
items: The list of items.
kind: The kind of item this is. For lists of object access control
entries, this is always storage#objectAccessControls.
"""
items = _messages.MessageField('extra_types.JsonValue', 1, repeated=True)
kind = _messages.StringField(2, default='storage#objectAccessControls')
class Objects(_messages.Message):
r"""A list of objects.
Fields:
items: The list of items.
kind: The kind of item this is. For lists of objects, this is always
storage#objects.
nextPageToken: The continuation token, used to page through large result
sets. Provide this value in a subsequent request to return the next page
of results.
prefixes: The list of prefixes of objects matching-but-not-listed up to
and including the requested delimiter.
"""
items = _messages.MessageField('Object', 1, repeated=True)
kind = _messages.StringField(2, default='storage#objects')
nextPageToken = _messages.StringField(3)
prefixes = _messages.StringField(4, repeated=True)
class Policy(_messages.Message):
r"""A bucket/object IAM policy.
Messages:
BindingsValueListEntry: A BindingsValueListEntry object.
Fields:
bindings: An association between a role, which comes with a set of
permissions, and members who may assume that role.
etag: HTTP 1.1 Entity tag for the policy.
kind: The kind of item this is. For policies, this is always
storage#policy. This field is ignored on input.
resourceId: The ID of the resource to which this policy belongs. Will be
of the form buckets/bucket for buckets, and
buckets/bucket/objects/object for objects. A specific generation may be
specified by appending #generationNumber to the end of the object name,
e.g. buckets/my-bucket/objects/data.txt#17. The current generation can
be denoted with #0. This field is ignored on input.
"""
class BindingsValueListEntry(_messages.Message):
r"""A BindingsValueListEntry object.
Fields:
members: A collection of identifiers for members who may assume the
provided role. Recognized identifiers are as follows: - allUsers - A
special identifier that represents anyone on the internet; with or
without a Google account. - allAuthenticatedUsers - A special
identifier that represents anyone who is authenticated with a Google
account or a service account. - user:emailid - An email address that
represents a specific account. For example, user:alice@gmail.com or
user:joe@example.com. - serviceAccount:emailid - An email address
that represents a service account. For example, serviceAccount:my-
other-app@appspot.gserviceaccount.com . - group:emailid - An email
address that represents a Google group. For example,
group:admins@example.com. - domain:domain - A Google Apps domain
name that represents all the users of that domain. For example,
domain:google.com or domain:example.com. - projectOwner:projectid -
Owners of the given project. For example, projectOwner:my-example-
project - projectEditor:projectid - Editors of the given project.
For example, projectEditor:my-example-project -
projectViewer:projectid - Viewers of the given project. For example,
projectViewer:my-example-project
role: The role to which members belong. Two types of roles are
supported: new IAM roles, which grant permissions that do not map
directly to those provided by ACLs, and legacy IAM roles, which do map
directly to ACL permissions. All roles are of the format
roles/storage.specificRole. The new IAM roles are: -
roles/storage.admin - Full control of Google Cloud Storage resources.
- roles/storage.objectViewer - Read-Only access to Google Cloud
Storage objects. - roles/storage.objectCreator - Access to create
objects in Google Cloud Storage. - roles/storage.objectAdmin - Full
control of Google Cloud Storage objects. The legacy IAM roles are:
- roles/storage.legacyObjectReader - Read-only access to objects
without listing. Equivalent to an ACL entry on an object with the
READER role. - roles/storage.legacyObjectOwner - Read/write access
to existing objects without listing. Equivalent to an ACL entry on an
object with the OWNER role. - roles/storage.legacyBucketReader -
Read access to buckets with object listing. Equivalent to an ACL entry
on a bucket with the READER role. - roles/storage.legacyBucketWriter
- Read access to buckets with object listing/creation/deletion.
Equivalent to an ACL entry on a bucket with the WRITER role. -
roles/storage.legacyBucketOwner - Read and write access to existing
buckets with object listing/creation/deletion. Equivalent to an ACL
entry on a bucket with the OWNER role.
"""
members = _messages.StringField(1, repeated=True)
role = _messages.StringField(2)
bindings = _messages.MessageField('BindingsValueListEntry', 1, repeated=True)
etag = _messages.BytesField(2)
kind = _messages.StringField(3, default='storage#policy')
resourceId = _messages.StringField(4)
class RewriteResponse(_messages.Message):
r"""A rewrite response.
Fields:
| python | Apache-2.0 | 1b14f58b3c895f542c724028dd0fb8ae0d816510 | 2026-01-05T07:12:01.390580Z | true |
google/apitools | https://github.com/google/apitools/blob/1b14f58b3c895f542c724028dd0fb8ae0d816510/samples/storage_sample/storage_v1/__init__.py | samples/storage_sample/storage_v1/__init__.py | """Package marker file."""
from __future__ import absolute_import
import pkgutil
__path__ = pkgutil.extend_path(__path__, __name__)
| python | Apache-2.0 | 1b14f58b3c895f542c724028dd0fb8ae0d816510 | 2026-01-05T07:12:01.390580Z | false |
google/apitools | https://github.com/google/apitools/blob/1b14f58b3c895f542c724028dd0fb8ae0d816510/samples/storage_sample/storage_v1/storage_v1_client.py | samples/storage_sample/storage_v1/storage_v1_client.py | """Generated client library for storage version v1."""
# NOTE: This file is autogenerated and should not be edited by hand.
from __future__ import absolute_import
from apitools.base.py import base_api
from samples.storage_sample.storage_v1 import storage_v1_messages as messages
class StorageV1(base_api.BaseApiClient):
"""Generated client library for service storage version v1."""
MESSAGES_MODULE = messages
BASE_URL = 'https://www.googleapis.com/storage/v1/'
MTLS_BASE_URL = 'https://www.mtls.googleapis.com/storage/v1/'
_PACKAGE = 'storage'
_SCOPES = ['https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/cloud-platform.read-only', 'https://www.googleapis.com/auth/devstorage.full_control', 'https://www.googleapis.com/auth/devstorage.read_only', 'https://www.googleapis.com/auth/devstorage.read_write']
_VERSION = 'v1'
_CLIENT_ID = 'CLIENT_ID'
_CLIENT_SECRET = 'CLIENT_SECRET'
_USER_AGENT = 'x_Tw5K8nnjoRAqULM9PFAC2b'
_CLIENT_CLASS_NAME = 'StorageV1'
_URL_VERSION = 'v1'
_API_KEY = None
def __init__(self, url='', credentials=None,
get_credentials=True, http=None, model=None,
log_request=False, log_response=False,
credentials_args=None, default_global_params=None,
additional_http_headers=None, response_encoding=None):
"""Create a new storage handle."""
url = url or self.BASE_URL
super(StorageV1, self).__init__(
url, credentials=credentials,
get_credentials=get_credentials, http=http, model=model,
log_request=log_request, log_response=log_response,
credentials_args=credentials_args,
default_global_params=default_global_params,
additional_http_headers=additional_http_headers,
response_encoding=response_encoding)
self.bucketAccessControls = self.BucketAccessControlsService(self)
self.buckets = self.BucketsService(self)
self.channels = self.ChannelsService(self)
self.defaultObjectAccessControls = self.DefaultObjectAccessControlsService(self)
self.notifications = self.NotificationsService(self)
self.objectAccessControls = self.ObjectAccessControlsService(self)
self.objects = self.ObjectsService(self)
class BucketAccessControlsService(base_api.BaseApiService):
    """Service wrapper for the bucketAccessControls resource.

    Each public method is a thin dispatcher: it looks up its declarative
    ApiMethodInfo (attached below as ``<Method>.method_config``) and hands the
    request to ``BaseApiService._RunMethod``.
    """

    _NAME = 'bucketAccessControls'

    def __init__(self, client):
        super(StorageV1.BucketAccessControlsService, self).__init__(client)
        self._upload_configs = {}

    def Delete(self, request, global_params=None):
        r"""Permanently deletes the ACL entry for the specified entity on the specified bucket.

        Args:
          request: (StorageBucketAccessControlsDeleteRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (StorageBucketAccessControlsDeleteResponse) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Delete'), request, global_params=global_params)

    Delete.method_config = lambda: base_api.ApiMethodInfo(
        http_method='DELETE',
        method_id='storage.bucketAccessControls.delete',
        ordered_params=['bucket', 'entity'], path_params=['bucket', 'entity'],
        query_params=[], relative_path='b/{bucket}/acl/{entity}',
        request_field='',
        request_type_name='StorageBucketAccessControlsDeleteRequest',
        response_type_name='StorageBucketAccessControlsDeleteResponse',
        supports_download=False,
    )

    def Get(self, request, global_params=None):
        r"""Returns the ACL entry for the specified entity on the specified bucket.

        Args:
          request: (StorageBucketAccessControlsGetRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (BucketAccessControl) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Get'), request, global_params=global_params)

    Get.method_config = lambda: base_api.ApiMethodInfo(
        http_method='GET',
        method_id='storage.bucketAccessControls.get',
        ordered_params=['bucket', 'entity'], path_params=['bucket', 'entity'],
        query_params=[], relative_path='b/{bucket}/acl/{entity}',
        request_field='',
        request_type_name='StorageBucketAccessControlsGetRequest',
        response_type_name='BucketAccessControl',
        supports_download=False,
    )

    def Insert(self, request, global_params=None):
        r"""Creates a new ACL entry on the specified bucket.

        Args:
          request: (BucketAccessControl) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (BucketAccessControl) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Insert'), request, global_params=global_params)

    Insert.method_config = lambda: base_api.ApiMethodInfo(
        http_method='POST',
        method_id='storage.bucketAccessControls.insert',
        ordered_params=['bucket'], path_params=['bucket'],
        query_params=[], relative_path='b/{bucket}/acl',
        request_field='<request>',
        request_type_name='BucketAccessControl',
        response_type_name='BucketAccessControl',
        supports_download=False,
    )

    def List(self, request, global_params=None):
        r"""Retrieves ACL entries on the specified bucket.

        Args:
          request: (StorageBucketAccessControlsListRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (BucketAccessControls) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('List'), request, global_params=global_params)

    List.method_config = lambda: base_api.ApiMethodInfo(
        http_method='GET',
        method_id='storage.bucketAccessControls.list',
        ordered_params=['bucket'], path_params=['bucket'],
        query_params=[], relative_path='b/{bucket}/acl',
        request_field='',
        request_type_name='StorageBucketAccessControlsListRequest',
        response_type_name='BucketAccessControls',
        supports_download=False,
    )

    def Patch(self, request, global_params=None):
        r"""Updates an ACL entry on the specified bucket. This method supports patch semantics.

        Args:
          request: (BucketAccessControl) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (BucketAccessControl) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Patch'), request, global_params=global_params)

    Patch.method_config = lambda: base_api.ApiMethodInfo(
        http_method='PATCH',
        method_id='storage.bucketAccessControls.patch',
        ordered_params=['bucket', 'entity'], path_params=['bucket', 'entity'],
        query_params=[], relative_path='b/{bucket}/acl/{entity}',
        request_field='<request>',
        request_type_name='BucketAccessControl',
        response_type_name='BucketAccessControl',
        supports_download=False,
    )

    def Update(self, request, global_params=None):
        r"""Updates an ACL entry on the specified bucket.

        Args:
          request: (BucketAccessControl) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (BucketAccessControl) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Update'), request, global_params=global_params)

    Update.method_config = lambda: base_api.ApiMethodInfo(
        http_method='PUT',
        method_id='storage.bucketAccessControls.update',
        ordered_params=['bucket', 'entity'], path_params=['bucket', 'entity'],
        query_params=[], relative_path='b/{bucket}/acl/{entity}',
        request_field='<request>',
        request_type_name='BucketAccessControl',
        response_type_name='BucketAccessControl',
        supports_download=False,
    )
class BucketsService(base_api.BaseApiService):
    """Service wrapper for the buckets resource.

    Each public method dispatches its request through
    ``BaseApiService._RunMethod`` using the declarative ApiMethodInfo
    attached below as ``<Method>.method_config``.
    """

    _NAME = 'buckets'

    def __init__(self, client):
        super(StorageV1.BucketsService, self).__init__(client)
        self._upload_configs = {}

    def Delete(self, request, global_params=None):
        r"""Permanently deletes an empty bucket.

        Args:
          request: (StorageBucketsDeleteRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (StorageBucketsDeleteResponse) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Delete'), request, global_params=global_params)

    Delete.method_config = lambda: base_api.ApiMethodInfo(
        http_method='DELETE',
        method_id='storage.buckets.delete',
        ordered_params=['bucket'], path_params=['bucket'],
        query_params=['ifMetagenerationMatch', 'ifMetagenerationNotMatch'],
        relative_path='b/{bucket}',
        request_field='',
        request_type_name='StorageBucketsDeleteRequest',
        response_type_name='StorageBucketsDeleteResponse',
        supports_download=False,
    )

    def Get(self, request, global_params=None):
        r"""Returns metadata for the specified bucket.

        Args:
          request: (StorageBucketsGetRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (Bucket) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Get'), request, global_params=global_params)

    Get.method_config = lambda: base_api.ApiMethodInfo(
        http_method='GET',
        method_id='storage.buckets.get',
        ordered_params=['bucket'], path_params=['bucket'],
        query_params=['ifMetagenerationMatch', 'ifMetagenerationNotMatch', 'projection'],
        relative_path='b/{bucket}',
        request_field='',
        request_type_name='StorageBucketsGetRequest',
        response_type_name='Bucket',
        supports_download=False,
    )

    def GetIamPolicy(self, request, global_params=None):
        r"""Returns an IAM policy for the specified bucket.

        Args:
          request: (StorageBucketsGetIamPolicyRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (Policy) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('GetIamPolicy'), request,
            global_params=global_params)

    GetIamPolicy.method_config = lambda: base_api.ApiMethodInfo(
        http_method='GET',
        method_id='storage.buckets.getIamPolicy',
        ordered_params=['bucket'], path_params=['bucket'],
        query_params=[], relative_path='b/{bucket}/iam',
        request_field='',
        request_type_name='StorageBucketsGetIamPolicyRequest',
        response_type_name='Policy',
        supports_download=False,
    )

    def Insert(self, request, global_params=None):
        r"""Creates a new bucket.

        Args:
          request: (StorageBucketsInsertRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (Bucket) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Insert'), request, global_params=global_params)

    Insert.method_config = lambda: base_api.ApiMethodInfo(
        http_method='POST',
        method_id='storage.buckets.insert',
        ordered_params=['project'], path_params=[],
        query_params=['predefinedAcl', 'predefinedDefaultObjectAcl', 'project', 'projection'],
        relative_path='b',
        request_field='bucket',
        request_type_name='StorageBucketsInsertRequest',
        response_type_name='Bucket',
        supports_download=False,
    )

    def List(self, request, global_params=None):
        r"""Retrieves a list of buckets for a given project.

        Args:
          request: (StorageBucketsListRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (Buckets) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('List'), request, global_params=global_params)

    List.method_config = lambda: base_api.ApiMethodInfo(
        http_method='GET',
        method_id='storage.buckets.list',
        ordered_params=['project'], path_params=[],
        query_params=['maxResults', 'pageToken', 'prefix', 'project', 'projection'],
        relative_path='b',
        request_field='',
        request_type_name='StorageBucketsListRequest',
        response_type_name='Buckets',
        supports_download=False,
    )

    def Patch(self, request, global_params=None):
        r"""Updates a bucket. This method supports patch semantics.

        Args:
          request: (StorageBucketsPatchRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (Bucket) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Patch'), request, global_params=global_params)

    Patch.method_config = lambda: base_api.ApiMethodInfo(
        http_method='PATCH',
        method_id='storage.buckets.patch',
        ordered_params=['bucket'], path_params=['bucket'],
        query_params=['ifMetagenerationMatch', 'ifMetagenerationNotMatch', 'predefinedAcl', 'predefinedDefaultObjectAcl', 'projection'],
        relative_path='b/{bucket}',
        request_field='bucketResource',
        request_type_name='StorageBucketsPatchRequest',
        response_type_name='Bucket',
        supports_download=False,
    )

    def SetIamPolicy(self, request, global_params=None):
        r"""Updates an IAM policy for the specified bucket.

        Args:
          request: (StorageBucketsSetIamPolicyRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (Policy) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('SetIamPolicy'), request,
            global_params=global_params)

    SetIamPolicy.method_config = lambda: base_api.ApiMethodInfo(
        http_method='PUT',
        method_id='storage.buckets.setIamPolicy',
        ordered_params=['bucket'], path_params=['bucket'],
        query_params=[], relative_path='b/{bucket}/iam',
        request_field='policy',
        request_type_name='StorageBucketsSetIamPolicyRequest',
        response_type_name='Policy',
        supports_download=False,
    )

    def TestIamPermissions(self, request, global_params=None):
        r"""Tests a set of permissions on the given bucket to see which, if any, are held by the caller.

        Args:
          request: (StorageBucketsTestIamPermissionsRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (TestIamPermissionsResponse) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('TestIamPermissions'), request,
            global_params=global_params)

    TestIamPermissions.method_config = lambda: base_api.ApiMethodInfo(
        http_method='GET',
        method_id='storage.buckets.testIamPermissions',
        ordered_params=['bucket', 'permissions'], path_params=['bucket'],
        query_params=['permissions'],
        relative_path='b/{bucket}/iam/testPermissions',
        request_field='',
        request_type_name='StorageBucketsTestIamPermissionsRequest',
        response_type_name='TestIamPermissionsResponse',
        supports_download=False,
    )

    def Update(self, request, global_params=None):
        r"""Updates a bucket.

        Args:
          request: (StorageBucketsUpdateRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (Bucket) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Update'), request, global_params=global_params)

    Update.method_config = lambda: base_api.ApiMethodInfo(
        http_method='PUT',
        method_id='storage.buckets.update',
        ordered_params=['bucket'], path_params=['bucket'],
        query_params=['ifMetagenerationMatch', 'ifMetagenerationNotMatch', 'predefinedAcl', 'predefinedDefaultObjectAcl', 'projection'],
        relative_path='b/{bucket}',
        request_field='bucketResource',
        request_type_name='StorageBucketsUpdateRequest',
        response_type_name='Bucket',
        supports_download=False,
    )
class ChannelsService(base_api.BaseApiService):
    """Service wrapper for the channels resource."""

    _NAME = 'channels'

    def __init__(self, client):
        super(StorageV1.ChannelsService, self).__init__(client)
        self._upload_configs = {}

    def Stop(self, request, global_params=None):
        r"""Stop watching resources through this channel.

        Args:
          request: (Channel) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (StorageChannelsStopResponse) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Stop'), request, global_params=global_params)

    Stop.method_config = lambda: base_api.ApiMethodInfo(
        http_method='POST',
        method_id='storage.channels.stop',
        ordered_params=[], path_params=[],
        query_params=[], relative_path='channels/stop',
        request_field='<request>',
        request_type_name='Channel',
        response_type_name='StorageChannelsStopResponse',
        supports_download=False,
    )
class DefaultObjectAccessControlsService(base_api.BaseApiService):
    """Service wrapper for the defaultObjectAccessControls resource.

    Public methods dispatch through ``BaseApiService._RunMethod`` using the
    ApiMethodInfo attached below as ``<Method>.method_config``.
    """

    _NAME = 'defaultObjectAccessControls'

    def __init__(self, client):
        super(StorageV1.DefaultObjectAccessControlsService, self).__init__(client)
        self._upload_configs = {}

    def Delete(self, request, global_params=None):
        r"""Permanently deletes the default object ACL entry for the specified entity on the specified bucket.

        Args:
          request: (StorageDefaultObjectAccessControlsDeleteRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (StorageDefaultObjectAccessControlsDeleteResponse) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Delete'), request, global_params=global_params)

    Delete.method_config = lambda: base_api.ApiMethodInfo(
        http_method='DELETE',
        method_id='storage.defaultObjectAccessControls.delete',
        ordered_params=['bucket', 'entity'], path_params=['bucket', 'entity'],
        query_params=[], relative_path='b/{bucket}/defaultObjectAcl/{entity}',
        request_field='',
        request_type_name='StorageDefaultObjectAccessControlsDeleteRequest',
        response_type_name='StorageDefaultObjectAccessControlsDeleteResponse',
        supports_download=False,
    )

    def Get(self, request, global_params=None):
        r"""Returns the default object ACL entry for the specified entity on the specified bucket.

        Args:
          request: (StorageDefaultObjectAccessControlsGetRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (ObjectAccessControl) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Get'), request, global_params=global_params)

    Get.method_config = lambda: base_api.ApiMethodInfo(
        http_method='GET',
        method_id='storage.defaultObjectAccessControls.get',
        ordered_params=['bucket', 'entity'], path_params=['bucket', 'entity'],
        query_params=[], relative_path='b/{bucket}/defaultObjectAcl/{entity}',
        request_field='',
        request_type_name='StorageDefaultObjectAccessControlsGetRequest',
        response_type_name='ObjectAccessControl',
        supports_download=False,
    )

    def Insert(self, request, global_params=None):
        r"""Creates a new default object ACL entry on the specified bucket.

        Args:
          request: (ObjectAccessControl) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (ObjectAccessControl) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Insert'), request, global_params=global_params)

    Insert.method_config = lambda: base_api.ApiMethodInfo(
        http_method='POST',
        method_id='storage.defaultObjectAccessControls.insert',
        ordered_params=['bucket'], path_params=['bucket'],
        query_params=[], relative_path='b/{bucket}/defaultObjectAcl',
        request_field='<request>',
        request_type_name='ObjectAccessControl',
        response_type_name='ObjectAccessControl',
        supports_download=False,
    )

    def List(self, request, global_params=None):
        r"""Retrieves default object ACL entries on the specified bucket.

        Args:
          request: (StorageDefaultObjectAccessControlsListRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (ObjectAccessControls) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('List'), request, global_params=global_params)

    List.method_config = lambda: base_api.ApiMethodInfo(
        http_method='GET',
        method_id='storage.defaultObjectAccessControls.list',
        ordered_params=['bucket'], path_params=['bucket'],
        query_params=['ifMetagenerationMatch', 'ifMetagenerationNotMatch'],
        relative_path='b/{bucket}/defaultObjectAcl',
        request_field='',
        request_type_name='StorageDefaultObjectAccessControlsListRequest',
        response_type_name='ObjectAccessControls',
        supports_download=False,
    )

    def Patch(self, request, global_params=None):
        r"""Updates a default object ACL entry on the specified bucket. This method supports patch semantics.

        Args:
          request: (ObjectAccessControl) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (ObjectAccessControl) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Patch'), request, global_params=global_params)

    Patch.method_config = lambda: base_api.ApiMethodInfo(
        http_method='PATCH',
        method_id='storage.defaultObjectAccessControls.patch',
        ordered_params=['bucket', 'entity'], path_params=['bucket', 'entity'],
        query_params=[], relative_path='b/{bucket}/defaultObjectAcl/{entity}',
        request_field='<request>',
        request_type_name='ObjectAccessControl',
        response_type_name='ObjectAccessControl',
        supports_download=False,
    )

    def Update(self, request, global_params=None):
        r"""Updates a default object ACL entry on the specified bucket.

        Args:
          request: (ObjectAccessControl) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (ObjectAccessControl) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Update'), request, global_params=global_params)

    Update.method_config = lambda: base_api.ApiMethodInfo(
        http_method='PUT',
        method_id='storage.defaultObjectAccessControls.update',
        ordered_params=['bucket', 'entity'], path_params=['bucket', 'entity'],
        query_params=[], relative_path='b/{bucket}/defaultObjectAcl/{entity}',
        request_field='<request>',
        request_type_name='ObjectAccessControl',
        response_type_name='ObjectAccessControl',
        supports_download=False,
    )
class NotificationsService(base_api.BaseApiService):
    """Service wrapper for the notifications resource."""

    _NAME = 'notifications'

    def __init__(self, client):
        super(StorageV1.NotificationsService, self).__init__(client)
        self._upload_configs = {}

    def Delete(self, request, global_params=None):
        r"""Permanently deletes a notification subscription.

        Args:
          request: (StorageNotificationsDeleteRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (StorageNotificationsDeleteResponse) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Delete'), request, global_params=global_params)

    Delete.method_config = lambda: base_api.ApiMethodInfo(
        http_method='DELETE',
        method_id='storage.notifications.delete',
        ordered_params=['notification'], path_params=['notification'],
        query_params=[], relative_path='notifications/{notification}',
        request_field='',
        request_type_name='StorageNotificationsDeleteRequest',
        response_type_name='StorageNotificationsDeleteResponse',
        supports_download=False,
    )

    def Get(self, request, global_params=None):
        r"""View a notification configuration.

        Args:
          request: (StorageNotificationsGetRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (Notification) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Get'), request, global_params=global_params)

    Get.method_config = lambda: base_api.ApiMethodInfo(
        http_method='GET',
        method_id='storage.notifications.get',
        ordered_params=['notification'], path_params=['notification'],
        query_params=[], relative_path='notifications/{notification}',
        request_field='',
        request_type_name='StorageNotificationsGetRequest',
        response_type_name='Notification',
        supports_download=False,
    )

    def Insert(self, request, global_params=None):
        r"""Creates a notification subscription for a given bucket.

        Args:
          request: (Notification) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (Notification) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Insert'), request, global_params=global_params)

    Insert.method_config = lambda: base_api.ApiMethodInfo(
        http_method='POST',
        method_id='storage.notifications.insert',
        ordered_params=[], path_params=[],
        query_params=[], relative_path='notifications',
        request_field='<request>',
        request_type_name='Notification',
        response_type_name='Notification',
        supports_download=False,
    )

    def List(self, request, global_params=None):
        r"""Retrieves a list of notification subscriptions for a given bucket.

        Args:
          request: (StorageNotificationsListRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (Notifications) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('List'), request, global_params=global_params)

    List.method_config = lambda: base_api.ApiMethodInfo(
        http_method='GET',
        method_id='storage.notifications.list',
        ordered_params=['bucket'], path_params=[],
        query_params=['bucket'], relative_path='notifications',
        request_field='',
        request_type_name='StorageNotificationsListRequest',
        response_type_name='Notifications',
        supports_download=False,
    )
class ObjectAccessControlsService(base_api.BaseApiService):
    """Service wrapper for the objectAccessControls resource."""

    _NAME = 'objectAccessControls'

    def __init__(self, client):
        super(StorageV1.ObjectAccessControlsService, self).__init__(client)
        self._upload_configs = {}

    def Delete(self, request, global_params=None):
        r"""Permanently deletes the ACL entry for the specified entity on the specified object.

        Args:
          request: (StorageObjectAccessControlsDeleteRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (StorageObjectAccessControlsDeleteResponse) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Delete'), request, global_params=global_params)

    Delete.method_config = lambda: base_api.ApiMethodInfo(
        http_method='DELETE',
        method_id='storage.objectAccessControls.delete',
        ordered_params=['bucket', 'object', 'entity'],
        path_params=['bucket', 'entity', 'object'],
        query_params=['generation'],
        relative_path='b/{bucket}/o/{object}/acl/{entity}',
        request_field='',
        request_type_name='StorageObjectAccessControlsDeleteRequest',
        response_type_name='StorageObjectAccessControlsDeleteResponse',
        supports_download=False,
    )

    def Get(self, request, global_params=None):
        r"""Returns the ACL entry for the specified entity on the specified object.

        Args:
          request: (StorageObjectAccessControlsGetRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (ObjectAccessControl) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Get'), request, global_params=global_params)

    Get.method_config = lambda: base_api.ApiMethodInfo(
        http_method='GET',
        method_id='storage.objectAccessControls.get',
        ordered_params=['bucket', 'object', 'entity'],
        path_params=['bucket', 'entity', 'object'],
        query_params=['generation'],
        relative_path='b/{bucket}/o/{object}/acl/{entity}',
        request_field='',
        request_type_name='StorageObjectAccessControlsGetRequest',
        response_type_name='ObjectAccessControl',
        supports_download=False,
    )

    def Insert(self, request, global_params=None):
        r"""Creates a new ACL entry on the specified object.

        Args:
          request: (StorageObjectAccessControlsInsertRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (ObjectAccessControl) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Insert'), request, global_params=global_params)

    Insert.method_config = lambda: base_api.ApiMethodInfo(
        http_method='POST',
        method_id='storage.objectAccessControls.insert',
        ordered_params=['bucket', 'object'], path_params=['bucket', 'object'],
        query_params=['generation'],
        relative_path='b/{bucket}/o/{object}/acl',
        request_field='objectAccessControl',
        request_type_name='StorageObjectAccessControlsInsertRequest',
        response_type_name='ObjectAccessControl',
        supports_download=False,
    )
def List(self, request, global_params=None):
r"""Retrieves ACL entries on the specified object.
Args:
request: (StorageObjectAccessControlsListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(ObjectAccessControls) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
| python | Apache-2.0 | 1b14f58b3c895f542c724028dd0fb8ae0d816510 | 2026-01-05T07:12:01.390580Z | true |
google/apitools | https://github.com/google/apitools/blob/1b14f58b3c895f542c724028dd0fb8ae0d816510/samples/fusiontables_sample/__init__.py | samples/fusiontables_sample/__init__.py | # Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| python | Apache-2.0 | 1b14f58b3c895f542c724028dd0fb8ae0d816510 | 2026-01-05T07:12:01.390580Z | false |
google/apitools | https://github.com/google/apitools/blob/1b14f58b3c895f542c724028dd0fb8ae0d816510/samples/fusiontables_sample/fusiontables_v1/fusiontables_v1_client.py | samples/fusiontables_sample/fusiontables_v1/fusiontables_v1_client.py | """Generated client library for fusiontables version v1."""
# NOTE: This file is autogenerated and should not be edited by hand.
from __future__ import absolute_import
from apitools.base.py import base_api
from samples.fusiontables_sample.fusiontables_v1 import fusiontables_v1_messages as messages
class FusiontablesV1(base_api.BaseApiClient):
    """Generated client library for service fusiontables version v1."""

    # NOTE: autogenerated client (see file header); these constants describe
    # the service endpoint and are consumed by base_api.BaseApiClient.
    MESSAGES_MODULE = messages
    BASE_URL = 'https://www.googleapis.com/fusiontables/v1/'
    MTLS_BASE_URL = ''
    _PACKAGE = 'fusiontables'
    _SCOPES = ['https://www.googleapis.com/auth/fusiontables', 'https://www.googleapis.com/auth/fusiontables.readonly']
    _VERSION = 'v1'
    # Placeholder OAuth client values; callers supply real credentials.
    _CLIENT_ID = 'CLIENT_ID'
    _CLIENT_SECRET = 'CLIENT_SECRET'
    _USER_AGENT = 'x_Tw5K8nnjoRAqULM9PFAC2b'
    _CLIENT_CLASS_NAME = 'FusiontablesV1'
    _URL_VERSION = 'v1'
    _API_KEY = None

    def __init__(self, url='', credentials=None,
                 get_credentials=True, http=None, model=None,
                 log_request=False, log_response=False,
                 credentials_args=None, default_global_params=None,
                 additional_http_headers=None, response_encoding=None):
        """Create a new fusiontables handle."""
        # Fall back to the public endpoint when no explicit URL is given.
        url = url or self.BASE_URL
        super(FusiontablesV1, self).__init__(
            url, credentials=credentials,
            get_credentials=get_credentials, http=http, model=model,
            log_request=log_request, log_response=log_response,
            credentials_args=credentials_args,
            default_global_params=default_global_params,
            additional_http_headers=additional_http_headers,
            response_encoding=response_encoding)
        # One service wrapper per API resource collection.
        self.column = self.ColumnService(self)
        self.query = self.QueryService(self)
        self.style = self.StyleService(self)
        self.table = self.TableService(self)
        self.task = self.TaskService(self)
        self.template = self.TemplateService(self)
class ColumnService(base_api.BaseApiService):
"""Service class for the column resource."""
_NAME = 'column'
def __init__(self, client):
super(FusiontablesV1.ColumnService, self).__init__(client)
self._upload_configs = {
}
def Delete(self, request, global_params=None):
r"""Deletes the column.
Args:
request: (FusiontablesColumnDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(FusiontablesColumnDeleteResponse) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
http_method='DELETE',
method_id='fusiontables.column.delete',
ordered_params=['tableId', 'columnId'],
path_params=['columnId', 'tableId'],
query_params=[],
relative_path='tables/{tableId}/columns/{columnId}',
request_field='',
request_type_name='FusiontablesColumnDeleteRequest',
response_type_name='FusiontablesColumnDeleteResponse',
supports_download=False,
)
def Get(self, request, global_params=None):
r"""Retrieves a specific column by its id.
Args:
request: (FusiontablesColumnGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Column) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
http_method='GET',
method_id='fusiontables.column.get',
ordered_params=['tableId', 'columnId'],
path_params=['columnId', 'tableId'],
query_params=[],
relative_path='tables/{tableId}/columns/{columnId}',
request_field='',
request_type_name='FusiontablesColumnGetRequest',
response_type_name='Column',
supports_download=False,
)
def Insert(self, request, global_params=None):
r"""Adds a new column to the table.
Args:
request: (FusiontablesColumnInsertRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Column) The response message.
"""
config = self.GetMethodConfig('Insert')
return self._RunMethod(
config, request, global_params=global_params)
Insert.method_config = lambda: base_api.ApiMethodInfo(
http_method='POST',
method_id='fusiontables.column.insert',
ordered_params=['tableId'],
path_params=['tableId'],
query_params=[],
relative_path='tables/{tableId}/columns',
request_field='column',
request_type_name='FusiontablesColumnInsertRequest',
response_type_name='Column',
supports_download=False,
)
def List(self, request, global_params=None):
r"""Retrieves a list of columns.
Args:
request: (FusiontablesColumnListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(ColumnList) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
http_method='GET',
method_id='fusiontables.column.list',
ordered_params=['tableId'],
path_params=['tableId'],
query_params=['maxResults', 'pageToken'],
relative_path='tables/{tableId}/columns',
request_field='',
request_type_name='FusiontablesColumnListRequest',
response_type_name='ColumnList',
supports_download=False,
)
def Patch(self, request, global_params=None):
  r"""Updates the name or type of an existing column. This method supports patch semantics.

  Args:
    request: (FusiontablesColumnPatchRequest) input message
    global_params: (StandardQueryParameters, default: None) global arguments

  Returns:
    (Column) The response message.
  """
  config = self.GetMethodConfig('Patch')
  return self._RunMethod(
      config, request, global_params=global_params)

# Generated code: wire configuration attached as a function attribute.
# request_field='column' means only the 'column' sub-message of the request
# is sent as the HTTP body (PATCH semantics).
Patch.method_config = lambda: base_api.ApiMethodInfo(
    http_method='PATCH',
    method_id='fusiontables.column.patch',
    ordered_params=['tableId', 'columnId'],
    path_params=['columnId', 'tableId'],
    query_params=[],
    relative_path='tables/{tableId}/columns/{columnId}',
    request_field='column',
    request_type_name='FusiontablesColumnPatchRequest',
    response_type_name='Column',
    supports_download=False,
)
def Update(self, request, global_params=None):
  r"""Updates the name or type of an existing column.

  Args:
    request: (FusiontablesColumnUpdateRequest) input message
    global_params: (StandardQueryParameters, default: None) global arguments

  Returns:
    (Column) The response message.
  """
  config = self.GetMethodConfig('Update')
  return self._RunMethod(
      config, request, global_params=global_params)

# Generated code: wire configuration attached as a function attribute.
# Full-replacement counterpart of Patch (PUT instead of PATCH); the body
# is the 'column' sub-message of the request.
Update.method_config = lambda: base_api.ApiMethodInfo(
    http_method='PUT',
    method_id='fusiontables.column.update',
    ordered_params=['tableId', 'columnId'],
    path_params=['columnId', 'tableId'],
    query_params=[],
    relative_path='tables/{tableId}/columns/{columnId}',
    request_field='column',
    request_type_name='FusiontablesColumnUpdateRequest',
    response_type_name='Column',
    supports_download=False,
)
class QueryService(base_api.BaseApiService):
  """Service class for the query resource.

  Wraps the Fusion Tables SQL endpoint ('query' relative path) with both a
  POST (Sql) and a GET (SqlGet) entry point. NOTE: autogenerated code; each
  method's wire configuration is attached afterwards as a ``method_config``
  function attribute whose lambda defers ApiMethodInfo construction.
  """

  _NAME = 'query'

  def __init__(self, client):
    super(FusiontablesV1.QueryService, self).__init__(client)
    # This service exposes no media-upload methods.
    self._upload_configs = {
        }

  def Sql(self, request, global_params=None, download=None):
    r"""Executes an SQL SELECT/INSERT/UPDATE/DELETE/SHOW/DESCRIBE/CREATE statement.

    Args:
      request: (FusiontablesQuerySqlRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
      download: (Download, default: None) If present, download
          data from the request via this stream.

    Returns:
      (Sqlresponse) The response message.
    """
    config = self.GetMethodConfig('Sql')
    return self._RunMethod(
        config, request, global_params=global_params,
        download=download)

  # POST variant: 'sql' is a required query parameter, not a request body.
  Sql.method_config = lambda: base_api.ApiMethodInfo(
      http_method='POST',
      method_id='fusiontables.query.sql',
      ordered_params=['sql'],
      path_params=[],
      query_params=['hdrs', 'sql', 'typed'],
      relative_path='query',
      request_field='',
      request_type_name='FusiontablesQuerySqlRequest',
      response_type_name='Sqlresponse',
      supports_download=True,
      )

  def SqlGet(self, request, global_params=None, download=None):
    r"""Executes an SQL SELECT/SHOW/DESCRIBE statement.

    Args:
      request: (FusiontablesQuerySqlGetRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
      download: (Download, default: None) If present, download
          data from the request via this stream.

    Returns:
      (Sqlresponse) The response message.
    """
    config = self.GetMethodConfig('SqlGet')
    return self._RunMethod(
        config, request, global_params=global_params,
        download=download)

  # GET variant of Sql, restricted (per the docstring) to read-only statements.
  SqlGet.method_config = lambda: base_api.ApiMethodInfo(
      http_method='GET',
      method_id='fusiontables.query.sqlGet',
      ordered_params=['sql'],
      path_params=[],
      query_params=['hdrs', 'sql', 'typed'],
      relative_path='query',
      request_field='',
      request_type_name='FusiontablesQuerySqlGetRequest',
      response_type_name='Sqlresponse',
      supports_download=True,
      )
class StyleService(base_api.BaseApiService):
  """Service class for the style resource.

  CRUD operations on per-table map styles under
  'tables/{tableId}/styles[/{styleId}]'. NOTE: autogenerated code; each
  method's wire configuration is attached afterwards as a ``method_config``
  function attribute whose lambda defers ApiMethodInfo construction.
  request_field='<request>' means the whole request message is the HTTP body.
  """

  _NAME = 'style'

  def __init__(self, client):
    super(FusiontablesV1.StyleService, self).__init__(client)
    # This service exposes no media-upload methods.
    self._upload_configs = {
        }

  def Delete(self, request, global_params=None):
    r"""Deletes a style.

    Args:
      request: (FusiontablesStyleDeleteRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (FusiontablesStyleDeleteResponse) The response message.
    """
    config = self.GetMethodConfig('Delete')
    return self._RunMethod(
        config, request, global_params=global_params)

  Delete.method_config = lambda: base_api.ApiMethodInfo(
      http_method='DELETE',
      method_id='fusiontables.style.delete',
      ordered_params=['tableId', 'styleId'],
      path_params=['styleId', 'tableId'],
      query_params=[],
      relative_path='tables/{tableId}/styles/{styleId}',
      request_field='',
      request_type_name='FusiontablesStyleDeleteRequest',
      response_type_name='FusiontablesStyleDeleteResponse',
      supports_download=False,
      )

  def Get(self, request, global_params=None):
    r"""Gets a specific style.

    Args:
      request: (FusiontablesStyleGetRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (StyleSetting) The response message.
    """
    config = self.GetMethodConfig('Get')
    return self._RunMethod(
        config, request, global_params=global_params)

  Get.method_config = lambda: base_api.ApiMethodInfo(
      http_method='GET',
      method_id='fusiontables.style.get',
      ordered_params=['tableId', 'styleId'],
      path_params=['styleId', 'tableId'],
      query_params=[],
      relative_path='tables/{tableId}/styles/{styleId}',
      request_field='',
      request_type_name='FusiontablesStyleGetRequest',
      response_type_name='StyleSetting',
      supports_download=False,
      )

  def Insert(self, request, global_params=None):
    r"""Adds a new style for the table.

    Args:
      request: (StyleSetting) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (StyleSetting) The response message.
    """
    config = self.GetMethodConfig('Insert')
    return self._RunMethod(
        config, request, global_params=global_params)

  # '<request>': the StyleSetting message itself is sent as the body.
  Insert.method_config = lambda: base_api.ApiMethodInfo(
      http_method='POST',
      method_id='fusiontables.style.insert',
      ordered_params=['tableId'],
      path_params=['tableId'],
      query_params=[],
      relative_path='tables/{tableId}/styles',
      request_field='<request>',
      request_type_name='StyleSetting',
      response_type_name='StyleSetting',
      supports_download=False,
      )

  def List(self, request, global_params=None):
    r"""Retrieves a list of styles.

    Args:
      request: (FusiontablesStyleListRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (StyleSettingList) The response message.
    """
    config = self.GetMethodConfig('List')
    return self._RunMethod(
        config, request, global_params=global_params)

  List.method_config = lambda: base_api.ApiMethodInfo(
      http_method='GET',
      method_id='fusiontables.style.list',
      ordered_params=['tableId'],
      path_params=['tableId'],
      query_params=['maxResults', 'pageToken'],
      relative_path='tables/{tableId}/styles',
      request_field='',
      request_type_name='FusiontablesStyleListRequest',
      response_type_name='StyleSettingList',
      supports_download=False,
      )

  def Patch(self, request, global_params=None):
    r"""Updates an existing style. This method supports patch semantics.

    Args:
      request: (StyleSetting) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (StyleSetting) The response message.
    """
    config = self.GetMethodConfig('Patch')
    return self._RunMethod(
        config, request, global_params=global_params)

  Patch.method_config = lambda: base_api.ApiMethodInfo(
      http_method='PATCH',
      method_id='fusiontables.style.patch',
      ordered_params=['tableId', 'styleId'],
      path_params=['styleId', 'tableId'],
      query_params=[],
      relative_path='tables/{tableId}/styles/{styleId}',
      request_field='<request>',
      request_type_name='StyleSetting',
      response_type_name='StyleSetting',
      supports_download=False,
      )

  def Update(self, request, global_params=None):
    r"""Updates an existing style.

    Args:
      request: (StyleSetting) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (StyleSetting) The response message.
    """
    config = self.GetMethodConfig('Update')
    return self._RunMethod(
        config, request, global_params=global_params)

  # Full-replacement counterpart of Patch (PUT instead of PATCH).
  Update.method_config = lambda: base_api.ApiMethodInfo(
      http_method='PUT',
      method_id='fusiontables.style.update',
      ordered_params=['tableId', 'styleId'],
      path_params=['styleId', 'tableId'],
      query_params=[],
      relative_path='tables/{tableId}/styles/{styleId}',
      request_field='<request>',
      request_type_name='StyleSetting',
      response_type_name='StyleSetting',
      supports_download=False,
      )
class TableService(base_api.BaseApiService):
  """Service class for the table resource.

  CRUD plus copy/import operations on tables ('tables[/{tableId}]').
  NOTE: autogenerated code; each method's wire configuration is attached
  afterwards as a ``method_config`` function attribute whose lambda defers
  ApiMethodInfo construction. ImportRows and ImportTable additionally accept
  a media upload, configured in ``_upload_configs`` below.
  """

  _NAME = 'table'

  def __init__(self, client):
    super(FusiontablesV1.TableService, self).__init__(client)
    # Media-upload configuration for the two import methods: octet-stream
    # payloads up to 262144000 bytes (250 MiB), via both simple/multipart
    # and resumable upload paths.
    self._upload_configs = {
        'ImportRows': base_api.ApiUploadInfo(
            accept=['application/octet-stream'],
            max_size=262144000,
            resumable_multipart=True,
            resumable_path='/resumable/upload/fusiontables/v1/tables/{tableId}/import',
            simple_multipart=True,
            simple_path='/upload/fusiontables/v1/tables/{tableId}/import',
        ),
        'ImportTable': base_api.ApiUploadInfo(
            accept=['application/octet-stream'],
            max_size=262144000,
            resumable_multipart=True,
            resumable_path='/resumable/upload/fusiontables/v1/tables/import',
            simple_multipart=True,
            simple_path='/upload/fusiontables/v1/tables/import',
        ),
        }

  def Copy(self, request, global_params=None):
    r"""Copies a table.

    Args:
      request: (FusiontablesTableCopyRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (Table) The response message.
    """
    config = self.GetMethodConfig('Copy')
    return self._RunMethod(
        config, request, global_params=global_params)

  Copy.method_config = lambda: base_api.ApiMethodInfo(
      http_method='POST',
      method_id='fusiontables.table.copy',
      ordered_params=['tableId'],
      path_params=['tableId'],
      query_params=['copyPresentation'],
      relative_path='tables/{tableId}/copy',
      request_field='',
      request_type_name='FusiontablesTableCopyRequest',
      response_type_name='Table',
      supports_download=False,
      )

  def Delete(self, request, global_params=None):
    r"""Deletes a table.

    Args:
      request: (FusiontablesTableDeleteRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (FusiontablesTableDeleteResponse) The response message.
    """
    config = self.GetMethodConfig('Delete')
    return self._RunMethod(
        config, request, global_params=global_params)

  Delete.method_config = lambda: base_api.ApiMethodInfo(
      http_method='DELETE',
      method_id='fusiontables.table.delete',
      ordered_params=['tableId'],
      path_params=['tableId'],
      query_params=[],
      relative_path='tables/{tableId}',
      request_field='',
      request_type_name='FusiontablesTableDeleteRequest',
      response_type_name='FusiontablesTableDeleteResponse',
      supports_download=False,
      )

  def Get(self, request, global_params=None):
    r"""Retrieves a specific table by its id.

    Args:
      request: (FusiontablesTableGetRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (Table) The response message.
    """
    config = self.GetMethodConfig('Get')
    return self._RunMethod(
        config, request, global_params=global_params)

  Get.method_config = lambda: base_api.ApiMethodInfo(
      http_method='GET',
      method_id='fusiontables.table.get',
      ordered_params=['tableId'],
      path_params=['tableId'],
      query_params=[],
      relative_path='tables/{tableId}',
      request_field='',
      request_type_name='FusiontablesTableGetRequest',
      response_type_name='Table',
      supports_download=False,
      )

  def ImportRows(self, request, global_params=None, upload=None):
    r"""Import more rows into a table.

    Args:
      request: (FusiontablesTableImportRowsRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
      upload: (Upload, default: None) If present, upload
          this stream with the request.

    Returns:
      (Import) The response message.
    """
    config = self.GetMethodConfig('ImportRows')
    # Upload config selects the simple/resumable media path declared in
    # self._upload_configs['ImportRows'].
    upload_config = self.GetUploadConfig('ImportRows')
    return self._RunMethod(
        config, request, global_params=global_params,
        upload=upload, upload_config=upload_config)

  ImportRows.method_config = lambda: base_api.ApiMethodInfo(
      http_method='POST',
      method_id='fusiontables.table.importRows',
      ordered_params=['tableId'],
      path_params=['tableId'],
      query_params=['delimiter', 'encoding', 'endLine', 'isStrict', 'startLine'],
      relative_path='tables/{tableId}/import',
      request_field='',
      request_type_name='FusiontablesTableImportRowsRequest',
      response_type_name='Import',
      supports_download=False,
      )

  def ImportTable(self, request, global_params=None, upload=None):
    r"""Import a new table.

    Args:
      request: (FusiontablesTableImportTableRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
      upload: (Upload, default: None) If present, upload
          this stream with the request.

    Returns:
      (Table) The response message.
    """
    config = self.GetMethodConfig('ImportTable')
    # Upload config selects the simple/resumable media path declared in
    # self._upload_configs['ImportTable'].
    upload_config = self.GetUploadConfig('ImportTable')
    return self._RunMethod(
        config, request, global_params=global_params,
        upload=upload, upload_config=upload_config)

  ImportTable.method_config = lambda: base_api.ApiMethodInfo(
      http_method='POST',
      method_id='fusiontables.table.importTable',
      ordered_params=['name'],
      path_params=[],
      query_params=['delimiter', 'encoding', 'name'],
      relative_path='tables/import',
      request_field='',
      request_type_name='FusiontablesTableImportTableRequest',
      response_type_name='Table',
      supports_download=False,
      )

  def Insert(self, request, global_params=None):
    r"""Creates a new table.

    Args:
      request: (Table) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (Table) The response message.
    """
    config = self.GetMethodConfig('Insert')
    return self._RunMethod(
        config, request, global_params=global_params)

  # '<request>': the Table message itself is sent as the body.
  Insert.method_config = lambda: base_api.ApiMethodInfo(
      http_method='POST',
      method_id='fusiontables.table.insert',
      ordered_params=[],
      path_params=[],
      query_params=[],
      relative_path='tables',
      request_field='<request>',
      request_type_name='Table',
      response_type_name='Table',
      supports_download=False,
      )

  def List(self, request, global_params=None):
    r"""Retrieves a list of tables a user owns.

    Args:
      request: (FusiontablesTableListRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (TableList) The response message.
    """
    config = self.GetMethodConfig('List')
    return self._RunMethod(
        config, request, global_params=global_params)

  List.method_config = lambda: base_api.ApiMethodInfo(
      http_method='GET',
      method_id='fusiontables.table.list',
      ordered_params=[],
      path_params=[],
      query_params=['maxResults', 'pageToken'],
      relative_path='tables',
      request_field='',
      request_type_name='FusiontablesTableListRequest',
      response_type_name='TableList',
      supports_download=False,
      )

  def Patch(self, request, global_params=None):
    r"""Updates an existing table. Unless explicitly requested, only the name, description, and attribution will be updated. This method supports patch semantics.

    Args:
      request: (FusiontablesTablePatchRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (Table) The response message.
    """
    config = self.GetMethodConfig('Patch')
    return self._RunMethod(
        config, request, global_params=global_params)

  # request_field='table': only the 'table' sub-message is sent as the body.
  Patch.method_config = lambda: base_api.ApiMethodInfo(
      http_method='PATCH',
      method_id='fusiontables.table.patch',
      ordered_params=['tableId'],
      path_params=['tableId'],
      query_params=['replaceViewDefinition'],
      relative_path='tables/{tableId}',
      request_field='table',
      request_type_name='FusiontablesTablePatchRequest',
      response_type_name='Table',
      supports_download=False,
      )

  def Update(self, request, global_params=None):
    r"""Updates an existing table. Unless explicitly requested, only the name, description, and attribution will be updated.

    Args:
      request: (FusiontablesTableUpdateRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (Table) The response message.
    """
    config = self.GetMethodConfig('Update')
    return self._RunMethod(
        config, request, global_params=global_params)

  # Full-replacement counterpart of Patch (PUT instead of PATCH).
  Update.method_config = lambda: base_api.ApiMethodInfo(
      http_method='PUT',
      method_id='fusiontables.table.update',
      ordered_params=['tableId'],
      path_params=['tableId'],
      query_params=['replaceViewDefinition'],
      relative_path='tables/{tableId}',
      request_field='table',
      request_type_name='FusiontablesTableUpdateRequest',
      response_type_name='Table',
      supports_download=False,
      )
class TaskService(base_api.BaseApiService):
  """Service class for the task resource.

  Read/delete access to background tasks under
  'tables/{tableId}/tasks[/{taskId}]'. NOTE: autogenerated code; each
  method's wire configuration is attached afterwards as a ``method_config``
  function attribute whose lambda defers ApiMethodInfo construction.
  """

  _NAME = 'task'

  def __init__(self, client):
    super(FusiontablesV1.TaskService, self).__init__(client)
    # This service exposes no media-upload methods.
    self._upload_configs = {
        }

  def Delete(self, request, global_params=None):
    r"""Deletes the task, unless already started.

    Args:
      request: (FusiontablesTaskDeleteRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (FusiontablesTaskDeleteResponse) The response message.
    """
    config = self.GetMethodConfig('Delete')
    return self._RunMethod(
        config, request, global_params=global_params)

  Delete.method_config = lambda: base_api.ApiMethodInfo(
      http_method='DELETE',
      method_id='fusiontables.task.delete',
      ordered_params=['tableId', 'taskId'],
      path_params=['tableId', 'taskId'],
      query_params=[],
      relative_path='tables/{tableId}/tasks/{taskId}',
      request_field='',
      request_type_name='FusiontablesTaskDeleteRequest',
      response_type_name='FusiontablesTaskDeleteResponse',
      supports_download=False,
      )

  def Get(self, request, global_params=None):
    r"""Retrieves a specific task by its id.

    Args:
      request: (FusiontablesTaskGetRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (Task) The response message.
    """
    config = self.GetMethodConfig('Get')
    return self._RunMethod(
        config, request, global_params=global_params)

  Get.method_config = lambda: base_api.ApiMethodInfo(
      http_method='GET',
      method_id='fusiontables.task.get',
      ordered_params=['tableId', 'taskId'],
      path_params=['tableId', 'taskId'],
      query_params=[],
      relative_path='tables/{tableId}/tasks/{taskId}',
      request_field='',
      request_type_name='FusiontablesTaskGetRequest',
      response_type_name='Task',
      supports_download=False,
      )

  def List(self, request, global_params=None):
    r"""Retrieves a list of tasks.

    Args:
      request: (FusiontablesTaskListRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (TaskList) The response message.
    """
    config = self.GetMethodConfig('List')
    return self._RunMethod(
        config, request, global_params=global_params)

  # Unlike other List methods here, task listing also supports 'startIndex'.
  List.method_config = lambda: base_api.ApiMethodInfo(
      http_method='GET',
      method_id='fusiontables.task.list',
      ordered_params=['tableId'],
      path_params=['tableId'],
      query_params=['maxResults', 'pageToken', 'startIndex'],
      relative_path='tables/{tableId}/tasks',
      request_field='',
      request_type_name='FusiontablesTaskListRequest',
      response_type_name='TaskList',
      supports_download=False,
      )
class TemplateService(base_api.BaseApiService):
"""Service class for the template resource."""
_NAME = 'template'
def __init__(self, client):
  """Binds this service to its API client instance."""
  super(FusiontablesV1.TemplateService, self).__init__(client)
  # This service exposes no media-upload methods.
  self._upload_configs = {
      }
def Delete(self, request, global_params=None):
  r"""Deletes a template.

  Args:
    request: (FusiontablesTemplateDeleteRequest) input message
    global_params: (StandardQueryParameters, default: None) global arguments

  Returns:
    (FusiontablesTemplateDeleteResponse) The response message.
  """
  config = self.GetMethodConfig('Delete')
  return self._RunMethod(
      config, request, global_params=global_params)

# Generated code: wire configuration attached as a function attribute;
# the lambda defers ApiMethodInfo construction until GetMethodConfig.
Delete.method_config = lambda: base_api.ApiMethodInfo(
    http_method='DELETE',
    method_id='fusiontables.template.delete',
    ordered_params=['tableId', 'templateId'],
    path_params=['tableId', 'templateId'],
    query_params=[],
    relative_path='tables/{tableId}/templates/{templateId}',
    request_field='',
    request_type_name='FusiontablesTemplateDeleteRequest',
    response_type_name='FusiontablesTemplateDeleteResponse',
    supports_download=False,
)
def Get(self, request, global_params=None):
  r"""Retrieves a specific template by its id.

  Args:
    request: (FusiontablesTemplateGetRequest) input message
    global_params: (StandardQueryParameters, default: None) global arguments

  Returns:
    (Template) The response message.
  """
  config = self.GetMethodConfig('Get')
  return self._RunMethod(
      config, request, global_params=global_params)

# Generated code: wire configuration attached as a function attribute;
# the lambda defers ApiMethodInfo construction until GetMethodConfig.
Get.method_config = lambda: base_api.ApiMethodInfo(
    http_method='GET',
    method_id='fusiontables.template.get',
    ordered_params=['tableId', 'templateId'],
    path_params=['tableId', 'templateId'],
    query_params=[],
    relative_path='tables/{tableId}/templates/{templateId}',
    request_field='',
    request_type_name='FusiontablesTemplateGetRequest',
    response_type_name='Template',
    supports_download=False,
)
def Insert(self, request, global_params=None):
  r"""Creates a new template for the table.

  Args:
    request: (Template) input message
    global_params: (StandardQueryParameters, default: None) global arguments

  Returns:
    (Template) The response message.
  """
  config = self.GetMethodConfig('Insert')
  return self._RunMethod(
      config, request, global_params=global_params)

# Generated code: wire configuration attached as a function attribute.
# '<request>': the Template message itself is sent as the HTTP body.
Insert.method_config = lambda: base_api.ApiMethodInfo(
    http_method='POST',
    method_id='fusiontables.template.insert',
    ordered_params=['tableId'],
    path_params=['tableId'],
    query_params=[],
    relative_path='tables/{tableId}/templates',
    request_field='<request>',
    request_type_name='Template',
    response_type_name='Template',
    supports_download=False,
)
def List(self, request, global_params=None):
  r"""Retrieves a list of templates.

  Args:
    request: (FusiontablesTemplateListRequest) input message
    global_params: (StandardQueryParameters, default: None) global arguments

  Returns:
    (TemplateList) The response message.
  """
  config = self.GetMethodConfig('List')
  return self._RunMethod(
      config, request, global_params=global_params)

# Generated code: wire configuration attached as a function attribute;
# pagination is driven by the maxResults/pageToken query parameters.
List.method_config = lambda: base_api.ApiMethodInfo(
    http_method='GET',
    method_id='fusiontables.template.list',
    ordered_params=['tableId'],
    path_params=['tableId'],
    query_params=['maxResults', 'pageToken'],
    relative_path='tables/{tableId}/templates',
    request_field='',
    request_type_name='FusiontablesTemplateListRequest',
    response_type_name='TemplateList',
    supports_download=False,
)
def Patch(self, request, global_params=None):
| python | Apache-2.0 | 1b14f58b3c895f542c724028dd0fb8ae0d816510 | 2026-01-05T07:12:01.390580Z | true |
"""Generated message classes for fusiontables version v1.
API for working with Fusion Tables data.
"""
# NOTE: This file is autogenerated and should not be edited by hand.
from __future__ import absolute_import
from apitools.base.protorpclite import messages as _messages
from apitools.base.py import extra_types
package = 'fusiontables'
class Bucket(_messages.Message):
  r"""Specifies the minimum and maximum values, the color, opacity, icon and
  weight of a bucket within a StyleSetting.

  Fields:
    color: Color of line or the interior of a polygon in #RRGGBB format.
    icon: Icon name used for a point.
    max: Maximum value in the selected column for a row to be styled according
      to the bucket color, opacity, icon, or weight.
    min: Minimum value in the selected column for a row to be styled according
      to the bucket color, opacity, icon, or weight.
    opacity: Opacity of the color: 0.0 (transparent) to 1.0 (opaque).
    weight: Width of a line (in pixels).
  """

  # Numeric arguments are protorpc field tags; they must stay stable across
  # regeneration. 'max'/'min' intentionally mirror the API field names even
  # though they shadow builtins.
  color = _messages.StringField(1)
  icon = _messages.StringField(2)
  max = _messages.FloatField(3)
  min = _messages.FloatField(4)
  opacity = _messages.FloatField(5)
  weight = _messages.IntegerField(6, variant=_messages.Variant.INT32)
class Column(_messages.Message):
  r"""Specifies the id, name and type of a column in a table.

  Messages:
    BaseColumnValue: Optional identifier of the base column. If present, this
      column is derived from the specified base column.

  Fields:
    baseColumn: Optional identifier of the base column. If present, this
      column is derived from the specified base column.
    columnId: Identifier for the column.
    description: Optional column description.
    graph_predicate: Optional column predicate. Used to map table to graph
      data model (subject,predicate,object) See http://www.w3.org/TR/2014/REC-
      rdf11-concepts-20140225/#data-model
    kind: Type name: a template for an individual column.
    name: Required name of the column.
    type: Required type of the column.
  """

  class BaseColumnValue(_messages.Message):
    r"""Optional identifier of the base column. If present, this column is
    derived from the specified base column.

    Fields:
      columnId: The id of the column in the base table from which this column
        is derived.
      tableIndex: Offset to the entry in the list of base tables in the table
        definition.
    """

    columnId = _messages.IntegerField(1, variant=_messages.Variant.INT32)
    tableIndex = _messages.IntegerField(2, variant=_messages.Variant.INT32)

  # Field tags restart at 1 for the outer message; 'kind' carries a server-
  # expected default discriminator string.
  baseColumn = _messages.MessageField('BaseColumnValue', 1)
  columnId = _messages.IntegerField(2, variant=_messages.Variant.INT32)
  description = _messages.StringField(3)
  graph_predicate = _messages.StringField(4)
  kind = _messages.StringField(5, default='fusiontables#column')
  name = _messages.StringField(6)
  type = _messages.StringField(7)
class ColumnList(_messages.Message):
  r"""Represents a list of columns in a table.

  Fields:
    items: List of all requested columns.
    kind: Type name: a list of all columns.
    nextPageToken: Token used to access the next page of this result. No token
      is displayed if there are no more pages left.
    totalItems: Total number of columns for the table.
  """

  items = _messages.MessageField('Column', 1, repeated=True)
  kind = _messages.StringField(2, default='fusiontables#columnList')
  nextPageToken = _messages.StringField(3)
  totalItems = _messages.IntegerField(4, variant=_messages.Variant.INT32)
class FusiontablesColumnDeleteRequest(_messages.Message):
  r"""A FusiontablesColumnDeleteRequest object.

  Fields:
    columnId: Name or identifier for the column being deleted.
    tableId: Table from which the column is being deleted.
  """

  columnId = _messages.StringField(1, required=True)
  tableId = _messages.StringField(2, required=True)
class FusiontablesColumnDeleteResponse(_messages.Message):
  r"""An empty FusiontablesColumnDelete response."""
  # Intentionally field-less: column deletion returns no payload.
class FusiontablesColumnGetRequest(_messages.Message):
  r"""A FusiontablesColumnGetRequest object.

  Fields:
    columnId: Name or identifier for the column that is being requested.
    tableId: Table to which the column belongs.
  """

  columnId = _messages.StringField(1, required=True)
  tableId = _messages.StringField(2, required=True)
class FusiontablesColumnInsertRequest(_messages.Message):
  r"""A FusiontablesColumnInsertRequest object.

  Fields:
    column: A Column resource to be passed as the request body.
    tableId: Table for which a new column is being added.
  """

  column = _messages.MessageField('Column', 1)
  tableId = _messages.StringField(2, required=True)
class FusiontablesColumnListRequest(_messages.Message):
  r"""A FusiontablesColumnListRequest object.

  Fields:
    maxResults: Maximum number of columns to return. Optional. Default is 5.
    pageToken: Continuation token specifying which result page to return.
      Optional.
    tableId: Table whose columns are being listed.
  """

  maxResults = _messages.IntegerField(1, variant=_messages.Variant.UINT32)
  pageToken = _messages.StringField(2)
  tableId = _messages.StringField(3, required=True)
class FusiontablesColumnPatchRequest(_messages.Message):
  r"""A FusiontablesColumnPatchRequest object.

  Fields:
    column: A Column resource to be passed as the request body.
    columnId: Name or identifier for the column that is being updated.
    tableId: Table for which the column is being updated.
  """

  column = _messages.MessageField('Column', 1)
  columnId = _messages.StringField(2, required=True)
  tableId = _messages.StringField(3, required=True)
class FusiontablesColumnUpdateRequest(_messages.Message):
  r"""A FusiontablesColumnUpdateRequest object.

  Fields:
    column: A Column resource to be passed as the request body.
    columnId: Name or identifier for the column that is being updated.
    tableId: Table for which the column is being updated.
  """

  column = _messages.MessageField('Column', 1)
  columnId = _messages.StringField(2, required=True)
  tableId = _messages.StringField(3, required=True)
class FusiontablesQuerySqlGetRequest(_messages.Message):
  r"""A FusiontablesQuerySqlGetRequest object.

  Fields:
    hdrs: Should column names be included (in the first row)?. Default is
      true.
    sql: An SQL SELECT/SHOW/DESCRIBE statement.
    typed: Should typed values be returned in the (JSON) response -- numbers
      for numeric values and parsed geometries for KML values? Default is
      true.
  """

  hdrs = _messages.BooleanField(1)
  sql = _messages.StringField(2, required=True)
  typed = _messages.BooleanField(3)
class FusiontablesQuerySqlRequest(_messages.Message):
  r"""A FusiontablesQuerySqlRequest object.

  Fields:
    hdrs: Should column names be included (in the first row)?. Default is
      true.
    sql: An SQL SELECT/SHOW/DESCRIBE/INSERT/UPDATE/DELETE/CREATE statement.
    typed: Should typed values be returned in the (JSON) response -- numbers
      for numeric values and parsed geometries for KML values? Default is
      true.
  """

  # Same shape as FusiontablesQuerySqlGetRequest, but the POST variant also
  # permits mutating statements (INSERT/UPDATE/DELETE/CREATE).
  hdrs = _messages.BooleanField(1)
  sql = _messages.StringField(2, required=True)
  typed = _messages.BooleanField(3)
class FusiontablesStyleDeleteRequest(_messages.Message):
  r"""A FusiontablesStyleDeleteRequest object.

  Fields:
    styleId: Identifier (within a table) for the style being deleted
    tableId: Table from which the style is being deleted
  """

  styleId = _messages.IntegerField(1, required=True, variant=_messages.Variant.INT32)
  tableId = _messages.StringField(2, required=True)
class FusiontablesStyleDeleteResponse(_messages.Message):
  r"""An empty FusiontablesStyleDelete response."""
  # Intentionally field-less: style deletion returns no payload.
class FusiontablesStyleGetRequest(_messages.Message):
  r"""A FusiontablesStyleGetRequest object.

  Fields:
    styleId: Identifier (integer) for a specific style in a table
    tableId: Table to which the requested style belongs
  """

  styleId = _messages.IntegerField(1, required=True, variant=_messages.Variant.INT32)
  tableId = _messages.StringField(2, required=True)
class FusiontablesStyleListRequest(_messages.Message):
  r"""A FusiontablesStyleListRequest object.

  Fields:
    maxResults: Maximum number of styles to return. Optional. Default is 5.
    pageToken: Continuation token specifying which result page to return.
      Optional.
    tableId: Table whose styles are being listed
  """

  maxResults = _messages.IntegerField(1, variant=_messages.Variant.UINT32)
  pageToken = _messages.StringField(2)
  tableId = _messages.StringField(3, required=True)
class FusiontablesTableCopyRequest(_messages.Message):
  r"""A FusiontablesTableCopyRequest object.

  Fields:
    copyPresentation: Whether to also copy tabs, styles, and templates.
      Default is false.
    tableId: ID of the table that is being copied.
  """

  copyPresentation = _messages.BooleanField(1)
  tableId = _messages.StringField(2, required=True)
class FusiontablesTableDeleteRequest(_messages.Message):
  r"""A FusiontablesTableDeleteRequest object.

  Fields:
    tableId: ID of the table that is being deleted.
  """

  tableId = _messages.StringField(1, required=True)
class FusiontablesTableDeleteResponse(_messages.Message):
  r"""An empty FusiontablesTableDelete response."""
  # Intentionally field-less: table deletion returns no payload.
class FusiontablesTableGetRequest(_messages.Message):
  r"""A FusiontablesTableGetRequest object.

  Fields:
    tableId: Identifier(ID) for the table being requested.
  """

  tableId = _messages.StringField(1, required=True)
class FusiontablesTableImportRowsRequest(_messages.Message):
r"""A FusiontablesTableImportRowsRequest object.
Fields:
delimiter: The delimiter used to separate cell values. This can only
consist of a single character. Default is ','.
encoding: The encoding of the content. Default is UTF-8. Use 'auto-detect'
if you are unsure of the encoding.
endLine: The index of the last line from which to start importing,
exclusive. Thus, the number of imported lines is endLine - startLine. If
this parameter is not provided, the file will be imported until the last
line of the file. If endLine is negative, then the imported content will
exclude the last endLine lines. That is, if endline is negative, no line
will be imported whose index is greater than N + endLine where N is the
number of lines in the file, and the number of imported lines will be N
+ endLine - startLine.
isStrict: Whether the CSV must have the same number of values for each
row. If false, rows with fewer values will be padded with empty values.
Default is true.
startLine: The index of the first line from which to start importing,
inclusive. Default is 0.
tableId: The table into which new rows are being imported.
"""
delimiter = _messages.StringField(1)
encoding = _messages.StringField(2)
endLine = _messages.IntegerField(3, variant=_messages.Variant.INT32)
isStrict = _messages.BooleanField(4)
startLine = _messages.IntegerField(5, variant=_messages.Variant.INT32)
tableId = _messages.StringField(6, required=True)
class FusiontablesTableImportTableRequest(_messages.Message):
r"""A FusiontablesTableImportTableRequest object.
Fields:
delimiter: The delimiter used to separate cell values. This can only
consist of a single character. Default is ','.
encoding: The encoding of the content. Default is UTF-8. Use 'auto-detect'
if you are unsure of the encoding.
name: The name to be assigned to the new table.
"""
delimiter = _messages.StringField(1)
encoding = _messages.StringField(2)
name = _messages.StringField(3, required=True)
class FusiontablesTableListRequest(_messages.Message):
r"""A FusiontablesTableListRequest object.
Fields:
maxResults: Maximum number of styles to return. Optional. Default is 5.
pageToken: Continuation token specifying which result page to return.
Optional.
"""
maxResults = _messages.IntegerField(1, variant=_messages.Variant.UINT32)
pageToken = _messages.StringField(2)
class FusiontablesTablePatchRequest(_messages.Message):
r"""A FusiontablesTablePatchRequest object.
Fields:
replaceViewDefinition: Should the view definition also be updated? The
specified view definition replaces the existing one. Only a view can be
updated with a new definition.
table: A Table resource to be passed as the request body.
tableId: ID of the table that is being updated.
"""
replaceViewDefinition = _messages.BooleanField(1)
table = _messages.MessageField('Table', 2)
tableId = _messages.StringField(3, required=True)
class FusiontablesTableUpdateRequest(_messages.Message):
r"""A FusiontablesTableUpdateRequest object.
Fields:
replaceViewDefinition: Should the view definition also be updated? The
specified view definition replaces the existing one. Only a view can be
updated with a new definition.
table: A Table resource to be passed as the request body.
tableId: ID of the table that is being updated.
"""
replaceViewDefinition = _messages.BooleanField(1)
table = _messages.MessageField('Table', 2)
tableId = _messages.StringField(3, required=True)
class FusiontablesTaskDeleteRequest(_messages.Message):
r"""A FusiontablesTaskDeleteRequest object.
Fields:
tableId: Table from which the task is being deleted.
taskId: A string attribute.
"""
tableId = _messages.StringField(1, required=True)
taskId = _messages.StringField(2, required=True)
class FusiontablesTaskDeleteResponse(_messages.Message):
r"""An empty FusiontablesTaskDelete response."""
class FusiontablesTaskGetRequest(_messages.Message):
r"""A FusiontablesTaskGetRequest object.
Fields:
tableId: Table to which the task belongs.
taskId: A string attribute.
"""
tableId = _messages.StringField(1, required=True)
taskId = _messages.StringField(2, required=True)
class FusiontablesTaskListRequest(_messages.Message):
r"""A FusiontablesTaskListRequest object.
Fields:
maxResults: Maximum number of columns to return. Optional. Default is 5.
pageToken: A string attribute.
startIndex: A integer attribute.
tableId: Table whose tasks are being listed.
"""
maxResults = _messages.IntegerField(1, variant=_messages.Variant.UINT32)
pageToken = _messages.StringField(2)
startIndex = _messages.IntegerField(3, variant=_messages.Variant.UINT32)
tableId = _messages.StringField(4, required=True)
class FusiontablesTemplateDeleteRequest(_messages.Message):
r"""A FusiontablesTemplateDeleteRequest object.
Fields:
tableId: Table from which the template is being deleted
templateId: Identifier for the template which is being deleted
"""
tableId = _messages.StringField(1, required=True)
templateId = _messages.IntegerField(2, required=True, variant=_messages.Variant.INT32)
class FusiontablesTemplateDeleteResponse(_messages.Message):
r"""An empty FusiontablesTemplateDelete response."""
class FusiontablesTemplateGetRequest(_messages.Message):
r"""A FusiontablesTemplateGetRequest object.
Fields:
tableId: Table to which the template belongs
templateId: Identifier for the template that is being requested
"""
tableId = _messages.StringField(1, required=True)
templateId = _messages.IntegerField(2, required=True, variant=_messages.Variant.INT32)
class FusiontablesTemplateListRequest(_messages.Message):
r"""A FusiontablesTemplateListRequest object.
Fields:
maxResults: Maximum number of templates to return. Optional. Default is 5.
pageToken: Continuation token specifying which results page to return.
Optional.
tableId: Identifier for the table whose templates are being requested
"""
maxResults = _messages.IntegerField(1, variant=_messages.Variant.UINT32)
pageToken = _messages.StringField(2)
tableId = _messages.StringField(3, required=True)
class Geometry(_messages.Message):
r"""Represents a Geometry object.
Fields:
geometries: The list of geometries in this geometry collection.
geometry: A extra_types.JsonValue attribute.
type: Type: A collection of geometries.
"""
geometries = _messages.MessageField('extra_types.JsonValue', 1, repeated=True)
geometry = _messages.MessageField('extra_types.JsonValue', 2)
type = _messages.StringField(3, default='GeometryCollection')
class Import(_messages.Message):
r"""Represents an import request.
Fields:
kind: Type name: a template for an import request.
numRowsReceived: The number of rows received from the import request.
"""
kind = _messages.StringField(1, default='fusiontables#import')
numRowsReceived = _messages.IntegerField(2)
class Line(_messages.Message):
r"""Represents a line geometry.
Messages:
CoordinatesValueListEntry: Single entry in a CoordinatesValue.
Fields:
coordinates: The coordinates that define the line.
type: Type: A line geometry.
"""
class CoordinatesValueListEntry(_messages.Message):
r"""Single entry in a CoordinatesValue.
Fields:
entry: A number attribute.
"""
entry = _messages.FloatField(1, repeated=True)
coordinates = _messages.MessageField('CoordinatesValueListEntry', 1, repeated=True)
type = _messages.StringField(2, default='LineString')
class LineStyle(_messages.Message):
r"""Represents a LineStyle within a StyleSetting
Fields:
strokeColor: Color of the line in #RRGGBB format.
strokeColorStyler: Column-value, gradient or buckets styler that is used
to determine the line color and opacity.
strokeOpacity: Opacity of the line : 0.0 (transparent) to 1.0 (opaque).
strokeWeight: Width of the line in pixels.
strokeWeightStyler: Column-value or bucket styler that is used to
determine the width of the line.
"""
strokeColor = _messages.StringField(1)
strokeColorStyler = _messages.MessageField('StyleFunction', 2)
strokeOpacity = _messages.FloatField(3)
strokeWeight = _messages.IntegerField(4, variant=_messages.Variant.INT32)
strokeWeightStyler = _messages.MessageField('StyleFunction', 5)
class Point(_messages.Message):
r"""Represents a point object.
Fields:
coordinates: The coordinates that define the point.
type: Point: A point geometry.
"""
coordinates = _messages.FloatField(1, repeated=True)
type = _messages.StringField(2, default='Point')
class PointStyle(_messages.Message):
r"""Represents a PointStyle within a StyleSetting
Fields:
iconName: Name of the icon. Use values defined in
http://www.google.com/fusiontables/DataSource?dsrcid=308519
iconStyler: Column or a bucket value from which the icon name is to be
determined.
"""
iconName = _messages.StringField(1)
iconStyler = _messages.MessageField('StyleFunction', 2)
class Polygon(_messages.Message):
r"""Represents a polygon object.
Messages:
CoordinatesValueListEntry: Single entry in a CoordinatesValue.
Fields:
coordinates: The coordinates that define the polygon.
type: Type: A polygon geometry.
"""
class CoordinatesValueListEntry(_messages.Message):
r"""Single entry in a CoordinatesValue.
Messages:
EntryValueListEntry: Single entry in a EntryValue.
Fields:
entry: A EntryValueListEntry attribute.
"""
class EntryValueListEntry(_messages.Message):
r"""Single entry in a EntryValue.
Fields:
entry: A number attribute.
"""
entry = _messages.FloatField(1, repeated=True)
entry = _messages.MessageField('EntryValueListEntry', 1, repeated=True)
coordinates = _messages.MessageField('CoordinatesValueListEntry', 1, repeated=True)
type = _messages.StringField(2, default='Polygon')
class PolygonStyle(_messages.Message):
r"""Represents a PolygonStyle within a StyleSetting
Fields:
fillColor: Color of the interior of the polygon in #RRGGBB format.
fillColorStyler: Column-value, gradient, or bucket styler that is used to
determine the interior color and opacity of the polygon.
fillOpacity: Opacity of the interior of the polygon: 0.0 (transparent) to
1.0 (opaque).
strokeColor: Color of the polygon border in #RRGGBB format.
strokeColorStyler: Column-value, gradient or buckets styler that is used
to determine the border color and opacity.
strokeOpacity: Opacity of the polygon border: 0.0 (transparent) to 1.0
(opaque).
strokeWeight: Width of the polyon border in pixels.
strokeWeightStyler: Column-value or bucket styler that is used to
determine the width of the polygon border.
"""
fillColor = _messages.StringField(1)
fillColorStyler = _messages.MessageField('StyleFunction', 2)
fillOpacity = _messages.FloatField(3)
strokeColor = _messages.StringField(4)
strokeColorStyler = _messages.MessageField('StyleFunction', 5)
strokeOpacity = _messages.FloatField(6)
strokeWeight = _messages.IntegerField(7, variant=_messages.Variant.INT32)
strokeWeightStyler = _messages.MessageField('StyleFunction', 8)
class Sqlresponse(_messages.Message):
r"""Represents a response to an sql statement.
Messages:
RowsValueListEntry: Single entry in a RowsValue.
Fields:
columns: Columns in the table.
kind: Type name: a template for an individual table.
rows: The rows in the table. For each cell we print out whatever cell
value (e.g., numeric, string) exists. Thus it is important that each
cell contains only one value.
"""
class RowsValueListEntry(_messages.Message):
r"""Single entry in a RowsValue.
Fields:
entry: A extra_types.JsonValue attribute.
"""
entry = _messages.MessageField('extra_types.JsonValue', 1, repeated=True)
columns = _messages.StringField(1, repeated=True)
kind = _messages.StringField(2, default='fusiontables#sqlresponse')
rows = _messages.MessageField('RowsValueListEntry', 3, repeated=True)
class StandardQueryParameters(_messages.Message):
r"""Query parameters accepted by all methods.
Enums:
AltValueValuesEnum: Data format for the response.
Fields:
alt: Data format for the response.
fields: Selector specifying which fields to include in a partial response.
key: API key. Your API key identifies your project and provides you with
API access, quota, and reports. Required unless you provide an OAuth 2.0
token.
oauth_token: OAuth 2.0 token for the current user.
prettyPrint: Returns response with indentations and line breaks.
quotaUser: Available to use for quota purposes for server-side
applications. Can be any arbitrary string assigned to a user, but should
not exceed 40 characters. Overrides userIp if both are provided.
trace: A tracing token of the form "token:<tokenid>" to include in api
requests.
userIp: IP address of the site where the request originates. Use this if
you want to enforce per-user limits.
"""
class AltValueValuesEnum(_messages.Enum):
r"""Data format for the response.
Values:
csv: Responses with Content-Type of text/csv
json: Responses with Content-Type of application/json
"""
csv = 0
json = 1
alt = _messages.EnumField('AltValueValuesEnum', 1, default='json')
fields = _messages.StringField(2)
key = _messages.StringField(3)
oauth_token = _messages.StringField(4)
prettyPrint = _messages.BooleanField(5, default=True)
quotaUser = _messages.StringField(6)
trace = _messages.StringField(7)
userIp = _messages.StringField(8)
class StyleFunction(_messages.Message):
r"""Represents a StyleFunction within a StyleSetting
Messages:
GradientValue: Gradient function that interpolates a range of colors based
on column value.
Fields:
buckets: Bucket function that assigns a style based on the range a column
value falls into.
columnName: Name of the column whose value is used in the style.
gradient: Gradient function that interpolates a range of colors based on
column value.
kind: Stylers can be one of three kinds: "fusiontables#fromColumn" if the
column value is to be used as is, i.e., the column values can have
colors in #RRGGBBAA format or integer line widths or icon names;
"fusiontables#gradient" if the styling of the row is to be based on
applying the gradient function on the column value; or
"fusiontables#buckets" if the styling is to based on the bucket into
which the the column value falls.
"""
class GradientValue(_messages.Message):
r"""Gradient function that interpolates a range of colors based on column
value.
Messages:
ColorsValueListEntry: A ColorsValueListEntry object.
Fields:
colors: Array with two or more colors.
max: Higher-end of the interpolation range: rows with this value will be
assigned to colors[n-1].
min: Lower-end of the interpolation range: rows with this value will be
assigned to colors[0].
"""
class ColorsValueListEntry(_messages.Message):
r"""A ColorsValueListEntry object.
Fields:
color: Color in #RRGGBB format.
opacity: Opacity of the color: 0.0 (transparent) to 1.0 (opaque).
"""
color = _messages.StringField(1)
opacity = _messages.FloatField(2)
colors = _messages.MessageField('ColorsValueListEntry', 1, repeated=True)
max = _messages.FloatField(2)
min = _messages.FloatField(3)
buckets = _messages.MessageField('Bucket', 1, repeated=True)
columnName = _messages.StringField(2)
gradient = _messages.MessageField('GradientValue', 3)
kind = _messages.StringField(4)
class StyleSetting(_messages.Message):
r"""Represents a complete StyleSettings object. The primary key is a
combination of the tableId and a styleId.
Fields:
kind: Type name: an individual style setting. A StyleSetting contains the
style defintions for points, lines, and polygons in a table. Since a
table can have any one or all of them, a style definition can have
point, line and polygon style definitions.
markerOptions: Style definition for points in the table.
name: Optional name for the style setting.
polygonOptions: Style definition for polygons in the table.
polylineOptions: Style definition for lines in the table.
styleId: Identifier for the style setting (unique only within tables).
tableId: Identifier for the table.
"""
kind = _messages.StringField(1, default='fusiontables#styleSetting')
markerOptions = _messages.MessageField('PointStyle', 2)
name = _messages.StringField(3)
polygonOptions = _messages.MessageField('PolygonStyle', 4)
polylineOptions = _messages.MessageField('LineStyle', 5)
styleId = _messages.IntegerField(6, variant=_messages.Variant.INT32)
tableId = _messages.StringField(7)
class StyleSettingList(_messages.Message):
r"""Represents a list of styles for a given table.
Fields:
items: All requested style settings.
kind: Type name: in this case, a list of style settings.
nextPageToken: Token used to access the next page of this result. No token
is displayed if there are no more pages left.
totalItems: Total number of styles for the table.
"""
items = _messages.MessageField('StyleSetting', 1, repeated=True)
kind = _messages.StringField(2, default='fusiontables#styleSettingList')
nextPageToken = _messages.StringField(3)
totalItems = _messages.IntegerField(4, variant=_messages.Variant.INT32)
class Table(_messages.Message):
r"""Represents a table. Specifies the name, whether it is exportable,
description, attribution, and attribution link.
Fields:
attribution: Optional attribution assigned to the table.
attributionLink: Optional link for attribution.
baseTableIds: Optional base table identifier if this table is a view or
merged table.
columns: Columns in the table.
description: Optional description assigned to the table.
isExportable: Variable for whether table is exportable.
kind: Type name: a template for an individual table.
name: Name assigned to a table.
sql: Optional sql that encodes the table definition for derived tables.
tableId: Encrypted unique alphanumeric identifier for the table.
"""
attribution = _messages.StringField(1)
attributionLink = _messages.StringField(2)
baseTableIds = _messages.StringField(3, repeated=True)
columns = _messages.MessageField('Column', 4, repeated=True)
description = _messages.StringField(5)
isExportable = _messages.BooleanField(6)
kind = _messages.StringField(7, default='fusiontables#table')
name = _messages.StringField(8)
sql = _messages.StringField(9)
tableId = _messages.StringField(10)
class TableList(_messages.Message):
r"""Represents a list of tables.
Fields:
items: List of all requested tables.
kind: Type name: a list of all tables.
nextPageToken: Token used to access the next page of this result. No token
is displayed if there are no more pages left.
"""
items = _messages.MessageField('Table', 1, repeated=True)
kind = _messages.StringField(2, default='fusiontables#tableList')
nextPageToken = _messages.StringField(3)
class Task(_messages.Message):
r"""Specifies the identifier, name, and type of a task in a table.
Fields:
kind: Type of the resource. This is always "fusiontables#task".
progress: An indication of task progress.
started: false while the table is busy with some other task. true if this
background task is currently running.
taskId: Identifier for the task.
type: Type of background task. One of DELETE_ROWS Deletes one or more
rows from the table. ADD_ROWS "Adds one or more rows to a table.
Includes importing data into a new table and importing more rows into an
existing table. ADD_COLUMN Adds a new column to the table. CHANGE_TYPE
Changes the type of a column.
"""
kind = _messages.StringField(1, default='fusiontables#task')
progress = _messages.StringField(2)
started = _messages.BooleanField(3)
taskId = _messages.IntegerField(4)
type = _messages.StringField(5)
class TaskList(_messages.Message):
r"""Represents a list of tasks for a table.
Fields:
items: List of all requested tasks.
kind: Type of the resource. This is always "fusiontables#taskList".
nextPageToken: Token used to access the next page of this result. No token
is displayed if there are no more pages left.
totalItems: Total number of tasks for the table.
"""
items = _messages.MessageField('Task', 1, repeated=True)
kind = _messages.StringField(2, default='fusiontables#taskList')
nextPageToken = _messages.StringField(3)
totalItems = _messages.IntegerField(4, variant=_messages.Variant.INT32)
class Template(_messages.Message):
r"""Represents the contents of InfoWindow templates.
Fields:
automaticColumnNames: List of columns from which the template is to be
automatically constructed. Only one of body or automaticColumns can be
specified.
body: Body of the template. It contains HTML with {column_name} to insert
values from a particular column. The body is sanitized to remove certain
tags, e.g., script. Only one of body or automaticColumns can be
specified.
kind: Type name: a template for the info window contents. The template can
either include an HTML body or a list of columns from which the template
is computed automatically.
name: Optional name assigned to a template.
tableId: Identifier for the table for which the template is defined.
templateId: Identifier for the template, unique within the context of a
particular table.
"""
automaticColumnNames = _messages.StringField(1, repeated=True)
body = _messages.StringField(2)
kind = _messages.StringField(3, default='fusiontables#template')
name = _messages.StringField(4)
tableId = _messages.StringField(5)
templateId = _messages.IntegerField(6, variant=_messages.Variant.INT32)
class TemplateList(_messages.Message):
r"""Represents a list of templates for a given table.
Fields:
items: List of all requested templates.
kind: Type name: a list of all templates.
nextPageToken: Token used to access the next page of this result. No token
is displayed if there are no more pages left.
totalItems: Total number of templates for the table.
"""
items = _messages.MessageField('Template', 1, repeated=True)
kind = _messages.StringField(2, default='fusiontables#templateList')
nextPageToken = _messages.StringField(3)
| python | Apache-2.0 | 1b14f58b3c895f542c724028dd0fb8ae0d816510 | 2026-01-05T07:12:01.390580Z | true |
google/apitools | https://github.com/google/apitools/blob/1b14f58b3c895f542c724028dd0fb8ae0d816510/samples/fusiontables_sample/fusiontables_v1/__init__.py | samples/fusiontables_sample/fusiontables_v1/__init__.py | """Package marker file."""
from __future__ import absolute_import
import pkgutil
__path__ = pkgutil.extend_path(__path__, __name__)
| python | Apache-2.0 | 1b14f58b3c895f542c724028dd0fb8ae0d816510 | 2026-01-05T07:12:01.390580Z | false |
google/apitools | https://github.com/google/apitools/blob/1b14f58b3c895f542c724028dd0fb8ae0d816510/samples/iam_sample/__init__.py | samples/iam_sample/__init__.py | # Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| python | Apache-2.0 | 1b14f58b3c895f542c724028dd0fb8ae0d816510 | 2026-01-05T07:12:01.390580Z | false |
google/apitools | https://github.com/google/apitools/blob/1b14f58b3c895f542c724028dd0fb8ae0d816510/samples/iam_sample/iam_client_test.py | samples/iam_sample/iam_client_test.py | #
# Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test for generated sample module."""
import unittest
import six
from apitools.base.py.testing import mock
from samples.iam_sample.iam_v1 import iam_v1_client # nopep8
from samples.iam_sample.iam_v1 import iam_v1_messages # nopep8
class DnsGenClientSanityTest(unittest.TestCase):
def testBaseUrl(self):
self.assertEqual(u'https://iam.googleapis.com/',
iam_v1_client.IamV1.BASE_URL)
def testMessagesModule(self):
self.assertEqual(iam_v1_messages, iam_v1_client.IamV1.MESSAGES_MODULE)
def testAttributes(self):
inner_classes = set([])
for key, value in iam_v1_client.IamV1.__dict__.items():
if isinstance(value, six.class_types):
inner_classes.add(key)
self.assertEqual(set([
'IamPoliciesService',
'ProjectsService',
'ProjectsServiceAccountsKeysService',
'ProjectsServiceAccountsService',
'RolesService']), inner_classes)
class IamGenClientTest(unittest.TestCase):
def setUp(self):
self.mocked_iam_v1 = mock.Client(iam_v1_client.IamV1)
self.mocked_iam_v1.Mock()
self.addCleanup(self.mocked_iam_v1.Unmock)
def testFlatPath(self):
get_method_config = (self.mocked_iam_v1.projects_serviceAccounts_keys
.GetMethodConfig('Get'))
self.assertEqual('v1/projects/{projectsId}/serviceAccounts'
'/{serviceAccountsId}/keys/{keysId}',
get_method_config.flat_path)
self.assertEqual('v1/{+name}', get_method_config.relative_path)
def testServiceAccountsKeysList(self):
response_key = iam_v1_messages.ServiceAccountKey(
name=u'test-key')
self.mocked_iam_v1.projects_serviceAccounts_keys.List.Expect(
iam_v1_messages.IamProjectsServiceAccountsKeysListRequest(
name=u'test-service-account.'),
iam_v1_messages.ListServiceAccountKeysResponse(
keys=[response_key]))
result = self.mocked_iam_v1.projects_serviceAccounts_keys.List(
iam_v1_messages.IamProjectsServiceAccountsKeysListRequest(
name=u'test-service-account.'))
self.assertEqual([response_key], result.keys)
| python | Apache-2.0 | 1b14f58b3c895f542c724028dd0fb8ae0d816510 | 2026-01-05T07:12:01.390580Z | false |
google/apitools | https://github.com/google/apitools/blob/1b14f58b3c895f542c724028dd0fb8ae0d816510/samples/iam_sample/iam_v1/iam_v1_messages.py | samples/iam_sample/iam_v1/iam_v1_messages.py | """Generated message classes for iam version v1.
Manages identity and access control for Google Cloud Platform resources,
including the creation of service accounts, which you can use to authenticate
to Google and make API calls.
"""
# NOTE: This file is autogenerated and should not be edited by hand.
from __future__ import absolute_import
from apitools.base.protorpclite import messages as _messages
from apitools.base.py import encoding
package = 'iam'
class AuditConfig(_messages.Message):
r"""Enables "data access" audit logging for a service and specifies a list
of members that are log-exempted.
Fields:
exemptedMembers: Specifies the identities that are exempted from "data
access" audit logging for the `service` specified above. Follows the
same format of Binding.members.
service: Specifies a service that will be enabled for "data access" audit
logging. For example, `resourcemanager`, `storage`, `compute`.
`allServices` is a special value that covers all services.
"""
exemptedMembers = _messages.StringField(1, repeated=True)
service = _messages.StringField(2)
class Binding(_messages.Message):
r"""Associates `members` with a `role`.
Fields:
members: Specifies the identities requesting access for a Cloud Platform
resource. `members` can have the following values: * `allUsers`: A
special identifier that represents anyone who is on the internet;
with or without a Google account. * `allAuthenticatedUsers`: A special
identifier that represents anyone who is authenticated with a Google
account or a service account. * `user:{emailid}`: An email address that
represents a specific Google account. For example, `alice@gmail.com`
or `joe@example.com`. * `serviceAccount:{emailid}`: An email address
that represents a service account. For example, `my-other-
app@appspot.gserviceaccount.com`. * `group:{emailid}`: An email address
that represents a Google group. For example, `admins@example.com`. *
`domain:{domain}`: A Google Apps domain name that represents all the
users of that domain. For example, `google.com` or `example.com`.
role: Role that is assigned to `members`. For example, `roles/viewer`,
`roles/editor`, or `roles/owner`. Required
"""
members = _messages.StringField(1, repeated=True)
role = _messages.StringField(2)
class CloudAuditOptions(_messages.Message):
r"""Write a Cloud Audit log"""
class Condition(_messages.Message):
r"""A condition to be met.
Enums:
IamValueValuesEnum: Trusted attributes supplied by the IAM system.
OpValueValuesEnum: An operator to apply the subject with.
SysValueValuesEnum: Trusted attributes supplied by any service that owns
resources and uses the IAM system for access control.
Fields:
iam: Trusted attributes supplied by the IAM system.
op: An operator to apply the subject with.
svc: Trusted attributes discharged by the service.
sys: Trusted attributes supplied by any service that owns resources and
uses the IAM system for access control.
value: DEPRECATED. Use 'values' instead.
values: The objects of the condition. This is mutually exclusive with
'value'.
"""
class IamValueValuesEnum(_messages.Enum):
r"""Trusted attributes supplied by the IAM system.
Values:
NO_ATTR: Default non-attribute.
AUTHORITY: Either principal or (if present) authority
ATTRIBUTION: selector Always the original principal, but making clear
"""
NO_ATTR = 0
AUTHORITY = 1
ATTRIBUTION = 2
class OpValueValuesEnum(_messages.Enum):
r"""An operator to apply the subject with.
Values:
NO_OP: Default no-op.
EQUALS: DEPRECATED. Use IN instead.
NOT_EQUALS: DEPRECATED. Use NOT_IN instead.
IN: Set-inclusion check.
NOT_IN: Set-exclusion check.
DISCHARGED: Subject is discharged
"""
NO_OP = 0
EQUALS = 1
NOT_EQUALS = 2
IN = 3
NOT_IN = 4
DISCHARGED = 5
class SysValueValuesEnum(_messages.Enum):
r"""Trusted attributes supplied by any service that owns resources and
uses the IAM system for access control.
Values:
NO_ATTR: Default non-attribute type
REGION: Region of the resource
SERVICE: Service name
NAME: Resource name
IP: IP address of the caller
"""
NO_ATTR = 0
REGION = 1
SERVICE = 2
NAME = 3
IP = 4
iam = _messages.EnumField('IamValueValuesEnum', 1)
op = _messages.EnumField('OpValueValuesEnum', 2)
svc = _messages.StringField(3)
sys = _messages.EnumField('SysValueValuesEnum', 4)
value = _messages.StringField(5)
values = _messages.StringField(6, repeated=True)
# NOTE: generated protorpc message definitions. Field numbers are wire-format
# identifiers — never renumber them by hand.


class CounterOptions(_messages.Message):
  r"""Options for counters

  Fields:
    field: The field value to attribute.
    metric: The metric to update.
  """

  field = _messages.StringField(1)
  metric = _messages.StringField(2)


class CreateServiceAccountKeyRequest(_messages.Message):
  r"""The service account key create request.

  Enums:
    PrivateKeyTypeValueValuesEnum: The output format of the private key.
      `GOOGLE_CREDENTIALS_FILE` is the default output format.

  Fields:
    privateKeyType: The output format of the private key.
      `GOOGLE_CREDENTIALS_FILE` is the default output format.
  """

  class PrivateKeyTypeValueValuesEnum(_messages.Enum):
    r"""The output format of the private key. `GOOGLE_CREDENTIALS_FILE` is the
    default output format.

    Values:
      TYPE_UNSPECIFIED: Unspecified. Equivalent to
        `TYPE_GOOGLE_CREDENTIALS_FILE`.
      TYPE_PKCS12_FILE: PKCS12 format. The password for the PKCS12 file is
        `notasecret`. For more information, see
        https://tools.ietf.org/html/rfc7292.
      TYPE_GOOGLE_CREDENTIALS_FILE: Google Credentials File format.
    """
    TYPE_UNSPECIFIED = 0
    TYPE_PKCS12_FILE = 1
    TYPE_GOOGLE_CREDENTIALS_FILE = 2

  privateKeyType = _messages.EnumField('PrivateKeyTypeValueValuesEnum', 1)


class CreateServiceAccountRequest(_messages.Message):
  r"""The service account create request.

  Fields:
    accountId: Required. The account id that is used to generate the service
      account email address and a stable unique id. It is unique within a
      project, must be 1-63 characters long, and match the regular expression
      `[a-z]([-a-z0-9]*[a-z0-9])` to comply with RFC1035.
    serviceAccount: The ServiceAccount resource to create. Currently, only the
      following values are user assignable: `display_name` .
  """

  accountId = _messages.StringField(1)
  serviceAccount = _messages.MessageField('ServiceAccount', 2)


class DataAccessOptions(_messages.Message):
  r"""Write a Data Access (Gin) log"""


class Empty(_messages.Message):
  r"""A generic empty message that you can re-use to avoid defining duplicated
  empty messages in your APIs. A typical example is to use it as the request
  or the response type of an API method. For instance: service Foo {
  rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty); } The
  JSON representation for `Empty` is empty JSON object `{}`.
  """
class GetPolicyDetailsRequest(_messages.Message):
  r"""The request to get the current policy and the policies on the inherited
  resources the user has access to.

  Fields:
    fullResourcePath: REQUIRED: The full resource path of the current policy
      being requested, e.g., `//dataflow.googleapis.com/projects/../jobs/..`.
    pageSize: Limit on the number of policies to include in the response.
      Further accounts can subsequently be obtained by including the
      GetPolicyDetailsResponse.next_page_token in a subsequent request. If
      zero, the default page size 20 will be used. Must be given a value in
      range [0, 100], otherwise an invalid argument error will be returned.
    pageToken: Optional pagination token returned in an earlier
      GetPolicyDetailsResponse.next_page_token response.
  """

  fullResourcePath = _messages.StringField(1)
  # INT32 variant: the service models page_size as a 32-bit integer.
  pageSize = _messages.IntegerField(2, variant=_messages.Variant.INT32)
  pageToken = _messages.StringField(3)


class GetPolicyDetailsResponse(_messages.Message):
  r"""The response to the `GetPolicyDetailsRequest` containing the current
  policy and the policies on the inherited resources the user has access to.

  Fields:
    nextPageToken: To retrieve the next page of results, set
      GetPolicyDetailsRequest.page_token to this value. If this value is
      empty, then there are not any further policies that the user has access
      to. The lifetime is 60 minutes. An "Expired pagination token" error will
      be returned if exceeded.
    policies: The current policy and all the inherited policies the user has
      access to.
  """

  nextPageToken = _messages.StringField(1)
  policies = _messages.MessageField('PolicyDetail', 2, repeated=True)
# The IamProjectsServiceAccounts*Request classes below are synthetic request
# wrappers generated from the API's HTTP method configs; `required=True`
# fields are URL path parameters, the MessageField (when present) is the
# HTTP request body.


class IamProjectsServiceAccountsCreateRequest(_messages.Message):
  r"""A IamProjectsServiceAccountsCreateRequest object.

  Fields:
    createServiceAccountRequest: A CreateServiceAccountRequest resource to be
      passed as the request body.
    name: Required. The resource name of the project associated with the
      service accounts, such as `projects/my-project-123`.
  """

  createServiceAccountRequest = _messages.MessageField('CreateServiceAccountRequest', 1)
  name = _messages.StringField(2, required=True)


class IamProjectsServiceAccountsDeleteRequest(_messages.Message):
  r"""A IamProjectsServiceAccountsDeleteRequest object.

  Fields:
    name: The resource name of the service account in the following format:
      `projects/{project}/serviceAccounts/{account}`. Using `-` as a wildcard
      for the project will infer the project from the account. The `account`
      value can be the `email` address or the `unique_id` of the service
      account.
  """

  name = _messages.StringField(1, required=True)


class IamProjectsServiceAccountsGetIamPolicyRequest(_messages.Message):
  r"""A IamProjectsServiceAccountsGetIamPolicyRequest object.

  Fields:
    options_requestedPolicyVersion: Optional. The policy format version to be
      returned. Acceptable values are 0 and 1. If the value is 0, or the field
      is omitted, policy format version 1 will be returned.
    resource: REQUIRED: The resource for which the policy is being requested.
      `resource` is usually specified as a path, such as
      `projects/*project*/zones/*zone*/disks/*disk*`. The format for the path
      specified in this value is resource specific and is specified in the
      `getIamPolicy` documentation.
  """

  options_requestedPolicyVersion = _messages.IntegerField(1, variant=_messages.Variant.INT32)
  resource = _messages.StringField(2, required=True)


class IamProjectsServiceAccountsGetRequest(_messages.Message):
  r"""A IamProjectsServiceAccountsGetRequest object.

  Fields:
    name: The resource name of the service account in the following format:
      `projects/{project}/serviceAccounts/{account}`. Using `-` as a wildcard
      for the project will infer the project from the account. The `account`
      value can be the `email` address or the `unique_id` of the service
      account.
  """

  name = _messages.StringField(1, required=True)
class IamProjectsServiceAccountsKeysCreateRequest(_messages.Message):
  r"""A IamProjectsServiceAccountsKeysCreateRequest object.

  Fields:
    createServiceAccountKeyRequest: A CreateServiceAccountKeyRequest resource
      to be passed as the request body.
    name: The resource name of the service account in the following format:
      `projects/{project}/serviceAccounts/{account}`. Using `-` as a wildcard
      for the project will infer the project from the account. The `account`
      value can be the `email` address or the `unique_id` of the service
      account.
  """

  createServiceAccountKeyRequest = _messages.MessageField('CreateServiceAccountKeyRequest', 1)
  name = _messages.StringField(2, required=True)


class IamProjectsServiceAccountsKeysDeleteRequest(_messages.Message):
  r"""A IamProjectsServiceAccountsKeysDeleteRequest object.

  Fields:
    name: The resource name of the service account key in the following
      format: `projects/{project}/serviceAccounts/{account}/keys/{key}`. Using
      `-` as a wildcard for the project will infer the project from the
      account. The `account` value can be the `email` address or the
      `unique_id` of the service account.
  """

  name = _messages.StringField(1, required=True)


class IamProjectsServiceAccountsKeysGetRequest(_messages.Message):
  r"""A IamProjectsServiceAccountsKeysGetRequest object.

  Enums:
    PublicKeyTypeValueValuesEnum: The output format of the public key
      requested. X509_PEM is the default output format.

  Fields:
    name: The resource name of the service account key in the following
      format: `projects/{project}/serviceAccounts/{account}/keys/{key}`.
      Using `-` as a wildcard for the project will infer the project from the
      account. The `account` value can be the `email` address or the
      `unique_id` of the service account.
    publicKeyType: The output format of the public key requested. X509_PEM is
      the default output format.
  """

  class PublicKeyTypeValueValuesEnum(_messages.Enum):
    r"""The output format of the public key requested. X509_PEM is the default
    output format.

    Values:
      TYPE_NONE: <no description>
      TYPE_X509_PEM_FILE: <no description>
      TYPE_RAW_PUBLIC_KEY: <no description>
    """
    TYPE_NONE = 0
    TYPE_X509_PEM_FILE = 1
    TYPE_RAW_PUBLIC_KEY = 2

  name = _messages.StringField(1, required=True)
  publicKeyType = _messages.EnumField('PublicKeyTypeValueValuesEnum', 2)


class IamProjectsServiceAccountsKeysListRequest(_messages.Message):
  r"""A IamProjectsServiceAccountsKeysListRequest object.

  Enums:
    KeyTypesValueValuesEnum: Filters the types of keys the user wants to
      include in the list response. Duplicate key types are not allowed. If no
      key type is provided, all keys are returned.

  Fields:
    keyTypes: Filters the types of keys the user wants to include in the list
      response. Duplicate key types are not allowed. If no key type is
      provided, all keys are returned.
    name: The resource name of the service account in the following format:
      `projects/{project}/serviceAccounts/{account}`. Using `-` as a wildcard
      for the project, will infer the project from the account. The `account`
      value can be the `email` address or the `unique_id` of the service
      account.
  """

  class KeyTypesValueValuesEnum(_messages.Enum):
    r"""Filters the types of keys the user wants to include in the list
    response. Duplicate key types are not allowed. If no key type is provided,
    all keys are returned.

    Values:
      KEY_TYPE_UNSPECIFIED: <no description>
      USER_MANAGED: <no description>
      SYSTEM_MANAGED: <no description>
    """
    KEY_TYPE_UNSPECIFIED = 0
    USER_MANAGED = 1
    SYSTEM_MANAGED = 2

  # repeated=True: multiple keyTypes filters may be supplied in one request.
  keyTypes = _messages.EnumField('KeyTypesValueValuesEnum', 1, repeated=True)
  name = _messages.StringField(2, required=True)
class IamProjectsServiceAccountsListRequest(_messages.Message):
  r"""A IamProjectsServiceAccountsListRequest object.

  Fields:
    name: Required. The resource name of the project associated with the
      service accounts, such as `projects/my-project-123`.
    pageSize: Optional limit on the number of service accounts to include in
      the response. Further accounts can subsequently be obtained by including
      the ListServiceAccountsResponse.next_page_token in a subsequent request.
    pageToken: Optional pagination token returned in an earlier
      ListServiceAccountsResponse.next_page_token.
    removeDeletedServiceAccounts: Do not list service accounts deleted from
      Gaia. <b><font color="red">DO NOT INCLUDE IN EXTERNAL
      DOCUMENTATION</font></b>.
  """

  name = _messages.StringField(1, required=True)
  pageSize = _messages.IntegerField(2, variant=_messages.Variant.INT32)
  pageToken = _messages.StringField(3)
  removeDeletedServiceAccounts = _messages.BooleanField(4)


class IamProjectsServiceAccountsSetIamPolicyRequest(_messages.Message):
  r"""A IamProjectsServiceAccountsSetIamPolicyRequest object.

  Fields:
    resource: REQUIRED: The resource for which the policy is being specified.
      `resource` is usually specified as a path, such as
      `projects/*project*/zones/*zone*/disks/*disk*`. The format for the path
      specified in this value is resource specific and is specified in the
      `setIamPolicy` documentation.
    setIamPolicyRequest: A SetIamPolicyRequest resource to be passed as the
      request body.
  """

  resource = _messages.StringField(1, required=True)
  setIamPolicyRequest = _messages.MessageField('SetIamPolicyRequest', 2)


class IamProjectsServiceAccountsSignBlobRequest(_messages.Message):
  r"""A IamProjectsServiceAccountsSignBlobRequest object.

  Fields:
    name: The resource name of the service account in the following format:
      `projects/{project}/serviceAccounts/{account}`. Using `-` as a wildcard
      for the project will infer the project from the account. The `account`
      value can be the `email` address or the `unique_id` of the service
      account.
    signBlobRequest: A SignBlobRequest resource to be passed as the request
      body.
  """

  name = _messages.StringField(1, required=True)
  signBlobRequest = _messages.MessageField('SignBlobRequest', 2)


class IamProjectsServiceAccountsSignJwtRequest(_messages.Message):
  r"""A IamProjectsServiceAccountsSignJwtRequest object.

  Fields:
    name: The resource name of the service account in the following format:
      `projects/{project}/serviceAccounts/{account}`. Using `-` as a wildcard
      for the project will infer the project from the account. The `account`
      value can be the `email` address or the `unique_id` of the service
      account.
    signJwtRequest: A SignJwtRequest resource to be passed as the request
      body.
  """

  name = _messages.StringField(1, required=True)
  signJwtRequest = _messages.MessageField('SignJwtRequest', 2)


class IamProjectsServiceAccountsTestIamPermissionsRequest(_messages.Message):
  r"""A IamProjectsServiceAccountsTestIamPermissionsRequest object.

  Fields:
    resource: REQUIRED: The resource for which the policy detail is being
      requested. `resource` is usually specified as a path, such as
      `projects/*project*/zones/*zone*/disks/*disk*`. The format for the path
      specified in this value is resource specific and is specified in the
      `testIamPermissions` documentation.
    testIamPermissionsRequest: A TestIamPermissionsRequest resource to be
      passed as the request body.
  """

  resource = _messages.StringField(1, required=True)
  testIamPermissionsRequest = _messages.MessageField('TestIamPermissionsRequest', 2)
class ListServiceAccountKeysResponse(_messages.Message):
  r"""The service account keys list response.

  Fields:
    keys: The public keys for the service account.
  """

  keys = _messages.MessageField('ServiceAccountKey', 1, repeated=True)


class ListServiceAccountsResponse(_messages.Message):
  r"""The service account list response.

  Fields:
    accounts: The list of matching service accounts.
    nextPageToken: To retrieve the next page of results, set
      ListServiceAccountsRequest.page_token to this value.
  """

  accounts = _messages.MessageField('ServiceAccount', 1, repeated=True)
  nextPageToken = _messages.StringField(2)


class LogConfig(_messages.Message):
  r"""Specifies what kind of log the caller must write Increment a streamz
  counter with the specified metric and field names. Metric names should
  start with a '/', generally be lowercase-only, and end in "_count". Field
  names should not contain an initial slash. The actual exported metric names
  will have "/iam/policy" prepended. Field names correspond to IAM request
  parameters and field values are their respective values. At present the
  only supported field names are - "iam_principal", corresponding to
  IAMContext.principal; - "" (empty string), resulting in one aggretated
  counter with no field. Examples: counter { metric: "/debug_access_count"
  field: "iam_principal" } ==> increment counter
  /iam/policy/backend_debug_access_count
  {iam_principal=[value of IAMContext.principal]} At this time we do not
  support: * multiple field names (though this may be supported in the future)
  * decrementing the counter * incrementing it by anything other than 1

  Fields:
    cloudAudit: Cloud audit options.
    counter: Counter options.
    dataAccess: Data access options.
  """

  # Exactly one of these options is expected to be populated per LogConfig
  # entry in practice; the message itself does not enforce that.
  # NOTE(review): inferred from the proto shape — confirm against the service.
  cloudAudit = _messages.MessageField('CloudAuditOptions', 1)
  counter = _messages.MessageField('CounterOptions', 2)
  dataAccess = _messages.MessageField('DataAccessOptions', 3)
class Policy(_messages.Message):
  r"""Defines an Identity and Access Management (IAM) policy. It is used to
  specify access control policies for Cloud Platform resources. A `Policy`
  consists of a list of `bindings`. A `Binding` binds a list of `members` to a
  `role`, where the members can be user accounts, Google groups, Google
  domains, and service accounts. A `role` is a named list of permissions
  defined by IAM. **Example** { "bindings": [ {
  "role": "roles/owner", "members": [
  "user:mike@example.com", "group:admins@example.com",
  "domain:google.com", "serviceAccount:my-other-
  app@appspot.gserviceaccount.com", ] }, {
  "role": "roles/viewer", "members": ["user:sean@example.com"]
  } ] } For a description of IAM and its features, see the [IAM
  developer's guide](https://cloud.google.com/iam).

  Fields:
    auditConfigs: Specifies audit logging configs for "data access". "data
      access": generally refers to data reads/writes and admin reads. "admin
      activity": generally refers to admin writes. Note: `AuditConfig`
      doesn't apply to "admin activity", which always enables audit logging.
    bindings: Associates a list of `members` to a `role`. Multiple `bindings`
      must not be specified for the same `role`. `bindings` with no members
      will result in an error.
    etag: `etag` is used for optimistic concurrency control as a way to help
      prevent simultaneous updates of a policy from overwriting each other. It
      is strongly suggested that systems make use of the `etag` in the read-
      modify-write cycle to perform policy updates in order to avoid race
      conditions: An `etag` is returned in the response to `getIamPolicy`, and
      systems are expected to put that etag in the request to `setIamPolicy`
      to ensure that their change will be applied to the same version of the
      policy. If no `etag` is provided in the call to `setIamPolicy`, then
      the existing policy is overwritten blindly.
    iamOwned: A boolean attribute.
    rules: If more than one rule is specified, the rules are applied in the
      following manner: - All matching LOG rules are always applied. - If any
      DENY/DENY_WITH_LOG rule matches, permission is denied. Logging will be
      applied if one or more matching rule requires logging. - Otherwise, if
      any ALLOW/ALLOW_WITH_LOG rule matches, permission is granted.
      Logging will be applied if one or more matching rule requires logging. -
      Otherwise, if no rule applies, permission is denied.
    version: Version of the `Policy`. The default version is 0.
  """

  auditConfigs = _messages.MessageField('AuditConfig', 1, repeated=True)
  bindings = _messages.MessageField('Binding', 2, repeated=True)
  # BytesField: the etag is an opaque binary token, not text.
  etag = _messages.BytesField(3)
  iamOwned = _messages.BooleanField(4)
  rules = _messages.MessageField('Rule', 5, repeated=True)
  version = _messages.IntegerField(6, variant=_messages.Variant.INT32)


class PolicyDetail(_messages.Message):
  r"""A policy and its full resource path.

  Fields:
    fullResourcePath: The full resource path of the policy e.g.,
      `//dataflow.googleapis.com/projects/../jobs/..`. Note that a resource
      and its inherited resource have different `full_resource_path`.
    policy: The policy of a `resource/project/folder`.
  """

  fullResourcePath = _messages.StringField(1)
  policy = _messages.MessageField('Policy', 2)
class QueryGrantableRolesRequest(_messages.Message):
  r"""The grantable role query request.

  Fields:
    fullResourceName: Required. The full resource name to query from the list
      of grantable roles. The name follows the Google Cloud Platform resource
      format. For example, a Cloud Platform project with id `my-project` will
      be named `//cloudresourcemanager.googleapis.com/projects/my-project`.
  """

  fullResourceName = _messages.StringField(1)


class QueryGrantableRolesResponse(_messages.Message):
  r"""The grantable role query response.

  Fields:
    roles: The list of matching roles.
  """

  roles = _messages.MessageField('Role', 1, repeated=True)


class Role(_messages.Message):
  r"""A role in the Identity and Access Management API.

  Fields:
    apiTokens: A string attribute.
    description: Optional. A human-readable description for the role.
    name: The name of the role. Examples of roles names are: `roles/editor`,
      `roles/viewer` and `roles/logging.viewer`.
    title: Optional. A human-readable title for the role. Typically this is
      limited to 100 UTF-8 bytes.
  """

  apiTokens = _messages.StringField(1, repeated=True)
  description = _messages.StringField(2)
  name = _messages.StringField(3)
  title = _messages.StringField(4)


class Rule(_messages.Message):
  r"""A rule to be applied in a Policy.

  Enums:
    ActionValueValuesEnum: Required

  Fields:
    action: Required
    conditions: Additional restrictions that must be met
    description: Human-readable description of the rule.
    in_: If one or more 'in' clauses are specified, the rule matches if the
      PRINCIPAL/AUTHORITY_SELECTOR is in at least one of these entries.
    logConfig: The config returned to callers of tech.iam.IAM.CheckPolicy for
      any entries that match the LOG action.
    notIn: If one or more 'not_in' clauses are specified, the rule matches if
      the PRINCIPAL/AUTHORITY_SELECTOR is in none of the entries. The format
      for in and not_in entries is the same as for members in a Binding (see
      google/iam/v1/policy.proto).
    permissions: A permission is a string of form '<service>.<resource
      type>.<verb>' (e.g., 'storage.buckets.list'). A value of '*' matches all
      permissions, and a verb part of '*' (e.g., 'storage.buckets.*') matches
      all verbs.
  """

  class ActionValueValuesEnum(_messages.Enum):
    r"""Required

    Values:
      NO_ACTION: Default no action.
      ALLOW: Matching 'Entries' grant access.
      ALLOW_WITH_LOG: Matching 'Entries' grant access and the caller promises
        to log the request per the returned log_configs.
      DENY: Matching 'Entries' deny access.
      DENY_WITH_LOG: Matching 'Entries' deny access and the caller promises to
        log the request per the returned log_configs.
      LOG: Matching 'Entries' tell IAM.Check callers to generate logs.
    """
    NO_ACTION = 0
    ALLOW = 1
    ALLOW_WITH_LOG = 2
    DENY = 3
    DENY_WITH_LOG = 4
    LOG = 5

  action = _messages.EnumField('ActionValueValuesEnum', 1)
  conditions = _messages.MessageField('Condition', 2, repeated=True)
  description = _messages.StringField(3)
  # Trailing underscore avoids shadowing the Python keyword `in`; the wire
  # name is still `in`.
  in_ = _messages.StringField(4, repeated=True)
  logConfig = _messages.MessageField('LogConfig', 5, repeated=True)
  notIn = _messages.StringField(6, repeated=True)
  permissions = _messages.StringField(7, repeated=True)
class ServiceAccount(_messages.Message):
  r"""A service account in the Identity and Access Management API. To create
  a service account, specify the `project_id` and the `account_id` for the
  account. The `account_id` is unique within the project, and is used to
  generate the service account email address and a stable `unique_id`. All
  other methods can identify the service account using the format
  `projects/{project}/serviceAccounts/{account}`. Using `-` as a wildcard for
  the project will infer the project from the account. The `account` value can
  be the `email` address or the `unique_id` of the service account.

  Fields:
    description: Optional. A user-specified opaque description of the service
      account.
    displayName: Optional. A user-specified description of the service
      account. Must be fewer than 100 UTF-8 bytes.
    email: @OutputOnly The email address of the service account.
    etag: Used to perform a consistent read-modify-write.
    name: The resource name of the service account in the following format:
      `projects/{project}/serviceAccounts/{account}`. Requests using `-` as a
      wildcard for the project will infer the project from the `account` and
      the `account` value can be the `email` address or the `unique_id` of the
      service account. In responses the resource name will always be in the
      format `projects/{project}/serviceAccounts/{email}`.
    oauth2ClientId: @OutputOnly. The OAuth2 client id for the service account.
      This is used in conjunction with the OAuth2 clientconfig API to make
      three legged OAuth2 (3LO) flows to access the data of Google users.
    projectId: @OutputOnly The id of the project that owns the service
      account.
    uniqueId: @OutputOnly The unique and stable id of the service account.
  """

  description = _messages.StringField(1)
  displayName = _messages.StringField(2)
  email = _messages.StringField(3)
  # BytesField: opaque concurrency-control token, mirrors Policy.etag.
  etag = _messages.BytesField(4)
  name = _messages.StringField(5)
  oauth2ClientId = _messages.StringField(6)
  projectId = _messages.StringField(7)
  uniqueId = _messages.StringField(8)
class ServiceAccountKey(_messages.Message):
r"""Represents a service account key. A service account has two sets of
key-pairs: user-managed, and system-managed. User-managed key-pairs can be
created and deleted by users. Users are responsible for rotating these keys
periodically to ensure security of their service accounts. Users retain the
private key of these key-pairs, and Google retains ONLY the public key.
System-managed key-pairs are managed automatically by Google, and rotated
daily without user intervention. The private key never leaves Google's
servers to maximize security. Public keys for all service accounts are also
published at the OAuth2 Service Account API.
Enums:
PrivateKeyTypeValueValuesEnum: The output format for the private key. Only
provided in `CreateServiceAccountKey` responses, not in
`GetServiceAccountKey` or `ListServiceAccountKey` responses. Google
never exposes system-managed private keys, and never retains user-
managed private keys.
Fields:
name: The resource name of the service account key in the following format
`projects/{project}/serviceAccounts/{account}/keys/{key}`.
privateKeyData: The private key data. Only provided in
`CreateServiceAccountKey` responses.
privateKeyType: The output format for the private key. Only provided in
`CreateServiceAccountKey` responses, not in `GetServiceAccountKey` or
`ListServiceAccountKey` responses. Google never exposes system-managed
private keys, and never retains user-managed private keys.
publicKeyData: The public key data. Only provided in
`GetServiceAccountKey` responses.
validAfterTime: The key can be used after this timestamp.
validBeforeTime: The key can be used before this timestamp.
"""
class PrivateKeyTypeValueValuesEnum(_messages.Enum):
r"""The output format for the private key. Only provided in
`CreateServiceAccountKey` responses, not in `GetServiceAccountKey` or
`ListServiceAccountKey` responses. Google never exposes system-managed
private keys, and never retains user-managed private keys.
Values:
TYPE_UNSPECIFIED: Unspecified. Equivalent to
`TYPE_GOOGLE_CREDENTIALS_FILE`.
TYPE_PKCS12_FILE: PKCS12 format. The password for the PKCS12 file is
`notasecret`. For more information, see
https://tools.ietf.org/html/rfc7292.
TYPE_GOOGLE_CREDENTIALS_FILE: Google Credentials File format.
"""
TYPE_UNSPECIFIED = 0
TYPE_PKCS12_FILE = 1
TYPE_GOOGLE_CREDENTIALS_FILE = 2
name = _messages.StringField(1)
privateKeyData = _messages.BytesField(2)
"""Generated client library for iam version v1."""
# NOTE: This file is autogenerated and should not be edited by hand.
from __future__ import absolute_import
from apitools.base.py import base_api
from samples.iam_sample.iam_v1 import iam_v1_messages as messages
class IamV1(base_api.BaseApiClient):
  """Generated client library for service iam version v1."""

  # Message definitions used to (de)serialize request/response bodies.
  MESSAGES_MODULE = messages
  BASE_URL = 'https://iam.googleapis.com/'
  # Empty: no mTLS endpoint is configured for this API.
  MTLS_BASE_URL = ''

  _PACKAGE = 'iam'
  # OAuth2 scope required by every method of this client.
  _SCOPES = ['https://www.googleapis.com/auth/cloud-platform']
  _VERSION = 'v1'
  # Placeholder credentials — callers supply real values at auth time.
  _CLIENT_ID = 'CLIENT_ID'
  _CLIENT_SECRET = 'CLIENT_SECRET'
  _USER_AGENT = 'x_Tw5K8nnjoRAqULM9PFAC2b'
  _CLIENT_CLASS_NAME = 'IamV1'
  _URL_VERSION = 'v1'
  _API_KEY = None
  def __init__(self, url='', credentials=None,
               get_credentials=True, http=None, model=None,
               log_request=False, log_response=False,
               credentials_args=None, default_global_params=None,
               additional_http_headers=None, response_encoding=None):
    """Create a new iam handle.

    All arguments are forwarded to base_api.BaseApiClient; an empty `url`
    falls back to BASE_URL.
    """
    url = url or self.BASE_URL
    super(IamV1, self).__init__(
        url, credentials=credentials,
        get_credentials=get_credentials, http=http, model=model,
        log_request=log_request, log_response=log_response,
        credentials_args=credentials_args,
        default_global_params=default_global_params,
        additional_http_headers=additional_http_headers,
        response_encoding=response_encoding)
    # One service object per API resource collection.
    self.iamPolicies = self.IamPoliciesService(self)
    self.projects_serviceAccounts_keys = self.ProjectsServiceAccountsKeysService(self)
    self.projects_serviceAccounts = self.ProjectsServiceAccountsService(self)
    self.projects = self.ProjectsService(self)
    self.roles = self.RolesService(self)
class IamPoliciesService(base_api.BaseApiService):
"""Service class for the iamPolicies resource."""
_NAME = 'iamPolicies'
def __init__(self, client):
super(IamV1.IamPoliciesService, self).__init__(client)
self._upload_configs = {
}
def GetPolicyDetails(self, request, global_params=None):
r"""Returns the current IAM policy and the policies on the inherited resources.
that the user has access to.
Args:
request: (GetPolicyDetailsRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GetPolicyDetailsResponse) The response message.
"""
config = self.GetMethodConfig('GetPolicyDetails')
return self._RunMethod(
config, request, global_params=global_params)
GetPolicyDetails.method_config = lambda: base_api.ApiMethodInfo(
http_method='POST',
method_id='iam.iamPolicies.getPolicyDetails',
ordered_params=[],
path_params=[],
query_params=[],
relative_path='v1/iamPolicies:getPolicyDetails',
request_field='<request>',
request_type_name='GetPolicyDetailsRequest',
response_type_name='GetPolicyDetailsResponse',
supports_download=False,
)
class ProjectsServiceAccountsKeysService(base_api.BaseApiService):
"""Service class for the projects_serviceAccounts_keys resource."""
_NAME = 'projects_serviceAccounts_keys'
def __init__(self, client):
super(IamV1.ProjectsServiceAccountsKeysService, self).__init__(client)
self._upload_configs = {
}
def Create(self, request, global_params=None):
r"""Creates a ServiceAccountKey.
and returns it.
Args:
request: (IamProjectsServiceAccountsKeysCreateRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(ServiceAccountKey) The response message.
"""
config = self.GetMethodConfig('Create')
return self._RunMethod(
config, request, global_params=global_params)
Create.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1/projects/{projectsId}/serviceAccounts/{serviceAccountsId}/keys',
http_method='POST',
method_id='iam.projects.serviceAccounts.keys.create',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1/{+name}/keys',
request_field='createServiceAccountKeyRequest',
request_type_name='IamProjectsServiceAccountsKeysCreateRequest',
response_type_name='ServiceAccountKey',
supports_download=False,
)
def Delete(self, request, global_params=None):
r"""Deletes a ServiceAccountKey.
Args:
request: (IamProjectsServiceAccountsKeysDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Empty) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1/projects/{projectsId}/serviceAccounts/{serviceAccountsId}/keys/{keysId}',
http_method='DELETE',
method_id='iam.projects.serviceAccounts.keys.delete',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1/{+name}',
request_field='',
request_type_name='IamProjectsServiceAccountsKeysDeleteRequest',
response_type_name='Empty',
supports_download=False,
)
def Get(self, request, global_params=None):
r"""Gets the ServiceAccountKey.
by key id.
Args:
request: (IamProjectsServiceAccountsKeysGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(ServiceAccountKey) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1/projects/{projectsId}/serviceAccounts/{serviceAccountsId}/keys/{keysId}',
http_method='GET',
method_id='iam.projects.serviceAccounts.keys.get',
ordered_params=['name'],
path_params=['name'],
query_params=['publicKeyType'],
relative_path='v1/{+name}',
request_field='',
request_type_name='IamProjectsServiceAccountsKeysGetRequest',
response_type_name='ServiceAccountKey',
supports_download=False,
)
def List(self, request, global_params=None):
r"""Lists ServiceAccountKeys.
Args:
request: (IamProjectsServiceAccountsKeysListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(ListServiceAccountKeysResponse) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1/projects/{projectsId}/serviceAccounts/{serviceAccountsId}/keys',
http_method='GET',
method_id='iam.projects.serviceAccounts.keys.list',
ordered_params=['name'],
path_params=['name'],
query_params=['keyTypes'],
relative_path='v1/{+name}/keys',
request_field='',
request_type_name='IamProjectsServiceAccountsKeysListRequest',
response_type_name='ListServiceAccountKeysResponse',
supports_download=False,
)
  class ProjectsServiceAccountsService(base_api.BaseApiService):
    """Service class for the projects_serviceAccounts resource."""

    _NAME = 'projects_serviceAccounts'

    def __init__(self, client):
      super(IamV1.ProjectsServiceAccountsService, self).__init__(client)
      # No media-upload methods on this resource.
      self._upload_configs = {
          }
def Create(self, request, global_params=None):
r"""Creates a ServiceAccount.
and returns it.
Args:
request: (IamProjectsServiceAccountsCreateRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(ServiceAccount) The response message.
"""
config = self.GetMethodConfig('Create')
return self._RunMethod(
config, request, global_params=global_params)
Create.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1/projects/{projectsId}/serviceAccounts',
http_method='POST',
method_id='iam.projects.serviceAccounts.create',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1/{+name}/serviceAccounts',
request_field='createServiceAccountRequest',
request_type_name='IamProjectsServiceAccountsCreateRequest',
response_type_name='ServiceAccount',
supports_download=False,
)
def Delete(self, request, global_params=None):
r"""Deletes a ServiceAccount.
Args:
request: (IamProjectsServiceAccountsDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Empty) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1/projects/{projectsId}/serviceAccounts/{serviceAccountsId}',
http_method='DELETE',
method_id='iam.projects.serviceAccounts.delete',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1/{+name}',
request_field='',
request_type_name='IamProjectsServiceAccountsDeleteRequest',
response_type_name='Empty',
supports_download=False,
)
def Get(self, request, global_params=None):
r"""Gets a ServiceAccount.
Args:
request: (IamProjectsServiceAccountsGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(ServiceAccount) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1/projects/{projectsId}/serviceAccounts/{serviceAccountsId}',
http_method='GET',
method_id='iam.projects.serviceAccounts.get',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1/{+name}',
request_field='',
request_type_name='IamProjectsServiceAccountsGetRequest',
response_type_name='ServiceAccount',
supports_download=False,
)
def GetIamPolicy(self, request, global_params=None):
r"""Returns the IAM access control policy for specified IAM resource.
Args:
request: (IamProjectsServiceAccountsGetIamPolicyRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Policy) The response message.
"""
config = self.GetMethodConfig('GetIamPolicy')
return self._RunMethod(
config, request, global_params=global_params)
GetIamPolicy.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1/projects/{projectsId}/serviceAccounts/{serviceAccountsId}:getIamPolicy',
http_method='POST',
method_id='iam.projects.serviceAccounts.getIamPolicy',
ordered_params=['resource'],
path_params=['resource'],
query_params=['options_requestedPolicyVersion'],
relative_path='v1/{+resource}:getIamPolicy',
request_field='',
request_type_name='IamProjectsServiceAccountsGetIamPolicyRequest',
response_type_name='Policy',
supports_download=False,
)
def List(self, request, global_params=None):
r"""Lists ServiceAccounts for a project.
Args:
request: (IamProjectsServiceAccountsListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(ListServiceAccountsResponse) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1/projects/{projectsId}/serviceAccounts',
http_method='GET',
method_id='iam.projects.serviceAccounts.list',
ordered_params=['name'],
path_params=['name'],
query_params=['pageSize', 'pageToken', 'removeDeletedServiceAccounts'],
relative_path='v1/{+name}/serviceAccounts',
request_field='',
request_type_name='IamProjectsServiceAccountsListRequest',
response_type_name='ListServiceAccountsResponse',
supports_download=False,
)
def SetIamPolicy(self, request, global_params=None):
r"""Sets the IAM access control policy for the specified IAM resource.
Args:
request: (IamProjectsServiceAccountsSetIamPolicyRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Policy) The response message.
"""
config = self.GetMethodConfig('SetIamPolicy')
return self._RunMethod(
config, request, global_params=global_params)
SetIamPolicy.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1/projects/{projectsId}/serviceAccounts/{serviceAccountsId}:setIamPolicy',
http_method='POST',
method_id='iam.projects.serviceAccounts.setIamPolicy',
ordered_params=['resource'],
path_params=['resource'],
query_params=[],
relative_path='v1/{+resource}:setIamPolicy',
request_field='setIamPolicyRequest',
request_type_name='IamProjectsServiceAccountsSetIamPolicyRequest',
response_type_name='Policy',
supports_download=False,
)
def SignBlob(self, request, global_params=None):
r"""Signs a blob using a service account's system-managed private key.
Args:
request: (IamProjectsServiceAccountsSignBlobRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(SignBlobResponse) The response message.
"""
config = self.GetMethodConfig('SignBlob')
return self._RunMethod(
config, request, global_params=global_params)
SignBlob.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1/projects/{projectsId}/serviceAccounts/{serviceAccountsId}:signBlob',
http_method='POST',
method_id='iam.projects.serviceAccounts.signBlob',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1/{+name}:signBlob',
request_field='signBlobRequest',
request_type_name='IamProjectsServiceAccountsSignBlobRequest',
response_type_name='SignBlobResponse',
supports_download=False,
)
def SignJwt(self, request, global_params=None):
r"""Signs a JWT using a service account's system-managed private key.
If no `exp` (expiry) time is contained in the claims, we will
provide an expiry of one hour in the future. If an expiry
of more than one hour in the future is requested, the request
will fail.
Args:
request: (IamProjectsServiceAccountsSignJwtRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(SignJwtResponse) The response message.
"""
config = self.GetMethodConfig('SignJwt')
return self._RunMethod(
config, request, global_params=global_params)
SignJwt.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1/projects/{projectsId}/serviceAccounts/{serviceAccountsId}:signJwt',
http_method='POST',
method_id='iam.projects.serviceAccounts.signJwt',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1/{+name}:signJwt',
request_field='signJwtRequest',
request_type_name='IamProjectsServiceAccountsSignJwtRequest',
response_type_name='SignJwtResponse',
supports_download=False,
)
def TestIamPermissions(self, request, global_params=None):
r"""Tests the specified permissions against the IAM access control policy.
for the specified IAM resource.
Args:
request: (IamProjectsServiceAccountsTestIamPermissionsRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(TestIamPermissionsResponse) The response message.
"""
config = self.GetMethodConfig('TestIamPermissions')
return self._RunMethod(
config, request, global_params=global_params)
TestIamPermissions.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1/projects/{projectsId}/serviceAccounts/{serviceAccountsId}:testIamPermissions',
http_method='POST',
method_id='iam.projects.serviceAccounts.testIamPermissions',
ordered_params=['resource'],
path_params=['resource'],
query_params=[],
relative_path='v1/{+resource}:testIamPermissions',
request_field='testIamPermissionsRequest',
request_type_name='IamProjectsServiceAccountsTestIamPermissionsRequest',
response_type_name='TestIamPermissionsResponse',
supports_download=False,
)
def Update(self, request, global_params=None):
r"""Updates a ServiceAccount.
Currently, only the following fields are updatable:
`display_name` .
The `etag` is mandatory.
Args:
request: (ServiceAccount) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(ServiceAccount) The response message.
"""
config = self.GetMethodConfig('Update')
return self._RunMethod(
config, request, global_params=global_params)
Update.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1/projects/{projectsId}/serviceAccounts/{serviceAccountsId}',
http_method='PUT',
method_id='iam.projects.serviceAccounts.update',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1/{+name}',
request_field='<request>',
request_type_name='ServiceAccount',
response_type_name='ServiceAccount',
supports_download=False,
)
class ProjectsService(base_api.BaseApiService):
  """Service class for the projects resource."""

  _NAME = 'projects'

  def __init__(self, client):
    """Bind this resource-level service to its API client."""
    super(IamV1.ProjectsService, self).__init__(client)
    # No media-upload endpoints on this service.
    self._upload_configs = {}
class RolesService(base_api.BaseApiService):
  """Service class for the roles resource."""

  _NAME = 'roles'

  def __init__(self, client):
    """Bind this resource-level service to its API client."""
    super(IamV1.RolesService, self).__init__(client)
    # No media-upload endpoints on this service.
    self._upload_configs = {}

  def QueryGrantableRoles(self, request, global_params=None):
    r"""Queries roles that can be granted on a particular resource.

    Args:
      request: (QueryGrantableRolesRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (QueryGrantableRolesResponse) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('QueryGrantableRoles'), request,
        global_params=global_params)

  QueryGrantableRoles.method_config = lambda: base_api.ApiMethodInfo(
      http_method='POST',
      method_id='iam.roles.queryGrantableRoles',
      ordered_params=[],
      path_params=[],
      query_params=[],
      relative_path='v1/roles:queryGrantableRoles',
      request_field='<request>',
      request_type_name='QueryGrantableRolesRequest',
      response_type_name='QueryGrantableRolesResponse',
      supports_download=False,
  )
| python | Apache-2.0 | 1b14f58b3c895f542c724028dd0fb8ae0d816510 | 2026-01-05T07:12:01.390580Z | false |
google/apitools | https://github.com/google/apitools/blob/1b14f58b3c895f542c724028dd0fb8ae0d816510/samples/iam_sample/iam_v1/__init__.py | samples/iam_sample/iam_v1/__init__.py | """Package marker file."""
from __future__ import absolute_import

import pkgutil

# Extend this package's __path__ so that identically named packages found
# elsewhere on sys.path contribute their submodules (pkgutil-style
# namespace-package behavior).
__path__ = pkgutil.extend_path(__path__, __name__)
| python | Apache-2.0 | 1b14f58b3c895f542c724028dd0fb8ae0d816510 | 2026-01-05T07:12:01.390580Z | false |
google/apitools | https://github.com/google/apitools/blob/1b14f58b3c895f542c724028dd0fb8ae0d816510/samples/bigquery_sample/bigquery_v2/bigquery_v2_messages.py | samples/bigquery_sample/bigquery_v2/bigquery_v2_messages.py | """Generated message classes for bigquery version v2.
A data platform for customers to create, manage, share and query data.
"""
# NOTE: This file is autogenerated and should not be edited by hand.
from __future__ import absolute_import
from apitools.base.protorpclite import messages as _messages
from apitools.base.py import encoding
from apitools.base.py import extra_types
package = 'bigquery'
class BigqueryDatasetsDeleteRequest(_messages.Message):
  r"""A BigqueryDatasetsDeleteRequest object.

  Fields:
    datasetId: Dataset ID of dataset being deleted
    deleteContents: If True, delete all the tables in the dataset. If False
      and the dataset contains tables, the request will fail. Default is False
    projectId: Project ID of the dataset being deleted
  """

  # The first positional argument of each field is its protorpc field number.
  datasetId = _messages.StringField(1, required=True)
  deleteContents = _messages.BooleanField(2)  # Unset is treated as False per docstring.
  projectId = _messages.StringField(3, required=True)
# The delete call returns no payload; this empty message represents the
# void response body.
class BigqueryDatasetsDeleteResponse(_messages.Message):
  r"""An empty BigqueryDatasetsDelete response."""
class BigqueryDatasetsGetRequest(_messages.Message):
  r"""A BigqueryDatasetsGetRequest object.

  Fields:
    datasetId: Dataset ID of the requested dataset
    projectId: Project ID of the requested dataset
  """

  # Both identifiers are mandatory to address a single dataset.
  datasetId = _messages.StringField(1, required=True)
  projectId = _messages.StringField(2, required=True)
class BigqueryDatasetsInsertRequest(_messages.Message):
  r"""A BigqueryDatasetsInsertRequest object.

  Fields:
    dataset: A Dataset resource to be passed as the request body.
    projectId: Project ID of the new dataset
  """

  # The Dataset message is carried as the HTTP request body.
  dataset = _messages.MessageField('Dataset', 1)
  projectId = _messages.StringField(2, required=True)
class BigqueryDatasetsListRequest(_messages.Message):
  r"""A BigqueryDatasetsListRequest object.

  Fields:
    all: Whether to list all datasets, including hidden ones
    filter: An expression for filtering the results of the request by label.
      The syntax is "labels.[:]". Multiple filters can be ANDed together by
      connecting with a space. Example: "labels.department:receiving
      labels.active". See https://cloud.google.com/bigquery/docs/labeling-
      datasets#filtering_datasets_using_labels for details.
    maxResults: The maximum number of results to return
    pageToken: Page token, returned by a previous call, to request the next
      page of results
    projectId: Project ID of the datasets to be listed
  """

  # NOTE: 'all' and 'filter' shadow Python builtins; the names are fixed by
  # the generated API surface and must not be renamed.
  all = _messages.BooleanField(1)
  filter = _messages.StringField(2)
  maxResults = _messages.IntegerField(3, variant=_messages.Variant.UINT32)
  pageToken = _messages.StringField(4)
  projectId = _messages.StringField(5, required=True)
class BigqueryDatasetsPatchRequest(_messages.Message):
  r"""A BigqueryDatasetsPatchRequest object.

  Fields:
    dataset: A Dataset resource to be passed as the request body.
    datasetId: Dataset ID of the dataset being updated
    projectId: Project ID of the dataset being updated
  """

  # The Dataset message is the request body; the two IDs address the target.
  dataset = _messages.MessageField('Dataset', 1)
  datasetId = _messages.StringField(2, required=True)
  projectId = _messages.StringField(3, required=True)
class BigqueryDatasetsUpdateRequest(_messages.Message):
  r"""A BigqueryDatasetsUpdateRequest object.

  Fields:
    dataset: A Dataset resource to be passed as the request body.
    datasetId: Dataset ID of the dataset being updated
    projectId: Project ID of the dataset being updated
  """

  # Structurally identical to BigqueryDatasetsPatchRequest; kept separate
  # because each API method gets its own generated request type.
  dataset = _messages.MessageField('Dataset', 1)
  datasetId = _messages.StringField(2, required=True)
  projectId = _messages.StringField(3, required=True)
class BigqueryJobsCancelRequest(_messages.Message):
  r"""A BigqueryJobsCancelRequest object.

  Fields:
    jobId: [Required] Job ID of the job to cancel
    projectId: [Required] Project ID of the job to cancel
  """

  # Both identifiers are required to address a single job.
  jobId = _messages.StringField(1, required=True)
  projectId = _messages.StringField(2, required=True)
class BigqueryJobsGetQueryResultsRequest(_messages.Message):
  r"""A BigqueryJobsGetQueryResultsRequest object.

  Fields:
    jobId: [Required] Job ID of the query job
    maxResults: Maximum number of results to read
    pageToken: Page token, returned by a previous call, to request the next
      page of results
    projectId: [Required] Project ID of the query job
    startIndex: Zero-based index of the starting row
    timeoutMs: How long to wait for the query to complete, in milliseconds,
      before returning. Default is 10 seconds. If the timeout passes before
      the job completes, the 'jobComplete' field in the response will be false
  """

  jobId = _messages.StringField(1, required=True)
  # UINT32/UINT64 variants select the unsigned wire representation.
  maxResults = _messages.IntegerField(2, variant=_messages.Variant.UINT32)
  pageToken = _messages.StringField(3)
  projectId = _messages.StringField(4, required=True)
  startIndex = _messages.IntegerField(5, variant=_messages.Variant.UINT64)
  timeoutMs = _messages.IntegerField(6, variant=_messages.Variant.UINT32)
class BigqueryJobsGetRequest(_messages.Message):
  r"""A BigqueryJobsGetRequest object.

  Fields:
    jobId: [Required] Job ID of the requested job
    projectId: [Required] Project ID of the requested job
  """

  # Both identifiers are required to address a single job.
  jobId = _messages.StringField(1, required=True)
  projectId = _messages.StringField(2, required=True)
class BigqueryJobsInsertRequest(_messages.Message):
  r"""A BigqueryJobsInsertRequest object.

  Fields:
    job: A Job resource to be passed as the request body.
    projectId: Project ID of the project that will be billed for the job
  """

  # The Job message is carried as the HTTP request body.
  job = _messages.MessageField('Job', 1)
  projectId = _messages.StringField(2, required=True)
class BigqueryJobsListRequest(_messages.Message):
  r"""A BigqueryJobsListRequest object.

  Enums:
    ProjectionValueValuesEnum: Restrict information returned to a set of
      selected fields
    StateFilterValueValuesEnum: Filter for job state

  Fields:
    allUsers: Whether to display jobs owned by all users in the project.
      Default false
    maxResults: Maximum number of results to return
    pageToken: Page token, returned by a previous call, to request the next
      page of results
    projectId: Project ID of the jobs to list
    projection: Restrict information returned to a set of selected fields
    stateFilter: Filter for job state
  """

  class ProjectionValueValuesEnum(_messages.Enum):
    r"""Restrict information returned to a set of selected fields

    Values:
      full: Includes all job data
      minimal: Does not include the job configuration
    """
    # Integer values are the protorpc enum numbers.
    full = 0
    minimal = 1

  class StateFilterValueValuesEnum(_messages.Enum):
    r"""Filter for job state

    Values:
      done: Finished jobs
      pending: Pending jobs
      running: Running jobs
    """
    done = 0
    pending = 1
    running = 2

  allUsers = _messages.BooleanField(1)
  maxResults = _messages.IntegerField(2, variant=_messages.Variant.UINT32)
  pageToken = _messages.StringField(3)
  projectId = _messages.StringField(4, required=True)
  projection = _messages.EnumField('ProjectionValueValuesEnum', 5)
  # repeated=True: multiple job states may be requested at once.
  stateFilter = _messages.EnumField('StateFilterValueValuesEnum', 6, repeated=True)
class BigqueryJobsQueryRequest(_messages.Message):
  r"""A BigqueryJobsQueryRequest object.

  Fields:
    projectId: Project ID of the project billed for the query
    queryRequest: A QueryRequest resource to be passed as the request body.
  """

  projectId = _messages.StringField(1, required=True)
  # The QueryRequest message is carried as the HTTP request body.
  queryRequest = _messages.MessageField('QueryRequest', 2)
class BigqueryProjectsListRequest(_messages.Message):
  r"""A BigqueryProjectsListRequest object.

  Fields:
    maxResults: Maximum number of results to return
    pageToken: Page token, returned by a previous call, to request the next
      page of results
  """

  # Standard pagination pair; no required fields on this request.
  maxResults = _messages.IntegerField(1, variant=_messages.Variant.UINT32)
  pageToken = _messages.StringField(2)
class BigqueryTabledataInsertAllRequest(_messages.Message):
  r"""A BigqueryTabledataInsertAllRequest object.

  Fields:
    datasetId: Dataset ID of the destination table.
    projectId: Project ID of the destination table.
    tableDataInsertAllRequest: A TableDataInsertAllRequest resource to be
      passed as the request body.
    tableId: Table ID of the destination table.
  """

  datasetId = _messages.StringField(1, required=True)
  projectId = _messages.StringField(2, required=True)
  # The TableDataInsertAllRequest message is carried as the HTTP request body.
  tableDataInsertAllRequest = _messages.MessageField('TableDataInsertAllRequest', 3)
  tableId = _messages.StringField(4, required=True)
class BigqueryTabledataListRequest(_messages.Message):
  r"""A BigqueryTabledataListRequest object.

  Fields:
    datasetId: Dataset ID of the table to read
    maxResults: Maximum number of results to return
    pageToken: Page token, returned by a previous call, identifying the result
      set
    projectId: Project ID of the table to read
    startIndex: Zero-based index of the starting row to read
    tableId: Table ID of the table to read
  """

  datasetId = _messages.StringField(1, required=True)
  # UINT32/UINT64 variants select the unsigned wire representation.
  maxResults = _messages.IntegerField(2, variant=_messages.Variant.UINT32)
  pageToken = _messages.StringField(3)
  projectId = _messages.StringField(4, required=True)
  startIndex = _messages.IntegerField(5, variant=_messages.Variant.UINT64)
  tableId = _messages.StringField(6, required=True)
class BigqueryTablesDeleteRequest(_messages.Message):
  r"""A BigqueryTablesDeleteRequest object.

  Fields:
    datasetId: Dataset ID of the table to delete
    projectId: Project ID of the table to delete
    tableId: Table ID of the table to delete
  """

  # All three identifiers are required to address a single table.
  datasetId = _messages.StringField(1, required=True)
  projectId = _messages.StringField(2, required=True)
  tableId = _messages.StringField(3, required=True)
# The delete call returns no payload; this empty message represents the
# void response body.
class BigqueryTablesDeleteResponse(_messages.Message):
  r"""An empty BigqueryTablesDelete response."""
class BigqueryTablesGetRequest(_messages.Message):
  r"""A BigqueryTablesGetRequest object.

  Fields:
    datasetId: Dataset ID of the requested table
    projectId: Project ID of the requested table
    tableId: Table ID of the requested table
  """

  # All three identifiers are required to address a single table.
  datasetId = _messages.StringField(1, required=True)
  projectId = _messages.StringField(2, required=True)
  tableId = _messages.StringField(3, required=True)
class BigqueryTablesInsertRequest(_messages.Message):
  r"""A BigqueryTablesInsertRequest object.

  Fields:
    datasetId: Dataset ID of the new table
    projectId: Project ID of the new table
    table: A Table resource to be passed as the request body.
  """

  datasetId = _messages.StringField(1, required=True)
  projectId = _messages.StringField(2, required=True)
  # The Table message is carried as the HTTP request body.
  table = _messages.MessageField('Table', 3)
class BigqueryTablesListRequest(_messages.Message):
  r"""A BigqueryTablesListRequest object.

  Fields:
    datasetId: Dataset ID of the tables to list
    maxResults: Maximum number of results to return
    pageToken: Page token, returned by a previous call, to request the next
      page of results
    projectId: Project ID of the tables to list
  """

  datasetId = _messages.StringField(1, required=True)
  # Standard pagination pair (size + continuation token).
  maxResults = _messages.IntegerField(2, variant=_messages.Variant.UINT32)
  pageToken = _messages.StringField(3)
  projectId = _messages.StringField(4, required=True)
class BigqueryTablesPatchRequest(_messages.Message):
  r"""A BigqueryTablesPatchRequest object.

  Fields:
    datasetId: Dataset ID of the table to update
    projectId: Project ID of the table to update
    table: A Table resource to be passed as the request body.
    tableId: Table ID of the table to update
  """

  datasetId = _messages.StringField(1, required=True)
  projectId = _messages.StringField(2, required=True)
  # The Table message is the request body; the three IDs address the target.
  table = _messages.MessageField('Table', 3)
  tableId = _messages.StringField(4, required=True)
class BigqueryTablesUpdateRequest(_messages.Message):
  r"""A BigqueryTablesUpdateRequest object.

  Fields:
    datasetId: Dataset ID of the table to update
    projectId: Project ID of the table to update
    table: A Table resource to be passed as the request body.
    tableId: Table ID of the table to update
  """

  # Structurally identical to BigqueryTablesPatchRequest; kept separate
  # because each API method gets its own generated request type.
  datasetId = _messages.StringField(1, required=True)
  projectId = _messages.StringField(2, required=True)
  table = _messages.MessageField('Table', 3)
  tableId = _messages.StringField(4, required=True)
class BigtableColumn(_messages.Message):
  r"""A BigtableColumn object.

  Fields:
    encoding: [Optional] The encoding of the values when the type is not
      STRING. Acceptable encoding values are: TEXT - indicates values are
      alphanumeric text strings. BINARY - indicates values are encoded using
      HBase Bytes.toBytes family of functions. 'encoding' can also be set at
      the column family level. However, the setting at this level takes
      precedence if 'encoding' is set at both levels.
    fieldName: [Optional] If the qualifier is not a valid BigQuery field
      identifier i.e. does not match [a-zA-Z][a-zA-Z0-9_]*, a valid identifier
      must be provided as the column field name and is used as field name in
      queries.
    onlyReadLatest: [Optional] If this is set, only the latest version of
      value in this column are exposed. 'onlyReadLatest' can also be set at
      the column family level. However, the setting at this level takes
      precedence if 'onlyReadLatest' is set at both levels.
    qualifierEncoded: [Required] Qualifier of the column. Columns in the
      parent column family that has this exact qualifier are exposed as .
      field. If the qualifier is valid UTF-8 string, it can be specified in
      the qualifier_string field. Otherwise, a base-64 encoded value must be
      set to qualifier_encoded. The column field name is the same as the
      column qualifier. However, if the qualifier is not a valid BigQuery
      field identifier i.e. does not match [a-zA-Z][a-zA-Z0-9_]*, a valid
      identifier must be provided as field_name.
    qualifierString: A string attribute.
    type: [Optional] The type to convert the value in cells of this column.
      The values are expected to be encoded using HBase Bytes.toBytes function
      when using the BINARY encoding value. Following BigQuery types are
      allowed (case-sensitive) - BYTES STRING INTEGER FLOAT BOOLEAN Default
      type is BYTES. 'type' can also be set at the column family level.
      However, the setting at this level takes precedence if 'type' is set at
      both levels.
  """

  encoding = _messages.StringField(1)
  fieldName = _messages.StringField(2)
  onlyReadLatest = _messages.BooleanField(3)
  # BytesField carries the raw (possibly non-UTF-8) qualifier; the UTF-8
  # alternative goes in qualifierString (see docstring).
  qualifierEncoded = _messages.BytesField(4)
  qualifierString = _messages.StringField(5)
  type = _messages.StringField(6)
class BigtableColumnFamily(_messages.Message):
  r"""A BigtableColumnFamily object.

  Fields:
    columns: [Optional] Lists of columns that should be exposed as individual
      fields as opposed to a list of (column name, value) pairs. All columns
      whose qualifier matches a qualifier in this list can be accessed as ..
      Other columns can be accessed as a list through .Column field.
    encoding: [Optional] The encoding of the values when the type is not
      STRING. Acceptable encoding values are: TEXT - indicates values are
      alphanumeric text strings. BINARY - indicates values are encoded using
      HBase Bytes.toBytes family of functions. This can be overridden for a
      specific column by listing that column in 'columns' and specifying an
      encoding for it.
    familyId: Identifier of the column family.
    onlyReadLatest: [Optional] If this is set only the latest version of value
      are exposed for all columns in this column family. This can be
      overridden for a specific column by listing that column in 'columns' and
      specifying a different setting for that column.
    type: [Optional] The type to convert the value in cells of this column
      family. The values are expected to be encoded using HBase Bytes.toBytes
      function when using the BINARY encoding value. Following BigQuery types
      are allowed (case-sensitive) - BYTES STRING INTEGER FLOAT BOOLEAN
      Default type is BYTES. This can be overridden for a specific column by
      listing that column in 'columns' and specifying a type for it.
  """

  # Per-column settings in 'columns' override the family-level defaults
  # below (see docstring).
  columns = _messages.MessageField('BigtableColumn', 1, repeated=True)
  encoding = _messages.StringField(2)
  familyId = _messages.StringField(3)
  onlyReadLatest = _messages.BooleanField(4)
  type = _messages.StringField(5)
class BigtableOptions(_messages.Message):
  r"""A BigtableOptions object.

  Fields:
    columnFamilies: [Optional] List of column families to expose in the table
      schema along with their types. This list restricts the column families
      that can be referenced in queries and specifies their value types. You
      can use this list to do type conversions - see the 'type' field for more
      details. If you leave this list empty, all column families are present
      in the table schema and their values are read as BYTES. During a query
      only the column families referenced in that query are read from
      Bigtable.
    ignoreUnspecifiedColumnFamilies: [Optional] If field is true, then the
      column families that are not specified in columnFamilies list are not
      exposed in the table schema. Otherwise, they are read with BYTES type
      values. The default value is false.
    readRowkeyAsString: [Optional] If field is true, then the rowkey column
      families will be read and converted to string. Otherwise they are read
      with BYTES type values and users need to manually cast them with CAST if
      necessary. The default value is false.
  """

  columnFamilies = _messages.MessageField('BigtableColumnFamily', 1, repeated=True)
  # Both booleans default to unset, which the service treats as false per
  # the docstring.
  ignoreUnspecifiedColumnFamilies = _messages.BooleanField(2)
  readRowkeyAsString = _messages.BooleanField(3)
class CsvOptions(_messages.Message):
  r"""A CsvOptions object.

  Fields:
    allowJaggedRows: [Optional] Indicates if BigQuery should accept rows that
      are missing trailing optional columns. If true, BigQuery treats missing
      trailing columns as null values. If false, records with missing trailing
      columns are treated as bad records, and if there are too many bad
      records, an invalid error is returned in the job result. The default
      value is false.
    allowQuotedNewlines: [Optional] Indicates if BigQuery should allow quoted
      data sections that contain newline characters in a CSV file. The default
      value is false.
    encoding: [Optional] The character encoding of the data. The supported
      values are UTF-8 or ISO-8859-1. The default value is UTF-8. BigQuery
      decodes the data after the raw, binary data has been split using the
      values of the quote and fieldDelimiter properties.
    fieldDelimiter: [Optional] The separator for fields in a CSV file.
      BigQuery converts the string to ISO-8859-1 encoding, and then uses the
      first byte of the encoded string to split the data in its raw, binary
      state. BigQuery also supports the escape sequence "\t" to specify a tab
      separator. The default value is a comma (',').
    quote: [Optional] The value that is used to quote data sections in a CSV
      file. BigQuery converts the string to ISO-8859-1 encoding, and then uses
      the first byte of the encoded string to split the data in its raw,
      binary state. The default value is a double-quote ('"'). If your data
      does not contain quoted sections, set the property value to an empty
      string. If your data contains quoted newline characters, you must also
      set the allowQuotedNewlines property to true.
    skipLeadingRows: [Optional] The number of rows at the top of a CSV file
      that BigQuery will skip when reading the data. The default value is 0.
      This property is useful if you have header rows in the file that should
      be skipped.
  """

  allowJaggedRows = _messages.BooleanField(1)
  allowQuotedNewlines = _messages.BooleanField(2)
  encoding = _messages.StringField(3)
  fieldDelimiter = _messages.StringField(4)
  # 'quote' is the only field here with an explicit client-side default:
  # a literal double-quote character.
  quote = _messages.StringField(5, default='"')
  skipLeadingRows = _messages.IntegerField(6)
class Dataset(_messages.Message):
r"""A Dataset object.
Messages:
AccessValueListEntry: A AccessValueListEntry object.
LabelsValue: [Experimental] The labels associated with this dataset. You
can use these to organize and group your datasets. You can set this
property when inserting or updating a dataset. Label keys and values can
be no longer than 63 characters, can only contain letters, numeric
characters, underscores and dashes. International characters are
allowed. Label values are optional. Label keys must start with a letter
and must be unique within a dataset. Both keys and values are
additionally constrained to be <= 128 bytes in size.
Fields:
access: [Optional] An array of objects that define dataset access for one
or more entities. You can set this property when inserting or updating a
dataset in order to control who is allowed to access the data. If
unspecified at dataset creation time, BigQuery adds default dataset
access for the following entities: access.specialGroup: projectReaders;
access.role: READER; access.specialGroup: projectWriters; access.role:
WRITER; access.specialGroup: projectOwners; access.role: OWNER;
access.userByEmail: [dataset creator email]; access.role: OWNER;
creationTime: [Output-only] The time when this dataset was created, in
milliseconds since the epoch.
datasetReference: [Required] A reference that identifies the dataset.
defaultTableExpirationMs: [Optional] The default lifetime of all tables in
the dataset, in milliseconds. The minimum value is 3600000 milliseconds
(one hour). Once this property is set, all newly-created tables in the
dataset will have an expirationTime property set to the creation time
plus the value in this property, and changing the value will only affect
new tables, not existing ones. When the expirationTime for a given table
is reached, that table will be deleted automatically. If a table's
expirationTime is modified or removed before the table expires, or if
you provide an explicit expirationTime when creating a table, that value
takes precedence over the default expiration time indicated by this
property.
description: [Optional] A user-friendly description of the dataset.
etag: [Output-only] A hash of the resource.
friendlyName: [Optional] A descriptive name for the dataset.
id: [Output-only] The fully-qualified unique name of the dataset in the
format projectId:datasetId. The dataset name without the project name is
given in the datasetId field. When creating a new dataset, leave this
field blank, and instead specify the datasetId field.
kind: [Output-only] The resource type.
labels: [Experimental] The labels associated with this dataset. You can
use these to organize and group your datasets. You can set this property
when inserting or updating a dataset. Label keys and values can be no
longer than 63 characters, can only contain letters, numeric characters,
underscores and dashes. International characters are allowed. Label
values are optional. Label keys must start with a letter and must be
unique within a dataset. Both keys and values are additionally
constrained to be <= 128 bytes in size.
lastModifiedTime: [Output-only] The date when this dataset or any of its
tables was last modified, in milliseconds since the epoch.
location: [Experimental] The geographic location where the dataset should
reside. Possible values include EU and US. The default value is US.
selfLink: [Output-only] A URL that can be used to access the resource
again. You can use this URL in Get or Update requests to the resource.
"""
class AccessValueListEntry(_messages.Message):
  r"""A single access-control entry on a dataset.

  Exactly one of the "[Pick one]" grantee fields should be set per entry.

  Fields:
    domain: [Pick one] A domain to grant access to. Any users signed in with
      the domain specified will be granted the specified access. Example:
      "example.com".
    groupByEmail: [Pick one] An email address of a Google Group to grant
      access to.
    role: [Required] Describes the rights granted to the user specified by
      the other member of the access object. The following string values are
      supported: READER, WRITER, OWNER.
    specialGroup: [Pick one] A special group to grant access to. Possible
      values include: projectOwners: Owners of the enclosing project.
      projectReaders: Readers of the enclosing project. projectWriters:
      Writers of the enclosing project. allAuthenticatedUsers: All
      authenticated BigQuery users.
    userByEmail: [Pick one] An email address of a user to grant access to.
      For example: fred@example.com.
    view: [Pick one] A view from a different dataset to grant access to.
      Queries executed against that view will have read access to tables in
      this dataset. The role field is not required when this field is set.
      If that view is updated by any user, access to the view needs to be
      granted again via an update operation.
  """

  # Field numbers are part of the serialized wire format; never renumber.
  domain = _messages.StringField(1)
  groupByEmail = _messages.StringField(2)
  role = _messages.StringField(3)
  specialGroup = _messages.StringField(4)
  userByEmail = _messages.StringField(5)
  view = _messages.MessageField('TableReference', 6)
@encoding.MapUnrecognizedFields('additionalProperties')
class LabelsValue(_messages.Message):
  r"""[Experimental] The labels associated with this dataset. You can use
  these to organize and group your datasets. You can set this property when
  inserting or updating a dataset. Label keys and values can be no longer
  than 63 characters, can only contain letters, numeric characters,
  underscores and dashes. International characters are allowed. Label values
  are optional. Label keys must start with a letter and must be unique
  within a dataset. Both keys and values are additionally constrained to be
  <= 128 bytes in size.

  Messages:
    AdditionalProperty: An additional property for a LabelsValue object.

  Fields:
    additionalProperties: Additional properties of type LabelsValue
  """

  class AdditionalProperty(_messages.Message):
    r"""A single label key/value pair.

    Fields:
      key: Name of the additional property.
      value: A string attribute.
    """

    key = _messages.StringField(1)
    value = _messages.StringField(2)

  # Labels travel as a repeated key/value list; the decorator above maps
  # arbitrary JSON object keys into this list during (de)serialization.
  additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)
# Wire-format fields of the Dataset resource. Field numbers identify each
# field on the wire and must stay stable; per-field semantics are documented
# in the class docstring above.
access = _messages.MessageField('AccessValueListEntry', 1, repeated=True)
creationTime = _messages.IntegerField(2)
datasetReference = _messages.MessageField('DatasetReference', 3)
defaultTableExpirationMs = _messages.IntegerField(4)
description = _messages.StringField(5)
etag = _messages.StringField(6)
friendlyName = _messages.StringField(7)
id = _messages.StringField(8)
kind = _messages.StringField(9, default='bigquery#dataset')
labels = _messages.MessageField('LabelsValue', 10)
lastModifiedTime = _messages.IntegerField(11)
location = _messages.StringField(12)
selfLink = _messages.StringField(13)
class DatasetList(_messages.Message):
  r"""One page of results from a Datasets.list request.

  Messages:
    DatasetsValueListEntry: A DatasetsValueListEntry object.

  Fields:
    datasets: An array of the dataset resources in the project. Each resource
      contains basic information. For full information about a particular
      dataset resource, use the Datasets: get method. This property is omitted
      when there are no datasets in the project.
    etag: A hash value of the results page. You can use this property to
      determine if the page has changed since the last request.
    kind: The list type. This property always returns the value
      "bigquery#datasetList".
    nextPageToken: A token that can be used to request the next results page.
      This property is omitted on the final results page.
  """

  class DatasetsValueListEntry(_messages.Message):
    r"""Summary information about one dataset in the list.

    Messages:
      LabelsValue: [Experimental] The labels associated with this dataset. You
        can use these to organize and group your datasets.

    Fields:
      datasetReference: The dataset reference. Use this property to access
        specific parts of the dataset's ID, such as project ID or dataset ID.
      friendlyName: A descriptive name for the dataset, if one exists.
      id: The fully-qualified, unique, opaque ID of the dataset.
      kind: The resource type. This property always returns the value
        "bigquery#dataset".
      labels: [Experimental] The labels associated with this dataset. You can
        use these to organize and group your datasets.
    """

    @encoding.MapUnrecognizedFields('additionalProperties')
    class LabelsValue(_messages.Message):
      r"""[Experimental] The labels associated with this dataset. You can use
      these to organize and group your datasets.

      Messages:
        AdditionalProperty: An additional property for a LabelsValue object.

      Fields:
        additionalProperties: Additional properties of type LabelsValue
      """

      class AdditionalProperty(_messages.Message):
        r"""A single label key/value pair.

        Fields:
          key: Name of the additional property.
          value: A string attribute.
        """

        key = _messages.StringField(1)
        value = _messages.StringField(2)

      # Arbitrary JSON label keys are mapped into this repeated list by the
      # MapUnrecognizedFields decorator above.
      additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)

    datasetReference = _messages.MessageField('DatasetReference', 1)
    friendlyName = _messages.StringField(2)
    id = _messages.StringField(3)
    kind = _messages.StringField(4, default='bigquery#dataset')
    labels = _messages.MessageField('LabelsValue', 5)

  # Page-level fields; field numbers are wire identifiers and must not change.
  datasets = _messages.MessageField('DatasetsValueListEntry', 1, repeated=True)
  etag = _messages.StringField(2)
  kind = _messages.StringField(3, default='bigquery#datasetList')
  nextPageToken = _messages.StringField(4)
class DatasetReference(_messages.Message):
  r"""Identifies a dataset by its project ID and dataset ID.

  Fields:
    datasetId: [Required] A unique ID for this dataset, without the project
      name. The ID must contain only letters (a-z, A-Z), numbers (0-9), or
      underscores (_). The maximum length is 1,024 characters.
    projectId: [Optional] The ID of the project containing this dataset.
  """

  datasetId = _messages.StringField(1)
  projectId = _messages.StringField(2)
class ErrorProto(_messages.Message):
  r"""A single error as reported by the BigQuery service.

  Fields:
    debugInfo: Debugging information. This property is internal to Google and
      should not be used.
    location: Specifies where the error occurred, if present.
    message: A human-readable description of the error.
    reason: A short error code that summarizes the error.
  """

  debugInfo = _messages.StringField(1)
  location = _messages.StringField(2)
  message = _messages.StringField(3)
  reason = _messages.StringField(4)
class ExplainQueryStage(_messages.Message):
r"""A ExplainQueryStage object.
Fields:
computeRatioAvg: Relative amount of time the average shard spent on CPU-
bound tasks.
computeRatioMax: Relative amount of time the slowest shard spent on CPU-
bound tasks.
id: Unique ID for stage within plan.
name: Human-readable name for stage.
readRatioAvg: Relative amount of time the average shard spent reading
input.
readRatioMax: Relative amount of time the slowest shard spent reading
input.
recordsRead: Number of records read into the stage.
recordsWritten: Number of records written by the stage.
steps: List of operations within the stage in dependency order
(approximately chronological).
waitRatioAvg: Relative amount of time the average shard spent waiting to
be scheduled.
waitRatioMax: Relative amount of time the slowest shard spent waiting to
be scheduled.
writeRatioAvg: Relative amount of time the average shard spent on writing
output.
writeRatioMax: Relative amount of time the slowest shard spent on writing
output.
"""
computeRatioAvg = _messages.FloatField(1)
computeRatioMax = _messages.FloatField(2)
id = _messages.IntegerField(3)
name = _messages.StringField(4)
readRatioAvg = _messages.FloatField(5)
| python | Apache-2.0 | 1b14f58b3c895f542c724028dd0fb8ae0d816510 | 2026-01-05T07:12:01.390580Z | true |
google/apitools | https://github.com/google/apitools/blob/1b14f58b3c895f542c724028dd0fb8ae0d816510/samples/bigquery_sample/bigquery_v2/bigquery_v2_client.py | samples/bigquery_sample/bigquery_v2/bigquery_v2_client.py | """Generated client library for bigquery version v2."""
# NOTE: This file is autogenerated and should not be edited by hand.
from __future__ import absolute_import
from apitools.base.py import base_api
from samples.bigquery_sample.bigquery_v2 import bigquery_v2_messages as messages
class BigqueryV2(base_api.BaseApiClient):
  """Generated client library for service bigquery version v2."""

  MESSAGES_MODULE = messages
  BASE_URL = 'https://www.googleapis.com/bigquery/v2/'
  MTLS_BASE_URL = ''

  _PACKAGE = 'bigquery'
  # OAuth2 scopes this client may request; taken from the discovery document.
  _SCOPES = ['https://www.googleapis.com/auth/bigquery', 'https://www.googleapis.com/auth/bigquery.insertdata', 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/cloud-platform.read-only', 'https://www.googleapis.com/auth/devstorage.full_control', 'https://www.googleapis.com/auth/devstorage.read_only', 'https://www.googleapis.com/auth/devstorage.read_write']
  _VERSION = 'v2'
  # Placeholder OAuth client credentials; callers normally override these.
  _CLIENT_ID = 'CLIENT_ID'
  _CLIENT_SECRET = 'CLIENT_SECRET'
  _USER_AGENT = 'x_Tw5K8nnjoRAqULM9PFAC2b'
  _CLIENT_CLASS_NAME = 'BigqueryV2'
  _URL_VERSION = 'v2'
  _API_KEY = None

  def __init__(self, url='', credentials=None,
               get_credentials=True, http=None, model=None,
               log_request=False, log_response=False,
               credentials_args=None, default_global_params=None,
               additional_http_headers=None, response_encoding=None):
    """Create a new bigquery handle."""
    url = url or self.BASE_URL
    super(BigqueryV2, self).__init__(
        url, credentials=credentials,
        get_credentials=get_credentials, http=http, model=model,
        log_request=log_request, log_response=log_response,
        credentials_args=credentials_args,
        default_global_params=default_global_params,
        additional_http_headers=additional_http_headers,
        response_encoding=response_encoding)
    # One service object per API collection; each holds the per-method
    # request-routing configuration consumed by _RunMethod.
    self.datasets = self.DatasetsService(self)
    self.jobs = self.JobsService(self)
    self.projects = self.ProjectsService(self)
    self.tabledata = self.TabledataService(self)
    self.tables = self.TablesService(self)
class DatasetsService(base_api.BaseApiService):
  """Service class for the datasets resource."""

  _NAME = 'datasets'

  def __init__(self, client):
    super(BigqueryV2.DatasetsService, self).__init__(client)
    # No media-upload methods exist on this resource.
    self._upload_configs = {
        }

  def Delete(self, request, global_params=None):
    r"""Deletes the dataset specified by the datasetId value. Before you can delete a dataset, you must delete all its tables, either manually or by specifying deleteContents. Immediately after deletion, you can create another dataset with the same name.

    Args:
      request: (BigqueryDatasetsDeleteRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (BigqueryDatasetsDeleteResponse) The response message.
    """
    config = self.GetMethodConfig('Delete')
    return self._RunMethod(
        config, request, global_params=global_params)

  # method_config is a zero-argument lambda so the ApiMethodInfo is built
  # lazily on first use (via GetMethodConfig) rather than at import time.
  Delete.method_config = lambda: base_api.ApiMethodInfo(
      http_method='DELETE',
      method_id='bigquery.datasets.delete',
      ordered_params=['projectId', 'datasetId'],
      path_params=['datasetId', 'projectId'],
      query_params=['deleteContents'],
      relative_path='projects/{projectId}/datasets/{datasetId}',
      request_field='',
      request_type_name='BigqueryDatasetsDeleteRequest',
      response_type_name='BigqueryDatasetsDeleteResponse',
      supports_download=False,
  )

  def Get(self, request, global_params=None):
    r"""Returns the dataset specified by datasetID.

    Args:
      request: (BigqueryDatasetsGetRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (Dataset) The response message.
    """
    config = self.GetMethodConfig('Get')
    return self._RunMethod(
        config, request, global_params=global_params)

  Get.method_config = lambda: base_api.ApiMethodInfo(
      http_method='GET',
      method_id='bigquery.datasets.get',
      ordered_params=['projectId', 'datasetId'],
      path_params=['datasetId', 'projectId'],
      query_params=[],
      relative_path='projects/{projectId}/datasets/{datasetId}',
      request_field='',
      request_type_name='BigqueryDatasetsGetRequest',
      response_type_name='Dataset',
      supports_download=False,
  )

  def Insert(self, request, global_params=None):
    r"""Creates a new empty dataset.

    Args:
      request: (BigqueryDatasetsInsertRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (Dataset) The response message.
    """
    config = self.GetMethodConfig('Insert')
    return self._RunMethod(
        config, request, global_params=global_params)

  Insert.method_config = lambda: base_api.ApiMethodInfo(
      http_method='POST',
      method_id='bigquery.datasets.insert',
      ordered_params=['projectId'],
      path_params=['projectId'],
      query_params=[],
      relative_path='projects/{projectId}/datasets',
      request_field='dataset',
      request_type_name='BigqueryDatasetsInsertRequest',
      response_type_name='Dataset',
      supports_download=False,
  )

  def List(self, request, global_params=None):
    r"""Lists all datasets in the specified project to which you have been granted the READER dataset role.

    Args:
      request: (BigqueryDatasetsListRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (DatasetList) The response message.
    """
    config = self.GetMethodConfig('List')
    return self._RunMethod(
        config, request, global_params=global_params)

  List.method_config = lambda: base_api.ApiMethodInfo(
      http_method='GET',
      method_id='bigquery.datasets.list',
      ordered_params=['projectId'],
      path_params=['projectId'],
      query_params=['all', 'filter', 'maxResults', 'pageToken'],
      relative_path='projects/{projectId}/datasets',
      request_field='',
      request_type_name='BigqueryDatasetsListRequest',
      response_type_name='DatasetList',
      supports_download=False,
  )

  def Patch(self, request, global_params=None):
    r"""Updates information in an existing dataset. The update method replaces the entire dataset resource, whereas the patch method only replaces fields that are provided in the submitted dataset resource. This method supports patch semantics.

    Args:
      request: (BigqueryDatasetsPatchRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (Dataset) The response message.
    """
    config = self.GetMethodConfig('Patch')
    return self._RunMethod(
        config, request, global_params=global_params)

  Patch.method_config = lambda: base_api.ApiMethodInfo(
      http_method='PATCH',
      method_id='bigquery.datasets.patch',
      ordered_params=['projectId', 'datasetId'],
      path_params=['datasetId', 'projectId'],
      query_params=[],
      relative_path='projects/{projectId}/datasets/{datasetId}',
      request_field='dataset',
      request_type_name='BigqueryDatasetsPatchRequest',
      response_type_name='Dataset',
      supports_download=False,
  )

  def Update(self, request, global_params=None):
    r"""Updates information in an existing dataset. The update method replaces the entire dataset resource, whereas the patch method only replaces fields that are provided in the submitted dataset resource.

    Args:
      request: (BigqueryDatasetsUpdateRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (Dataset) The response message.
    """
    config = self.GetMethodConfig('Update')
    return self._RunMethod(
        config, request, global_params=global_params)

  Update.method_config = lambda: base_api.ApiMethodInfo(
      http_method='PUT',
      method_id='bigquery.datasets.update',
      ordered_params=['projectId', 'datasetId'],
      path_params=['datasetId', 'projectId'],
      query_params=[],
      relative_path='projects/{projectId}/datasets/{datasetId}',
      request_field='dataset',
      request_type_name='BigqueryDatasetsUpdateRequest',
      response_type_name='Dataset',
      supports_download=False,
  )
class JobsService(base_api.BaseApiService):
  """Service class for the jobs resource."""

  _NAME = 'jobs'

  def __init__(self, client):
    super(BigqueryV2.JobsService, self).__init__(client)
    # jobs.insert supports media upload (e.g. load-job payloads); both
    # simple multipart and resumable uploads are allowed, with no size cap.
    self._upload_configs = {
        'Insert': base_api.ApiUploadInfo(
            accept=['*/*'],
            max_size=None,
            resumable_multipart=True,
            resumable_path='/resumable/upload/bigquery/v2/projects/{projectId}/jobs',
            simple_multipart=True,
            simple_path='/upload/bigquery/v2/projects/{projectId}/jobs',
        ),
    }

  def Cancel(self, request, global_params=None):
    r"""Requests that a job be cancelled. This call will return immediately, and the client will need to poll for the job status to see if the cancel completed successfully. Cancelled jobs may still incur costs.

    Args:
      request: (BigqueryJobsCancelRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (JobCancelResponse) The response message.
    """
    config = self.GetMethodConfig('Cancel')
    return self._RunMethod(
        config, request, global_params=global_params)

  # method_config is a zero-argument lambda so the ApiMethodInfo is built
  # lazily on first use (via GetMethodConfig) rather than at import time.
  Cancel.method_config = lambda: base_api.ApiMethodInfo(
      http_method='POST',
      method_id='bigquery.jobs.cancel',
      ordered_params=['projectId', 'jobId'],
      path_params=['jobId', 'projectId'],
      query_params=[],
      # NOTE(review): singular 'project/' comes from the discovery document
      # this client was generated from; the current BigQuery REST reference
      # documents 'projects/{projectId}/jobs/{jobId}/cancel'. Confirm against
      # the live API before relying on this path.
      relative_path='project/{projectId}/jobs/{jobId}/cancel',
      request_field='',
      request_type_name='BigqueryJobsCancelRequest',
      response_type_name='JobCancelResponse',
      supports_download=False,
  )

  def Get(self, request, global_params=None):
    r"""Returns information about a specific job. Job information is available for a six month period after creation. Requires that you're the person who ran the job, or have the Is Owner project role.

    Args:
      request: (BigqueryJobsGetRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (Job) The response message.
    """
    config = self.GetMethodConfig('Get')
    return self._RunMethod(
        config, request, global_params=global_params)

  Get.method_config = lambda: base_api.ApiMethodInfo(
      http_method='GET',
      method_id='bigquery.jobs.get',
      ordered_params=['projectId', 'jobId'],
      path_params=['jobId', 'projectId'],
      query_params=[],
      relative_path='projects/{projectId}/jobs/{jobId}',
      request_field='',
      request_type_name='BigqueryJobsGetRequest',
      response_type_name='Job',
      supports_download=False,
  )

  def GetQueryResults(self, request, global_params=None):
    r"""Retrieves the results of a query job.

    Args:
      request: (BigqueryJobsGetQueryResultsRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (GetQueryResultsResponse) The response message.
    """
    config = self.GetMethodConfig('GetQueryResults')
    return self._RunMethod(
        config, request, global_params=global_params)

  GetQueryResults.method_config = lambda: base_api.ApiMethodInfo(
      http_method='GET',
      method_id='bigquery.jobs.getQueryResults',
      ordered_params=['projectId', 'jobId'],
      path_params=['jobId', 'projectId'],
      query_params=['maxResults', 'pageToken', 'startIndex', 'timeoutMs'],
      relative_path='projects/{projectId}/queries/{jobId}',
      request_field='',
      request_type_name='BigqueryJobsGetQueryResultsRequest',
      response_type_name='GetQueryResultsResponse',
      supports_download=False,
  )

  def Insert(self, request, global_params=None, upload=None):
    r"""Starts a new asynchronous job. Requires the Can View project role.

    Args:
      request: (BigqueryJobsInsertRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
      upload: (Upload, default: None) If present, upload
          this stream with the request.

    Returns:
      (Job) The response message.
    """
    config = self.GetMethodConfig('Insert')
    # Insert is the one jobs method with an upload config (see __init__).
    upload_config = self.GetUploadConfig('Insert')
    return self._RunMethod(
        config, request, global_params=global_params,
        upload=upload, upload_config=upload_config)

  Insert.method_config = lambda: base_api.ApiMethodInfo(
      http_method='POST',
      method_id='bigquery.jobs.insert',
      ordered_params=['projectId'],
      path_params=['projectId'],
      query_params=[],
      relative_path='projects/{projectId}/jobs',
      request_field='job',
      request_type_name='BigqueryJobsInsertRequest',
      response_type_name='Job',
      supports_download=False,
  )

  def List(self, request, global_params=None):
    r"""Lists all jobs that you started in the specified project. Job information is available for a six month period after creation. The job list is sorted in reverse chronological order, by job creation time. Requires the Can View project role, or the Is Owner project role if you set the allUsers property.

    Args:
      request: (BigqueryJobsListRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (JobList) The response message.
    """
    config = self.GetMethodConfig('List')
    return self._RunMethod(
        config, request, global_params=global_params)

  List.method_config = lambda: base_api.ApiMethodInfo(
      http_method='GET',
      method_id='bigquery.jobs.list',
      ordered_params=['projectId'],
      path_params=['projectId'],
      query_params=['allUsers', 'maxResults', 'pageToken', 'projection', 'stateFilter'],
      relative_path='projects/{projectId}/jobs',
      request_field='',
      request_type_name='BigqueryJobsListRequest',
      response_type_name='JobList',
      supports_download=False,
  )

  def Query(self, request, global_params=None):
    r"""Runs a BigQuery SQL query synchronously and returns query results if the query completes within a specified timeout.

    Args:
      request: (BigqueryJobsQueryRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (QueryResponse) The response message.
    """
    config = self.GetMethodConfig('Query')
    return self._RunMethod(
        config, request, global_params=global_params)

  Query.method_config = lambda: base_api.ApiMethodInfo(
      http_method='POST',
      method_id='bigquery.jobs.query',
      ordered_params=['projectId'],
      path_params=['projectId'],
      query_params=[],
      relative_path='projects/{projectId}/queries',
      request_field='queryRequest',
      request_type_name='BigqueryJobsQueryRequest',
      response_type_name='QueryResponse',
      supports_download=False,
  )
class ProjectsService(base_api.BaseApiService):
  """Service class for the projects resource."""

  _NAME = 'projects'

  def __init__(self, client):
    super(BigqueryV2.ProjectsService, self).__init__(client)
    # No media-upload methods exist on this resource.
    self._upload_configs = {
        }

  def List(self, request, global_params=None):
    r"""Lists all projects to which you have been granted any project role.

    Args:
      request: (BigqueryProjectsListRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (ProjectList) The response message.
    """
    config = self.GetMethodConfig('List')
    return self._RunMethod(
        config, request, global_params=global_params)

  # Lazily-built request-routing metadata (see GetMethodConfig).
  List.method_config = lambda: base_api.ApiMethodInfo(
      http_method='GET',
      method_id='bigquery.projects.list',
      ordered_params=[],
      path_params=[],
      query_params=['maxResults', 'pageToken'],
      relative_path='projects',
      request_field='',
      request_type_name='BigqueryProjectsListRequest',
      response_type_name='ProjectList',
      supports_download=False,
  )
class TabledataService(base_api.BaseApiService):
  """Service class for the tabledata resource."""

  _NAME = 'tabledata'

  def __init__(self, client):
    super(BigqueryV2.TabledataService, self).__init__(client)
    # No media-upload methods exist on this resource.
    self._upload_configs = {
        }

  def InsertAll(self, request, global_params=None):
    r"""Streams data into BigQuery one record at a time without needing to run a load job. Requires the WRITER dataset role.

    Args:
      request: (BigqueryTabledataInsertAllRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (TableDataInsertAllResponse) The response message.
    """
    config = self.GetMethodConfig('InsertAll')
    return self._RunMethod(
        config, request, global_params=global_params)

  # Lazily-built request-routing metadata (see GetMethodConfig).
  InsertAll.method_config = lambda: base_api.ApiMethodInfo(
      http_method='POST',
      method_id='bigquery.tabledata.insertAll',
      ordered_params=['projectId', 'datasetId', 'tableId'],
      path_params=['datasetId', 'projectId', 'tableId'],
      query_params=[],
      relative_path='projects/{projectId}/datasets/{datasetId}/tables/{tableId}/insertAll',
      request_field='tableDataInsertAllRequest',
      request_type_name='BigqueryTabledataInsertAllRequest',
      response_type_name='TableDataInsertAllResponse',
      supports_download=False,
  )

  def List(self, request, global_params=None):
    r"""Retrieves table data from a specified set of rows. Requires the READER dataset role.

    Args:
      request: (BigqueryTabledataListRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (TableDataList) The response message.
    """
    config = self.GetMethodConfig('List')
    return self._RunMethod(
        config, request, global_params=global_params)

  List.method_config = lambda: base_api.ApiMethodInfo(
      http_method='GET',
      method_id='bigquery.tabledata.list',
      ordered_params=['projectId', 'datasetId', 'tableId'],
      path_params=['datasetId', 'projectId', 'tableId'],
      query_params=['maxResults', 'pageToken', 'startIndex'],
      relative_path='projects/{projectId}/datasets/{datasetId}/tables/{tableId}/data',
      request_field='',
      request_type_name='BigqueryTabledataListRequest',
      response_type_name='TableDataList',
      supports_download=False,
  )
class TablesService(base_api.BaseApiService):
  """Service class for the tables resource."""

  _NAME = 'tables'

  def __init__(self, client):
    super(BigqueryV2.TablesService, self).__init__(client)
    # No media-upload methods exist on this resource.
    self._upload_configs = {
        }

  def Delete(self, request, global_params=None):
    r"""Deletes the table specified by tableId from the dataset. If the table contains data, all the data will be deleted.

    Args:
      request: (BigqueryTablesDeleteRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (BigqueryTablesDeleteResponse) The response message.
    """
    config = self.GetMethodConfig('Delete')
    return self._RunMethod(
        config, request, global_params=global_params)

  # method_config is a zero-argument lambda so the ApiMethodInfo is built
  # lazily on first use (via GetMethodConfig) rather than at import time.
  Delete.method_config = lambda: base_api.ApiMethodInfo(
      http_method='DELETE',
      method_id='bigquery.tables.delete',
      ordered_params=['projectId', 'datasetId', 'tableId'],
      path_params=['datasetId', 'projectId', 'tableId'],
      query_params=[],
      relative_path='projects/{projectId}/datasets/{datasetId}/tables/{tableId}',
      request_field='',
      request_type_name='BigqueryTablesDeleteRequest',
      response_type_name='BigqueryTablesDeleteResponse',
      supports_download=False,
  )

  def Get(self, request, global_params=None):
    r"""Gets the specified table resource by table ID. This method does not return the data in the table, it only returns the table resource, which describes the structure of this table.

    Args:
      request: (BigqueryTablesGetRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (Table) The response message.
    """
    config = self.GetMethodConfig('Get')
    return self._RunMethod(
        config, request, global_params=global_params)

  Get.method_config = lambda: base_api.ApiMethodInfo(
      http_method='GET',
      method_id='bigquery.tables.get',
      ordered_params=['projectId', 'datasetId', 'tableId'],
      path_params=['datasetId', 'projectId', 'tableId'],
      query_params=[],
      relative_path='projects/{projectId}/datasets/{datasetId}/tables/{tableId}',
      request_field='',
      request_type_name='BigqueryTablesGetRequest',
      response_type_name='Table',
      supports_download=False,
  )

  def Insert(self, request, global_params=None):
    r"""Creates a new, empty table in the dataset.

    Args:
      request: (BigqueryTablesInsertRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (Table) The response message.
    """
    config = self.GetMethodConfig('Insert')
    return self._RunMethod(
        config, request, global_params=global_params)

  Insert.method_config = lambda: base_api.ApiMethodInfo(
      http_method='POST',
      method_id='bigquery.tables.insert',
      ordered_params=['projectId', 'datasetId'],
      path_params=['datasetId', 'projectId'],
      query_params=[],
      relative_path='projects/{projectId}/datasets/{datasetId}/tables',
      request_field='table',
      request_type_name='BigqueryTablesInsertRequest',
      response_type_name='Table',
      supports_download=False,
  )

  def List(self, request, global_params=None):
    r"""Lists all tables in the specified dataset. Requires the READER dataset role.

    Args:
      request: (BigqueryTablesListRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (TableList) The response message.
    """
    config = self.GetMethodConfig('List')
    return self._RunMethod(
        config, request, global_params=global_params)

  List.method_config = lambda: base_api.ApiMethodInfo(
      http_method='GET',
      method_id='bigquery.tables.list',
      ordered_params=['projectId', 'datasetId'],
      path_params=['datasetId', 'projectId'],
      query_params=['maxResults', 'pageToken'],
      relative_path='projects/{projectId}/datasets/{datasetId}/tables',
      request_field='',
      request_type_name='BigqueryTablesListRequest',
      response_type_name='TableList',
      supports_download=False,
  )

  def Patch(self, request, global_params=None):
    r"""Updates information in an existing table. The update method replaces the entire table resource, whereas the patch method only replaces fields that are provided in the submitted table resource. This method supports patch semantics.

    Args:
      request: (BigqueryTablesPatchRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (Table) The response message.
    """
    config = self.GetMethodConfig('Patch')
    return self._RunMethod(
        config, request, global_params=global_params)

  Patch.method_config = lambda: base_api.ApiMethodInfo(
      http_method='PATCH',
      method_id='bigquery.tables.patch',
      ordered_params=['projectId', 'datasetId', 'tableId'],
      path_params=['datasetId', 'projectId', 'tableId'],
      query_params=[],
      relative_path='projects/{projectId}/datasets/{datasetId}/tables/{tableId}',
      request_field='table',
      request_type_name='BigqueryTablesPatchRequest',
      response_type_name='Table',
      supports_download=False,
  )

  def Update(self, request, global_params=None):
    r"""Updates information in an existing table. The update method replaces the entire table resource, whereas the patch method only replaces fields that are provided in the submitted table resource.

    Args:
      request: (BigqueryTablesUpdateRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (Table) The response message.
    """
    config = self.GetMethodConfig('Update')
    return self._RunMethod(
        config, request, global_params=global_params)

  Update.method_config = lambda: base_api.ApiMethodInfo(
      http_method='PUT',
      method_id='bigquery.tables.update',
      ordered_params=['projectId', 'datasetId', 'tableId'],
      path_params=['datasetId', 'projectId', 'tableId'],
      query_params=[],
      relative_path='projects/{projectId}/datasets/{datasetId}/tables/{tableId}',
      request_field='table',
      request_type_name='BigqueryTablesUpdateRequest',
      response_type_name='Table',
      supports_download=False,
  )
| python | Apache-2.0 | 1b14f58b3c895f542c724028dd0fb8ae0d816510 | 2026-01-05T07:12:01.390580Z | false |
google/apitools | https://github.com/google/apitools/blob/1b14f58b3c895f542c724028dd0fb8ae0d816510/samples/bigquery_sample/bigquery_v2/__init__.py | samples/bigquery_sample/bigquery_v2/__init__.py | """Package marker file."""
from __future__ import absolute_import

import pkgutil

# Declare this directory as an extensible package: extend_path lets the
# package's submodules be split across multiple directories on sys.path
# (classic namespace-package mechanism).
__path__ = pkgutil.extend_path(__path__, __name__)
| python | Apache-2.0 | 1b14f58b3c895f542c724028dd0fb8ae0d816510 | 2026-01-05T07:12:01.390580Z | false |
google/apitools | https://github.com/google/apitools/blob/1b14f58b3c895f542c724028dd0fb8ae0d816510/apitools/__init__.py | apitools/__init__.py | #!/usr/bin/env python
#
# Copyright 2015 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Shared __init__.py for apitools."""
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
| python | Apache-2.0 | 1b14f58b3c895f542c724028dd0fb8ae0d816510 | 2026-01-05T07:12:01.390580Z | false |
google/apitools | https://github.com/google/apitools/blob/1b14f58b3c895f542c724028dd0fb8ae0d816510/apitools/scripts/__init__.py | apitools/scripts/__init__.py | #!/usr/bin/env python
#
# Copyright 2015 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Shared __init__.py for apitools."""
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
| python | Apache-2.0 | 1b14f58b3c895f542c724028dd0fb8ae0d816510 | 2026-01-05T07:12:01.390580Z | false |
google/apitools | https://github.com/google/apitools/blob/1b14f58b3c895f542c724028dd0fb8ae0d816510/apitools/base/__init__.py | apitools/base/__init__.py | #!/usr/bin/env python
#
# Copyright 2015 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Shared __init__.py for apitools."""
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
| python | Apache-2.0 | 1b14f58b3c895f542c724028dd0fb8ae0d816510 | 2026-01-05T07:12:01.390580Z | false |
google/apitools | https://github.com/google/apitools/blob/1b14f58b3c895f542c724028dd0fb8ae0d816510/apitools/base/py/stream_slice_test.py | apitools/base/py/stream_slice_test.py | #
# Copyright 2015 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for stream_slice."""
import string
import unittest
import six
from apitools.base.py import exceptions
from apitools.base.py import stream_slice
class StreamSliceTest(unittest.TestCase):
    """Unit tests for stream_slice.StreamSlice over an in-memory stream."""

    def setUp(self):
        # Every test is backed by the 52 ASCII letters, so expected reads
        # can be sliced directly out of self.value.
        self.stream = six.StringIO(string.ascii_letters)
        self.value = self.stream.getvalue()
        self.stream.seek(0)

    def testSimpleSlice(self):
        sliced = stream_slice.StreamSlice(self.stream, 10)
        self.assertEqual('', sliced.read(0))
        self.assertEqual(self.value[0:3], sliced.read(3))
        # Progress is reported as consumed/total in the string form.
        self.assertIn('7/10', str(sliced))
        self.assertEqual(self.value[3:10], sliced.read())
        # Once the slice is consumed, both read forms return ''.
        self.assertEqual('', sliced.read())
        self.assertEqual('', sliced.read(10))
        self.assertEqual(10, self.stream.tell())

    def testEmptySlice(self):
        sliced = stream_slice.StreamSlice(self.stream, 0)
        self.assertEqual('', sliced.read(5))
        self.assertEqual('', sliced.read())
        # The wrapped stream must not have been advanced at all.
        self.assertEqual(0, self.stream.tell())

    def testOffsetStream(self):
        # A slice taken mid-stream reads relative to the current position.
        self.stream.seek(26)
        sliced = stream_slice.StreamSlice(self.stream, 26)
        self.assertEqual(self.value[26:36], sliced.read(10))
        self.assertEqual(self.value[36:], sliced.read())
        self.assertEqual('', sliced.read())

    def testTooShortStream(self):
        # The slice claims 1000 bytes but the stream only has 52; reading
        # past the end must raise StreamExhausted.
        sliced = stream_slice.StreamSlice(self.stream, 1000)
        self.assertEqual(self.value, sliced.read())
        self.assertEqual('', sliced.read(0))
        with self.assertRaises(exceptions.StreamExhausted) as ctx:
            sliced.read()
        with self.assertRaises(exceptions.StreamExhausted) as ctx:
            sliced.read(10)
        self.assertIn('exhausted after %d' % len(self.value),
                      str(ctx.exception))
| python | Apache-2.0 | 1b14f58b3c895f542c724028dd0fb8ae0d816510 | 2026-01-05T07:12:01.390580Z | false |
google/apitools | https://github.com/google/apitools/blob/1b14f58b3c895f542c724028dd0fb8ae0d816510/apitools/base/py/extra_types_test.py | apitools/base/py/extra_types_test.py | #
# Copyright 2015 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import datetime
import json
import math
import unittest
from apitools.base.protorpclite import messages
from apitools.base.py import encoding
from apitools.base.py import exceptions
from apitools.base.py import extra_types
class ExtraTypesTest(unittest.TestCase):
    """Tests for JSON <-> Python value translation in extra_types."""

    def assertRoundTrip(self, value):
        # Translate across the JsonProto/Python boundary and back, picking
        # the direction that matches the input's type.
        if isinstance(value, extra_types._JSON_PROTO_TYPES):
            self.assertEqual(
                value,
                extra_types._PythonValueToJsonProto(
                    extra_types._JsonProtoToPythonValue(value)))
        else:
            self.assertEqual(
                value,
                extra_types._JsonProtoToPythonValue(
                    extra_types._PythonValueToJsonProto(value)))

    def assertTranslations(self, py_value, json_proto):
        # The two representations must convert exactly into each other.
        self.assertEqual(
            py_value, extra_types._JsonProtoToPythonValue(json_proto))
        self.assertEqual(
            json_proto, extra_types._PythonValueToJsonProto(py_value))

    def testInvalidProtos(self):
        # A JsonValue must have exactly one variant field set.
        with self.assertRaises(exceptions.InvalidDataError):
            extra_types._ValidateJsonValue(extra_types.JsonValue())
        with self.assertRaises(exceptions.InvalidDataError):
            extra_types._ValidateJsonValue(
                extra_types.JsonValue(is_null=True, string_value='a'))
        with self.assertRaises(exceptions.InvalidDataError):
            extra_types._ValidateJsonValue(
                extra_types.JsonValue(integer_value=3, string_value='a'))

    def testNullEncoding(self):
        self.assertTranslations(None, extra_types.JsonValue(is_null=True))

    def testJsonNumberEncoding(self):
        # Integers map to integer_value, floats to double_value.
        seventeen = extra_types.JsonValue(integer_value=17)
        self.assertRoundTrip(17)
        self.assertRoundTrip(seventeen)
        self.assertTranslations(17, seventeen)
        json_pi = extra_types.JsonValue(double_value=math.pi)
        self.assertRoundTrip(math.pi)
        self.assertRoundTrip(json_pi)
        self.assertTranslations(math.pi, json_pi)

    def testArrayEncoding(self):
        array = [3, 'four', False]
        json_array = extra_types.JsonArray(entries=[
            extra_types.JsonValue(integer_value=3),
            extra_types.JsonValue(string_value='four'),
            extra_types.JsonValue(boolean_value=False),
        ])
        self.assertRoundTrip(array)
        self.assertRoundTrip(json_array)
        self.assertTranslations(array, json_array)

    def testArrayAsValue(self):
        # A top-level JSON array parses into a JsonValue wrapping an array.
        array_json = '[3, "four", false]'
        array = [3, 'four', False]
        value = encoding.JsonToMessage(extra_types.JsonValue, array_json)
        self.assertTrue(isinstance(value, extra_types.JsonValue))
        self.assertEqual(array, encoding.MessageToPyValue(value))

    def testObjectAsValue(self):
        # A top-level JSON object parses into a JsonValue wrapping an object.
        obj_json = '{"works": true}'
        obj = {'works': True}
        value = encoding.JsonToMessage(extra_types.JsonValue, obj_json)
        self.assertTrue(isinstance(value, extra_types.JsonValue))
        self.assertEqual(obj, encoding.MessageToPyValue(value))

    def testDictEncoding(self):
        d = {'a': 6, 'b': 'eleventeen'}
        json_d = extra_types.JsonObject(properties=[
            extra_types.JsonObject.Property(
                key='a', value=extra_types.JsonValue(integer_value=6)),
            extra_types.JsonObject.Property(
                key='b',
                value=extra_types.JsonValue(string_value='eleventeen')),
        ])
        self.assertRoundTrip(d)
        # We don't know json_d will round-trip, because of randomness in
        # python dictionary iteration ordering. We also need to force
        # comparison as lists, since hashing protos isn't helpful.
        translated_properties = extra_types._PythonValueToJsonProto(
            d).properties
        for p in json_d.properties:
            self.assertIn(p, translated_properties)
        for p in translated_properties:
            self.assertIn(p, json_d.properties)

    def testJsonObjectPropertyTranslation(self):
        value = extra_types.JsonValue(string_value='abc')
        obj = extra_types.JsonObject(properties=[
            extra_types.JsonObject.Property(key='attr_name', value=value)])
        json_value = '"abc"'
        json_obj = '{"attr_name": "abc"}'
        self.assertRoundTrip(value)
        self.assertRoundTrip(obj)
        self.assertRoundTrip(json_value)
        self.assertRoundTrip(json_obj)
        self.assertEqual(json_value, encoding.MessageToJson(value))
        self.assertEqual(json_obj, encoding.MessageToJson(obj))

    def testJsonValueAsFieldTranslation(self):
        # JsonValue used as a MessageField inside another message.
        class HasJsonValueMsg(messages.Message):
            some_value = messages.MessageField(extra_types.JsonValue, 1)
        msg_json = '{"some_value": [1, 2, 3]}'
        msg = HasJsonValueMsg(
            some_value=encoding.PyValueToMessage(
                extra_types.JsonValue, [1, 2, 3]))
        self.assertEqual(msg,
                         encoding.JsonToMessage(HasJsonValueMsg, msg_json))
        self.assertEqual(msg_json, encoding.MessageToJson(msg))

    def testDateField(self):
        # DateField serializes as ISO-8601 YYYY-MM-DD strings.
        class DateMsg(messages.Message):
            start_date = extra_types.DateField(1)
            all_dates = extra_types.DateField(2, repeated=True)
        msg = DateMsg(
            start_date=datetime.date(1752, 9, 9), all_dates=[
                datetime.date(1979, 5, 6),
                datetime.date(1980, 10, 24),
                datetime.date(1981, 1, 19),
            ])
        msg_dict = {
            'start_date': '1752-09-09',
            'all_dates': ['1979-05-06', '1980-10-24', '1981-01-19'],
        }
        self.assertEqual(msg_dict, json.loads(encoding.MessageToJson(msg)))
        self.assertEqual(
            msg, encoding.JsonToMessage(DateMsg, json.dumps(msg_dict)))

    def testInt64(self):
        # Testing roundtrip of type 'long'
        class DogeMsg(messages.Message):
            such_string = messages.StringField(1)
            wow = messages.IntegerField(2, variant=messages.Variant.INT64)
            very_unsigned = messages.IntegerField(
                3, variant=messages.Variant.UINT64)
            much_repeated = messages.IntegerField(
                4, variant=messages.Variant.INT64, repeated=True)

        def MtoJ(msg):
            return encoding.MessageToJson(msg)

        def JtoM(class_type, json_str):
            return encoding.JsonToMessage(class_type, json_str)

        # Bounce the message between JSON and proto form `times` times.
        def DoRoundtrip(class_type, json_msg=None, message=None, times=4):
            if json_msg:
                json_msg = MtoJ(JtoM(class_type, json_msg))
            if message:
                message = JtoM(class_type, MtoJ(message))
            if times == 0:
                result = json_msg if json_msg else message
                return result
            return DoRoundtrip(class_type=class_type, json_msg=json_msg,
                               message=message, times=times - 1)

        # Single
        # 64-bit ints are serialized as JSON strings to avoid precision loss.
        json_msg = ('{"such_string": "poot", "wow": "-1234", '
                    '"very_unsigned": "999", "much_repeated": ["123", "456"]}')
        out_json = MtoJ(JtoM(DogeMsg, json_msg))
        self.assertEqual(json.loads(out_json)['wow'], '-1234')

        # Repeated test case
        msg = DogeMsg(such_string='wow', wow=-1234,
                      very_unsigned=800, much_repeated=[123, 456])
        self.assertEqual(msg, DoRoundtrip(DogeMsg, message=msg))
| python | Apache-2.0 | 1b14f58b3c895f542c724028dd0fb8ae0d816510 | 2026-01-05T07:12:01.390580Z | false |
google/apitools | https://github.com/google/apitools/blob/1b14f58b3c895f542c724028dd0fb8ae0d816510/apitools/base/py/buffered_stream.py | apitools/base/py/buffered_stream.py | #!/usr/bin/env python
#
# Copyright 2015 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Small helper class to provide a small slice of a stream.
This class reads ahead to detect if we are at the end of the stream.
"""
from apitools.base.py import exceptions
# TODO(user): Consider replacing this with a StringIO.
class BufferedStream(object):

    """Buffers a stream, reading ahead to determine if we're at the end."""

    def __init__(self, stream, start, size):
        self._stream = stream
        self._start_pos = start
        self._cursor = 0
        self._data = self._stream.read(size)
        # A short read means the wrapped stream had fewer than `size`
        # bytes left, i.e. we have already seen its end.
        self._at_end = len(self._data) < size
        self._end_pos = self._start_pos + len(self._data)

    def __str__(self):
        return ('Buffered stream %s from position %s-%s with %s '
                'bytes remaining' % (self._stream, self._start_pos,
                                     self._end_pos, self._bytes_remaining))

    def __len__(self):
        return len(self._data)

    @property
    def stream_exhausted(self):
        # True when the wrapped stream itself is fully consumed.
        return self._at_end

    @property
    def stream_end_position(self):
        return self._end_pos

    @property
    def _bytes_remaining(self):
        return len(self._data) - self._cursor

    def read(self, size=None):  # pylint: disable=invalid-name
        """Reads from the buffer."""
        # Unbounded reads are deliberately unsupported here.
        if size is None or size < 0:
            raise exceptions.NotYetImplementedError(
                'Illegal read of size %s requested on BufferedStream. '
                'Wrapped stream %s is at position %s-%s, '
                '%s bytes remaining.' %
                (size, self._stream, self._start_pos, self._end_pos,
                 self._bytes_remaining))
        if not self._bytes_remaining:
            return ''
        chunk_size = min(size, self._bytes_remaining)
        chunk = self._data[self._cursor:self._cursor + chunk_size]
        self._cursor += chunk_size
        return chunk
| python | Apache-2.0 | 1b14f58b3c895f542c724028dd0fb8ae0d816510 | 2026-01-05T07:12:01.390580Z | false |
google/apitools | https://github.com/google/apitools/blob/1b14f58b3c895f542c724028dd0fb8ae0d816510/apitools/base/py/util_test.py | apitools/base/py/util_test.py | #
# Copyright 2015 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for util.py."""
import unittest
from apitools.base.protorpclite import messages
from apitools.base.py import encoding
from apitools.base.py import exceptions
from apitools.base.py import util
class MockedMethodConfig(object):
    """Minimal stand-in for a generated method config.

    Only the two attributes read by util.ExpandRelativePath are provided.
    """

    def __init__(self, relative_path, path_params):
        self.relative_path = relative_path
        self.path_params = path_params
class MessageWithRemappings(messages.Message):
    """Test message whose field and enum names receive custom JSON names.

    The remappings themselves are registered right below this class.
    """

    class AnEnum(messages.Enum):
        value_one = 1
        value_two = 2

    str_field = messages.StringField(1)
    enum_field = messages.EnumField('AnEnum', 2)
    enum_field_remapping = messages.EnumField('AnEnum', 3)
# Register the custom JSON names exercised by UtilTest below:
# str_field <-> 'path_field', enum_field_remapping <-> 'enum_field_remapped',
# and AnEnum.value_one <-> 'ONE' (value_two keeps its Python name).
encoding.AddCustomJsonFieldMapping(
    MessageWithRemappings, 'str_field', 'path_field')
encoding.AddCustomJsonFieldMapping(
    MessageWithRemappings, 'enum_field_remapping', 'enum_field_remapped')
encoding.AddCustomJsonEnumMapping(
    MessageWithRemappings.AnEnum, 'value_one', 'ONE')
class UtilTest(unittest.TestCase):
    """Tests for the helpers in apitools.base.py.util."""

    def testExpand(self):
        method_config_xy = MockedMethodConfig(relative_path='{x}/y/{z}',
                                              path_params=['x', 'z'])
        self.assertEqual(
            util.ExpandRelativePath(method_config_xy, {'x': '1', 'z': '2'}),
            '1/y/2')
        # An explicit relative_path argument overrides the config's own.
        self.assertEqual(
            util.ExpandRelativePath(
                method_config_xy,
                {'x': '1', 'z': '2'},
                relative_path='{x}/y/{z}/q'),
            '1/y/2/q')

    def testReservedExpansion(self):
        # '{+x}' is reserved expansion: reserved URI characters pass through.
        method_config_reserved = MockedMethodConfig(relative_path='{+x}/baz',
                                                    path_params=['x'])
        self.assertEqual('foo/:bar:/baz', util.ExpandRelativePath(
            method_config_reserved, {'x': 'foo/:bar:'}))
        # Plain '{x}' percent-encodes reserved characters.
        method_config_no_reserved = MockedMethodConfig(relative_path='{x}/baz',
                                                       path_params=['x'])
        self.assertEqual('foo%2F%3Abar%3A/baz', util.ExpandRelativePath(
            method_config_no_reserved, {'x': 'foo/:bar:'}))

    def testCalculateWaitForRetry(self):
        # Waits are randomized, so only their bounds are asserted; each
        # retry roughly doubles the window.
        try0 = util.CalculateWaitForRetry(0)
        self.assertTrue(try0 >= 1.0)
        self.assertTrue(try0 <= 1.5)
        try1 = util.CalculateWaitForRetry(1)
        self.assertTrue(try1 >= 1.0)
        self.assertTrue(try1 <= 3.0)
        try2 = util.CalculateWaitForRetry(2)
        self.assertTrue(try2 >= 2.0)
        self.assertTrue(try2 <= 6.0)
        try3 = util.CalculateWaitForRetry(3)
        self.assertTrue(try3 >= 4.0)
        self.assertTrue(try3 <= 12.0)
        try4 = util.CalculateWaitForRetry(4)
        self.assertTrue(try4 >= 8.0)
        self.assertTrue(try4 <= 24.0)
        # max_wait caps the computed wait.
        self.assertAlmostEqual(10, util.CalculateWaitForRetry(5, max_wait=10))

    def testTypecheck(self):

        class Class1(object):
            pass

        class Class2(object):
            pass

        class Class3(object):
            pass

        instance_of_class1 = Class1()

        # Typecheck returns its argument unchanged on success, and accepts
        # arbitrarily nested tuples of acceptable types.
        self.assertEqual(
            instance_of_class1, util.Typecheck(instance_of_class1, Class1))

        self.assertEqual(
            instance_of_class1,
            util.Typecheck(instance_of_class1, ((Class1, Class2), Class3)))

        self.assertEqual(
            instance_of_class1,
            util.Typecheck(instance_of_class1, (Class1, (Class2, Class3))))

        self.assertEqual(
            instance_of_class1,
            util.Typecheck(instance_of_class1, Class1, 'message'))

        self.assertEqual(
            instance_of_class1,
            util.Typecheck(
                instance_of_class1, ((Class1, Class2), Class3), 'message'))

        self.assertEqual(
            instance_of_class1,
            util.Typecheck(
                instance_of_class1, (Class1, (Class2, Class3)), 'message'))

        # Mismatches raise TypecheckError, with or without a custom message.
        with self.assertRaises(exceptions.TypecheckError):
            util.Typecheck(instance_of_class1, Class2)

        with self.assertRaises(exceptions.TypecheckError):
            util.Typecheck(instance_of_class1, (Class2, Class3))

        with self.assertRaises(exceptions.TypecheckError):
            util.Typecheck(instance_of_class1, Class2, 'message')

        with self.assertRaises(exceptions.TypecheckError):
            util.Typecheck(instance_of_class1, (Class2, Class3), 'message')

    def testAcceptableMimeType(self):
        # Wildcards may appear on either side of the '/', or alone.
        valid_pairs = (
            ('*', 'text/plain'),
            ('*/*', 'text/plain'),
            ('text/*', 'text/plain'),
            ('*/plain', 'text/plain'),
            ('text/plain', 'text/plain'),
        )

        for accept, mime_type in valid_pairs:
            self.assertTrue(util.AcceptableMimeType([accept], mime_type))

        invalid_pairs = (
            ('text/*', 'application/json'),
            ('text/plain', 'application/json'),
        )

        for accept, mime_type in invalid_pairs:
            self.assertFalse(util.AcceptableMimeType([accept], mime_type))

        # A single matching pattern in the accept list is sufficient.
        self.assertTrue(util.AcceptableMimeType(['application/json', '*/*'],
                                                'text/plain'))
        self.assertFalse(util.AcceptableMimeType(['application/json', 'img/*'],
                                                 'text/plain'))

    def testMalformedMimeType(self):
        # A mime type without a '/' is invalid user input.
        self.assertRaises(
            exceptions.InvalidUserInputError,
            util.AcceptableMimeType, ['*/*'], 'abcd')

    def testUnsupportedMimeType(self):
        # Accept-header parameters (';q=...') are not supported.
        self.assertRaises(
            exceptions.GeneratedClientError,
            util.AcceptableMimeType, ['text/html;q=0.9'], 'text/html')

    def testMapRequestParams(self):
        params = {
            'str_field': 'foo',
            'enum_field': MessageWithRemappings.AnEnum.value_one,
            'enum_field_remapping': MessageWithRemappings.AnEnum.value_one,
        }
        remapped_params = {
            'path_field': 'foo',
            'enum_field': 'ONE',
            'enum_field_remapped': 'ONE',
        }
        self.assertEqual(remapped_params,
                         util.MapRequestParams(params, MessageWithRemappings))

        # value_two has no custom mapping, so its Python name is used.
        params['enum_field'] = MessageWithRemappings.AnEnum.value_two
        remapped_params['enum_field'] = 'value_two'
        self.assertEqual(remapped_params,
                         util.MapRequestParams(params, MessageWithRemappings))

    def testMapParamNames(self):
        # Inverse direction: wire names back to Python attribute names.
        params = ['path_field', 'enum_field']
        remapped_params = ['str_field', 'enum_field']
        self.assertEqual(remapped_params,
                         util.MapParamNames(params, MessageWithRemappings))
| python | Apache-2.0 | 1b14f58b3c895f542c724028dd0fb8ae0d816510 | 2026-01-05T07:12:01.390580Z | false |
google/apitools | https://github.com/google/apitools/blob/1b14f58b3c895f542c724028dd0fb8ae0d816510/apitools/base/py/exceptions_test.py | apitools/base/py/exceptions_test.py | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from apitools.base.py import exceptions
from apitools.base.py import http_wrapper
def _MakeResponse(status_code):
    """Build a minimal http_wrapper.Response carrying the given status."""
    info = {'status': status_code}
    body = '{"field": "abc"}'
    return http_wrapper.Response(
        info=info, content=body, request_url='http://www.google.com')
class HttpErrorFromResponseTest(unittest.TestCase):
    """Tests for exceptions.HttpError.FromResponse."""

    def _AssertErrorForStatus(self, status_code, expected_class):
        # FromResponse must build the status-specific subclass while still
        # being an HttpError, and must preserve the numeric status code.
        error = exceptions.HttpError.FromResponse(_MakeResponse(status_code))
        self.assertIsInstance(error, exceptions.HttpError)
        self.assertIsInstance(error, expected_class)
        self.assertEqual(error.status_code, status_code)

    def testBadRequest(self):
        self._AssertErrorForStatus(400, exceptions.HttpBadRequestError)

    def testUnauthorized(self):
        self._AssertErrorForStatus(401, exceptions.HttpUnauthorizedError)

    def testForbidden(self):
        self._AssertErrorForStatus(403, exceptions.HttpForbiddenError)

    def testExceptionMessageIncludesErrorDetails(self):
        error = exceptions.HttpError.FromResponse(_MakeResponse(403))
        details = repr(error)
        self.assertIn('403', details)
        self.assertIn('http://www.google.com', details)
        self.assertIn('{"field": "abc"}', details)

    def testNotFound(self):
        self._AssertErrorForStatus(404, exceptions.HttpNotFoundError)

    def testConflict(self):
        self._AssertErrorForStatus(409, exceptions.HttpConflictError)

    def testUnknownStatus(self):
        # An unrecognized status yields the generic HttpError.
        error = exceptions.HttpError.FromResponse(_MakeResponse(499))
        self.assertIsInstance(error, exceptions.HttpError)
        self.assertEqual(error.status_code, 499)

    def testMalformedStatus(self):
        # A non-numeric status must still produce an HttpError, not blow up.
        error = exceptions.HttpError.FromResponse(_MakeResponse('BAD'))
        self.assertIsInstance(error, exceptions.HttpError)
| python | Apache-2.0 | 1b14f58b3c895f542c724028dd0fb8ae0d816510 | 2026-01-05T07:12:01.390580Z | false |
google/apitools | https://github.com/google/apitools/blob/1b14f58b3c895f542c724028dd0fb8ae0d816510/apitools/base/py/list_pager.py | apitools/base/py/list_pager.py | #!/usr/bin/env python
#
# Copyright 2015 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A helper function that executes a series of List queries for many APIs."""
from apitools.base.py import encoding
import six
__all__ = [
'YieldFromList',
]
def _GetattrNested(message, attribute):
    """Gets a possibly nested attribute.

    Same as getattr() if attribute is a string;
    if attribute is a tuple, returns the nested attribute referred to by
    the fields in the tuple as if they were a dotted accessor path.

    (ex _GetattrNested(msg, ('foo', 'bar', 'baz')) gets msg.foo.bar.baz
    """
    if isinstance(attribute, six.string_types):
        return getattr(message, attribute)
    # Tuple form: walk the accessor path one field at a time.  An empty
    # tuple yields the message itself.
    target = message
    for field_name in attribute:
        target = getattr(target, field_name)
    return target
def _SetattrNested(message, attribute, value):
    """Sets a possibly nested attribute.

    Same as setattr() if attribute is a string;
    if attribute is a tuple, sets the nested attribute referred to by
    the fields in the tuple as if they were a dotted accessor path.

    (ex _SetattrNested(msg, ('foo', 'bar', 'baz'), 'v') sets msg.foo.bar.baz
    to 'v'
    """
    if isinstance(attribute, six.string_types):
        return setattr(message, attribute, value)
    if not attribute:
        raise ValueError("Need an attribute to set")
    # Walk to the owner of the final field, then set it there.  For a
    # single-element tuple the owner is the message itself.
    owner = _GetattrNested(message, attribute[:-1])
    return setattr(owner, attribute[-1], value)
def YieldFromList(
        service, request, global_params=None, limit=None, batch_size=100,
        method='List', field='items', predicate=None,
        current_token_attribute='pageToken',
        next_token_attribute='nextPageToken',
        batch_size_attribute='maxResults',
        get_field_func=_GetattrNested):
    """Make a series of List requests, keeping track of page tokens.

    Args:
      service: apitools_base.BaseApiService, A service with a .List() method.
      request: protorpc.messages.Message, The request message
          corresponding to the service's .List() method, with all the
          attributes populated except the .maxResults and .pageToken
          attributes.
      global_params: protorpc.messages.Message, The global query parameters to
           provide when calling the given method.
      limit: int, The maximum number of records to yield. None if all available
          records should be yielded.
      batch_size: int, The number of items to retrieve per request.
      method: str, The name of the method used to fetch resources.
      field: str, The field in the response that will be a list of items.
      predicate: lambda, A function that returns true for items to be yielded.
      current_token_attribute: str or tuple, The name of the attribute in a
          request message holding the page token for the page being
          requested. If a tuple, path to attribute.
      next_token_attribute: str or tuple, The name of the attribute in a
          response message holding the page token for the next page. If a
          tuple, path to the attribute.
      batch_size_attribute: str or tuple, The name of the attribute in a
          response message holding the maximum number of results to be
          returned. None if caller-specified batch size is unsupported.
          If a tuple, path to the attribute.
      get_field_func: Function that returns the items to be yielded. Argument
          is response message, and field.

    Yields:
      protorpc.message.Message, The resources listed by the service.

    """
    # Work on a copy so the caller's request message is never mutated.
    request = encoding.CopyProtoMessage(request)
    _SetattrNested(request, current_token_attribute, None)
    while limit is None or limit:
        if batch_size_attribute:
            # On Py3, None is not comparable so min() below will fail.
            # On Py2, None is always less than any number so if batch_size
            # is None, the request_batch_size will always be None regardless
            # of the value of limit. This doesn't generally strike me as the
            # correct behavior, but this change preserves the existing Py2
            # behavior on Py3.
            if batch_size is None:
                request_batch_size = None
            else:
                # Never request more items than the caller still wants.
                request_batch_size = min(batch_size, limit or batch_size)
            _SetattrNested(request, batch_size_attribute, request_batch_size)
        response = getattr(service, method)(request,
                                            global_params=global_params)
        items = get_field_func(response, field)
        if predicate:
            items = list(filter(predicate, items))
        for item in items:
            yield item
            # Note: filtered-out items do not count against the limit.
            if limit is None:
                continue
            limit -= 1
            if not limit:
                return
        # No next-page token means the listing is complete.
        token = _GetattrNested(response, next_token_attribute)
        if not token:
            return
        _SetattrNested(request, current_token_attribute, token)
| python | Apache-2.0 | 1b14f58b3c895f542c724028dd0fb8ae0d816510 | 2026-01-05T07:12:01.390580Z | false |
google/apitools | https://github.com/google/apitools/blob/1b14f58b3c895f542c724028dd0fb8ae0d816510/apitools/base/py/credentials_lib.py | apitools/base/py/credentials_lib.py | #!/usr/bin/env python
#
# Copyright 2015 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Common credentials classes and constructors."""
from __future__ import print_function
import argparse
import contextlib
import datetime
import json
import os
import threading
import warnings
import httplib2
import oauth2client
import oauth2client.client
from oauth2client import service_account
from oauth2client import tools # for gflags declarations
import six
from six.moves import http_client
from six.moves import urllib
from apitools.base.py import exceptions
from apitools.base.py import util
# App Engine does not support ctypes which are required for the
# monotonic time used in fasteners. Conversely, App Engine does
# not support colocated concurrent processes, so process locks
# are not needed.
try:
import fasteners
_FASTENERS_AVAILABLE = True
except ImportError as import_error:
server_env = os.environ.get('SERVER_SOFTWARE', '')
if not (server_env.startswith('Development') or
server_env.startswith('Google App Engine')):
raise import_error
_FASTENERS_AVAILABLE = False
# Note: we try the oauth2client imports two ways, to accomodate layout
# changes in oauth2client 2.0+. We can remove these once we no longer
# support oauth2client < 2.0.
#
# pylint: disable=wrong-import-order,ungrouped-imports
try:
from oauth2client.contrib import gce
except ImportError:
from oauth2client import gce
try:
from oauth2client.contrib import multiprocess_file_storage
_NEW_FILESTORE = True
except ImportError:
_NEW_FILESTORE = False
try:
from oauth2client.contrib import multistore_file
except ImportError:
from oauth2client import multistore_file
try:
import gflags
FLAGS = gflags.FLAGS
except ImportError:
FLAGS = None
__all__ = [
'CredentialsFromFile',
'GaeAssertionCredentials',
'GceAssertionCredentials',
'GetCredentials',
'GetUserinfo',
'ServiceAccountCredentialsFromFile',
]
# Lock when accessing the cache file to avoid resource contention.
cache_file_lock = threading.Lock()
def SetCredentialsCacheFileLock(lock):
    """Replace the module-level credentials cache-file lock with `lock`."""
    global cache_file_lock  # pylint: disable=global-statement
    cache_file_lock = lock
# List of additional methods we use when attempting to construct
# credentials. Users can register their own methods here, which we try
# before the defaults.
_CREDENTIALS_METHODS = []
def _RegisterCredentialsMethod(method, position=None):
    """Register a new method for fetching credentials.

    This new method should be a function with signature:
      client_info, **kwds -> Credentials or None
    This method can be used as a decorator, unless position needs to
    be supplied.

    Note that method must *always* accept arbitrary keyword arguments.

    Args:
      method: New credential-fetching method.
      position: (default: None) Where in the list of methods to
          add this; if None, we append. In all but rare cases,
          this should be either 0 or None.
    Returns:
      method, for use as a decorator.

    """
    # Append by default; otherwise clamp the insertion point to the list end.
    insert_at = (len(_CREDENTIALS_METHODS) if position is None
                 else min(position, len(_CREDENTIALS_METHODS)))
    _CREDENTIALS_METHODS.insert(insert_at, method)
    # Return the method unchanged so this works as a decorator.
    return method
def GetCredentials(package_name, scopes, client_id, client_secret, user_agent,
                   credentials_filename=None,
                   api_key=None,  # pylint: disable=unused-argument
                   client=None,  # pylint: disable=unused-argument
                   oauth2client_args=None,
                   **kwds):
    """Attempt to get credentials, using an oauth dance as the last resort.

    Each registered credentials method is tried in order; if none
    produces credentials, we fall back to the on-disk token cache
    (which may trigger an interactive OAuth flow).

    Args:
      package_name: Name used to derive the default user agent.
      scopes: Iterable of OAuth scopes to request.
      client_id: OAuth client id.
      client_secret: OAuth client secret.
      user_agent: User agent string; defaults to one derived from
          package_name.
      credentials_filename: (optional) Token cache path; defaults to
          ~/.apitools.token.
      api_key: Unused; accepted for call-site compatibility.
      client: Unused; accepted for call-site compatibility.
      oauth2client_args: (optional) Extra argv for the oauth2client flow.
      **kwds: Forwarded to each registered credentials method.

    Returns:
      oauth2client credentials.

    Raises:
      exceptions.CredentialsError: if no method produced credentials.
    """
    normalized = util.NormalizeScopes(scopes)
    client_info = {
        'client_id': client_id,
        'client_secret': client_secret,
        'scope': ' '.join(sorted(normalized)),
        'user_agent': user_agent or '%s-generated/0.1' % package_name,
    }
    # Registered methods (service account, GAE, GCE, ADC, ...) get the
    # first chance to produce credentials.
    for fetcher in _CREDENTIALS_METHODS:
        found = fetcher(client_info, **kwds)
        if found is not None:
            return found
    token_path = credentials_filename or os.path.expanduser(
        '~/.apitools.token')
    found = CredentialsFromFile(token_path, client_info,
                                oauth2client_args=oauth2client_args)
    if found is not None:
        return found
    raise exceptions.CredentialsError('Could not create valid credentials')
def ServiceAccountCredentialsFromFile(filename, scopes, user_agent=None):
    """Use the credentials in filename to create a token for scopes.

    Args:
      filename: Path to a JSON service account keyfile ("~" is expanded).
      scopes: Scopes to request for the resulting credentials.
      user_agent: (optional) User agent to attach to the credentials.

    Returns:
      Service account credentials (or None, if the oauth2client >= 2.0
      keyfile loader returned None).

    Raises:
      exceptions.CredentialsError: if the keyfile is not a service
          account keyfile (oauth2client < 2.0 path only).
    """
    filename = os.path.expanduser(filename)
    # We have two options, based on our version of oauth2client.
    # NOTE: lexicographic string comparison; it holds for the released
    # 1.x / 2.x+ version strings this code supports.
    if oauth2client.__version__ > '1.5.2':
        # oauth2client >= 2.0.0
        credentials = (
            service_account.ServiceAccountCredentials.from_json_keyfile_name(
                filename, scopes=scopes))
        if credentials is not None:
            if user_agent is not None:
                credentials.user_agent = user_agent
        return credentials
    else:
        # oauth2client < 2.0.0
        with open(filename) as keyfile:
            service_account_info = json.load(keyfile)
        account_type = service_account_info.get('type')
        if account_type != oauth2client.client.SERVICE_ACCOUNT:
            raise exceptions.CredentialsError(
                'Invalid service account credentials: %s' % (filename,))
        # pylint: disable=protected-access
        credentials = service_account._ServiceAccountCredentials(
            service_account_id=service_account_info['client_id'],
            service_account_email=service_account_info['client_email'],
            private_key_id=service_account_info['private_key_id'],
            private_key_pkcs8_text=service_account_info['private_key'],
            scopes=scopes, user_agent=user_agent)
        # pylint: enable=protected-access
        return credentials
def ServiceAccountCredentialsFromP12File(
        service_account_name, private_key_filename, scopes, user_agent):
    """Create a new credential from the named .p12 keyfile.

    Args:
      service_account_name: Service account email address.
      private_key_filename: Path to the .p12 keyfile ("~" is expanded).
      scopes: Scopes to request; normalized before use.
      user_agent: User agent to attach to the credentials.

    Returns:
      Signed-JWT service account credentials for the given scopes.
    """
    private_key_filename = os.path.expanduser(private_key_filename)
    scopes = util.NormalizeScopes(scopes)
    # NOTE: lexicographic version comparison; holds for the 1.x / 2.x+
    # oauth2client version strings this code supports.
    if oauth2client.__version__ > '1.5.2':
        # oauth2client >= 2.0.0
        credentials = (
            service_account.ServiceAccountCredentials.from_p12_keyfile(
                service_account_name, private_key_filename, scopes=scopes))
        if credentials is not None:
            credentials.user_agent = user_agent
        return credentials
    else:
        # oauth2client < 2.0.0
        with open(private_key_filename, 'rb') as key_file:
            return oauth2client.client.SignedJwtAssertionCredentials(
                service_account_name, key_file.read(), scopes,
                user_agent=user_agent)
def _GceMetadataRequest(relative_url, use_metadata_ip=False):
    """Request the given url from the GCE metadata service.

    Args:
      relative_url: Path under /computeMetadata/v1/ to request.
      use_metadata_ip: If True, address the service by IP (overridable
          via $GCE_METADATA_IP) instead of by hostname
          (overridable via $GCE_METADATA_ROOT).

    Returns:
      The open HTTP response object.

    Raises:
      exceptions.CommunicationError: if the metadata service could not
          be reached.
    """
    if use_metadata_ip:
        host = os.environ.get('GCE_METADATA_IP', '169.254.169.254')
    else:
        host = os.environ.get(
            'GCE_METADATA_ROOT', 'metadata.google.internal')
    full_url = 'http://' + host + '/computeMetadata/v1/' + relative_url
    # Extra header requirement can be found here:
    # https://developers.google.com/compute/docs/metadata
    metadata_request = urllib.request.Request(
        full_url, headers={'Metadata-Flavor': 'Google'})
    # Bypass any configured proxies; the metadata service is link-local.
    opener = urllib.request.build_opener(urllib.request.ProxyHandler({}))
    try:
        return opener.open(metadata_request)
    except urllib.error.URLError as e:
        raise exceptions.CommunicationError(
            'Could not reach metadata service: %s' % e.reason)
class GceAssertionCredentials(gce.AppAssertionCredentials):

    """Assertion credentials for GCE instances."""

    def __init__(self, scopes=None, service_account_name='default', **kwds):
        """Initializes the credentials instance.

        Args:
          scopes: The scopes to get. If None, whatever scopes that are
              available to the instance are used.
          service_account_name: The service account to retrieve the scopes
              from.
          **kwds: Additional keyword args.
        """
        # If there is a connectivity issue with the metadata server,
        # detection calls may fail even if we've already successfully
        # identified these scopes in the same execution. However, the
        # available scopes don't change once an instance is created,
        # so there is no reason to perform more than one query.
        self.__service_account_name = six.ensure_text(
            service_account_name,
            encoding='utf-8',)
        cached_scopes = None
        cache_filename = kwds.get('cache_filename')
        if cache_filename:
            cached_scopes = self._CheckCacheFileForMatch(
                cache_filename, scopes)
        # Cache miss falls through to a metadata-server query.
        scopes = cached_scopes or self._ScopesFromMetadataServer(scopes)
        if cache_filename and not cached_scopes:
            self._WriteCacheFile(cache_filename, scopes)
        # We check the scopes above, but don't need them again after
        # this point. Newer versions of oauth2client let us drop them
        # here, but since we support older versions as well, we just
        # catch and squelch the warning.
        with warnings.catch_warnings():
            warnings.simplefilter('ignore')
            super(GceAssertionCredentials, self).__init__(scope=scopes, **kwds)

    @classmethod
    def Get(cls, *args, **kwds):
        """Construct credentials, returning None on any apitools Error."""
        try:
            return cls(*args, **kwds)
        except exceptions.Error:
            return None

    def _CheckCacheFileForMatch(self, cache_filename, scopes):
        """Checks the cache file to see if it matches the given credentials.

        Args:
          cache_filename: Cache filename to check.
          scopes: Scopes for the desired credentials.

        Returns:
          List of scopes (if cache matches) or None. Any exception while
          reading or parsing the cache is treated as a miss (None).
        """
        creds = {  # Credentials metadata dict.
            'scopes': sorted(list(scopes)) if scopes else None,
            'svc_acct_name': self.__service_account_name,
        }
        cache_file = _MultiProcessCacheFile(cache_filename)
        try:
            cached_creds_str = cache_file.LockedRead()
            if not cached_creds_str:
                return None
            cached_creds = json.loads(cached_creds_str)
            if creds['svc_acct_name'] == cached_creds['svc_acct_name']:
                # A None scope request matches any cached scope set.
                if creds['scopes'] in (None, cached_creds['scopes']):
                    return cached_creds['scopes']
        except KeyboardInterrupt:
            raise
        except:  # pylint: disable=bare-except
            # Treat exceptions as a cache miss.
            pass

    def _WriteCacheFile(self, cache_filename, scopes):
        """Writes the credential metadata to the cache file.

        This does not save the credentials themselves (CredentialStore class
        optionally handles that after this class is initialized).

        Args:
          cache_filename: Cache filename to check.
          scopes: Scopes for the desired credentials.
        """
        # Credentials metadata dict.
        scopes = sorted([six.ensure_text(scope) for scope in scopes])
        creds = {'scopes': scopes,
                 'svc_acct_name': self.__service_account_name}
        creds_str = json.dumps(creds)
        cache_file = _MultiProcessCacheFile(cache_filename)
        try:
            cache_file.LockedWrite(creds_str)
        except KeyboardInterrupt:
            raise
        except:  # pylint: disable=bare-except
            # Treat exceptions as a cache miss.
            pass

    def _ScopesFromMetadataServer(self, scopes):
        """Returns instance scopes based on GCE metadata server.

        Raises:
          exceptions.ResourceUnavailableError: when not on GCE, or the
              service account does not exist.
          exceptions.CredentialsError: when requested scopes exceed the
              instance's scopes.
        """
        if not util.DetectGce():
            raise exceptions.ResourceUnavailableError(
                'GCE credentials requested outside a GCE instance')
        if not self.GetServiceAccount(self.__service_account_name):
            raise exceptions.ResourceUnavailableError(
                'GCE credentials requested but service account '
                '%s does not exist.' % self.__service_account_name)
        if scopes:
            scope_ls = util.NormalizeScopes(scopes)
            instance_scopes = self.GetInstanceScopes()
            # Set comparison: '>' means "proper superset of".
            if scope_ls > instance_scopes:
                raise exceptions.CredentialsError(
                    'Instance did not have access to scopes %s' % (
                        sorted(list(scope_ls - instance_scopes)),))
        else:
            scopes = self.GetInstanceScopes()
        return scopes

    def GetServiceAccount(self, account):
        """Return True iff `account` is listed by the metadata server."""
        relative_url = 'instance/service-accounts'
        response = _GceMetadataRequest(relative_url)
        response_lines = [six.ensure_str(line).rstrip(u'/\n\r')
                          for line in response.readlines()]
        return account in response_lines

    def GetInstanceScopes(self):
        """Return the normalized set of scopes granted to this account."""
        relative_url = 'instance/service-accounts/{0}/scopes'.format(
            self.__service_account_name)
        response = _GceMetadataRequest(relative_url)
        return util.NormalizeScopes(six.ensure_str(scope).strip()
                                    for scope in response.readlines())

    # pylint: disable=arguments-differ
    def _refresh(self, do_request):
        """Refresh self.access_token.

        This function replaces AppAssertionCredentials._refresh, which
        does not use the credential store and is therefore poorly
        suited for multi-threaded scenarios.

        Args:
          do_request: A function matching httplib2.Http.request's signature.
        """
        # pylint: disable=protected-access
        oauth2client.client.OAuth2Credentials._refresh(self, do_request)
        # pylint: enable=protected-access

    def _do_refresh_request(self, unused_http_request):
        """Refresh self.access_token by querying the metadata server.

        If self.store is initialized, store acquired credentials there.
        """
        relative_url = 'instance/service-accounts/{0}/token'.format(
            self.__service_account_name)
        try:
            response = _GceMetadataRequest(relative_url)
        except exceptions.CommunicationError:
            self.invalid = True
            # Persist the invalid state before re-raising.
            if self.store:
                self.store.locked_put(self)
            raise
        content = six.ensure_str(response.read())
        try:
            credential_info = json.loads(content)
        except ValueError:
            raise exceptions.CredentialsError(
                'Could not parse response as JSON: %s' % content)
        self.access_token = credential_info['access_token']
        if 'expires_in' in credential_info:
            expires_in = int(credential_info['expires_in'])
            # Naive UTC expiry, matching oauth2client's convention.
            self.token_expiry = (
                datetime.timedelta(seconds=expires_in) +
                datetime.datetime.now(tz=datetime.timezone.utc).replace(tzinfo=None))
        else:
            self.token_expiry = None
        self.invalid = False
        if self.store:
            self.store.locked_put(self)

    def to_json(self):
        # OAuth2Client made gce.AppAssertionCredentials unserializable as of
        # v3.0, but we need those credentials to be serializable for use with
        # this library, so we use AppAssertionCredentials' parent's to_json
        # method.
        # pylint: disable=bad-super-call
        return super(gce.AppAssertionCredentials, self).to_json()

    @classmethod
    def from_json(cls, json_data):
        """Reconstruct GceAssertionCredentials from a to_json() payload."""
        data = json.loads(json_data)
        kwargs = {}
        if 'cache_filename' in data.get('kwargs', []):
            kwargs['cache_filename'] = data['kwargs']['cache_filename']
        # Newer versions of GceAssertionCredentials don't have a "scope"
        # attribute.
        scope_list = None
        if 'scope' in data:
            scope_list = [data['scope']]
        credentials = GceAssertionCredentials(scopes=scope_list, **kwargs)
        if 'access_token' in data:
            credentials.access_token = data['access_token']
        if 'token_expiry' in data:
            credentials.token_expiry = datetime.datetime.strptime(
                data['token_expiry'], oauth2client.client.EXPIRY_FORMAT)
        if 'invalid' in data:
            credentials.invalid = data['invalid']
        return credentials

    @property
    def serialization_data(self):
        """Unsupported for GCE service accounts; always raises."""
        raise NotImplementedError(
            'Cannot serialize credentials for GCE service accounts.')
# TODO(craigcitro): Currently, we can't even *load*
# `oauth2client.appengine` without being on appengine, because of how
# it handles imports. Fix that by splitting that module into
# GAE-specific and GAE-independent bits, and guarding imports.
class GaeAssertionCredentials(oauth2client.client.AssertionCredentials):

    """Assertion credentials for Google App Engine apps."""

    def __init__(self, scopes, **kwds):
        """Initialize GAE assertion credentials for the given scopes.

        Args:
          scopes: Scopes to request; normalized before use.
          **kwds: Forwarded to AssertionCredentials.

        Raises:
          exceptions.ResourceUnavailableError: when not running on GAE.
        """
        if not util.DetectGae():
            # Fixed copy/paste bug: this check is for GAE, not GCE.
            raise exceptions.ResourceUnavailableError(
                'GAE credentials requested outside a GAE instance')
        self._scopes = list(util.NormalizeScopes(scopes))
        super(GaeAssertionCredentials, self).__init__(None, **kwds)

    @classmethod
    def Get(cls, *args, **kwds):
        """Construct credentials, returning None on any apitools Error."""
        try:
            return cls(*args, **kwds)
        except exceptions.Error:
            return None

    @classmethod
    def from_json(cls, json_data):
        """Rebuild credentials from a serialized payload (scopes only)."""
        data = json.loads(json_data)
        return GaeAssertionCredentials(data['_scopes'])

    def _refresh(self, _):
        """Refresh self.access_token.

        Args:
          _: (ignored) A function matching httplib2.Http.request's signature.

        Raises:
          exceptions.CredentialsError: on app_identity failure.
        """
        # pylint: disable=import-error
        from google.appengine.api import app_identity
        try:
            token, _ = app_identity.get_access_token(self._scopes)
        except app_identity.Error as e:
            raise exceptions.CredentialsError(str(e))
        self.access_token = token

    def sign_blob(self, blob):
        """Cryptographically sign a blob (of bytes).

        This method is provided to support a common interface, but
        the actual key used for a Google Compute Engine service account
        is not available, so it can't be used to sign content.

        Args:
          blob: bytes, Message to be signed.

        Raises:
          NotImplementedError, always.
        """
        raise NotImplementedError(
            'Compute Engine service accounts cannot sign blobs')
def _GetRunFlowFlags(args=None):
    """Retrieves command line flags based on gflags module.

    Args:
      args: (optional) Argv list for argparse; None means sys.argv.

    Returns:
      argparse.Namespace suitable for oauth2client's tools.run_flow.
    """
    # There's one rare situation where gsutil will not have argparse
    # available, but doesn't need anything depending on argparse anyway,
    # since they're bringing their own credentials. So we just allow this
    # to fail with an ImportError in those cases.
    #
    parser = argparse.ArgumentParser(parents=[tools.argparser])
    # Get command line argparse flags.
    flags, _ = parser.parse_known_args(args=args)
    # Allow `gflags` and `argparse` to be used side-by-side.
    # FLAGS is None when gflags is unavailable; hasattr(None, ...) is
    # simply False, so the copies below are skipped in that case.
    if hasattr(FLAGS, 'auth_host_name'):
        flags.auth_host_name = FLAGS.auth_host_name
    if hasattr(FLAGS, 'auth_host_port'):
        flags.auth_host_port = FLAGS.auth_host_port
    if hasattr(FLAGS, 'auth_local_webserver'):
        # argparse's flag is the negation of the gflags one.
        flags.noauth_local_webserver = (not FLAGS.auth_local_webserver)
    return flags
# TODO(craigcitro): Switch this from taking a path to taking a stream.
def CredentialsFromFile(path, client_info, oauth2client_args=None):
    """Read credentials from a file.

    Args:
      path: Path of the credential cache file.
      client_info: Dict with 'client_id', 'client_secret', 'scope' and
          'user_agent' keys, as built by GetCredentials.
      oauth2client_args: (optional) Argv forwarded to the oauth2client
          flow when new credentials must be generated.

    Returns:
      Stored (or newly generated) oauth2client credentials.

    Raises:
      exceptions.CredentialsError: on a communication failure while
          generating new credentials.
    """
    user_agent = client_info['user_agent']
    scope_key = client_info['scope']
    if not isinstance(scope_key, six.string_types):
        scope_key = ':'.join(scope_key)
    # The storage key must uniquely identify (client, agent, scopes).
    storage_key = client_info['client_id'] + user_agent + scope_key
    if _NEW_FILESTORE:
        credential_store = multiprocess_file_storage.MultiprocessFileStorage(
            path, storage_key)
    else:
        credential_store = multistore_file.get_credential_storage_custom_string_key(  # noqa
            path, storage_key)
    if hasattr(FLAGS, 'auth_local_webserver'):
        FLAGS.auth_local_webserver = False
    credentials = credential_store.get()
    if credentials is None or credentials.invalid:
        print('Generating new OAuth credentials ...')
        # Retry the OAuth dance at most 20 times on auth failure.
        for _ in range(20):
            # If authorization fails, we want to retry, rather than let this
            # cascade up and get caught elsewhere. If users want out of the
            # retry loop, they can ^C.
            try:
                flow = oauth2client.client.OAuth2WebServerFlow(**client_info)
                flags = _GetRunFlowFlags(args=oauth2client_args)
                credentials = tools.run_flow(flow, credential_store, flags)
                break
            except (oauth2client.client.FlowExchangeError, SystemExit) as e:
                # Here SystemExit is "no credential at all", and the
                # FlowExchangeError is "invalid" -- usually because
                # you reused a token.
                print('Invalid authorization: %s' % (e,))
            except httplib2.HttpLib2Error as e:
                print('Communication error: %s' % (e,))
                raise exceptions.CredentialsError(
                    'Communication error creating credentials: %s' % e)
        # NOTE(review): if all 20 attempts fail with FlowExchangeError,
        # the loop falls through and the invalid credentials are
        # returned as-is.
    return credentials
class _MultiProcessCacheFile(object):

    """Simple multithreading and multiprocessing safe cache file.

    Notes on behavior:
    * the fasteners.InterProcessLock object cannot reliably prevent threads
      from double-acquiring a lock. A threading lock is used in addition to
      the InterProcessLock. The threading lock is always acquired first and
      released last.
    * The interprocess lock will not deadlock. If a process can not acquire
      the interprocess lock within `_lock_timeout` the call will return as
      a cache miss or unsuccessful cache write.
    * App Engine environments cannot be process locked because (1) the runtime
      does not provide monotonic time and (2) different processes may or may
      not share the same machine. Because of this, process locks are disabled
      and locking is only guaranteed to protect against multithreaded access.
    """

    _lock_timeout = 1
    _encoding = 'utf-8'
    # Class-level lock shared by all instances in this process.
    _thread_lock = threading.Lock()

    def __init__(self, filename):
        """Args:
          filename: Path of the cache file. A sibling '<filename>.lock'
              file is used for interprocess locking when fasteners is
              available.
        """
        self._file = None
        self._filename = filename
        if _FASTENERS_AVAILABLE:
            self._process_lock_getter = self._ProcessLockAcquired
            self._process_lock = fasteners.InterProcessLock(
                '{0}.lock'.format(filename))
        else:
            self._process_lock_getter = self._DummyLockAcquired
            self._process_lock = None

    @contextlib.contextmanager
    def _ProcessLockAcquired(self):
        """Context manager for process locks with timeout."""
        # Initialize before the try block: if acquire() itself raises,
        # the finally clause must not hit an unbound local (which would
        # mask the original exception with a NameError).
        is_locked = False
        try:
            is_locked = self._process_lock.acquire(timeout=self._lock_timeout)
            yield is_locked
        finally:
            if is_locked:
                self._process_lock.release()

    @contextlib.contextmanager
    def _DummyLockAcquired(self):
        """Lock context manager for environments without process locks."""
        yield True

    def LockedRead(self):
        """Acquire an interprocess lock and dump cache contents.

        This method safely acquires the locks then reads a string
        from the cache file. If the file does not exist and cannot
        be created, it will return None. If the locks cannot be
        acquired, this will also return None.

        Returns:
          cache data - string if present, None on failure.
        """
        file_contents = None
        with self._thread_lock:
            if not self._EnsureFileExists():
                return None
            with self._process_lock_getter() as acquired_plock:
                if not acquired_plock:
                    return None
                with open(self._filename, 'rb') as f:
                    file_contents = f.read().decode(encoding=self._encoding)
        return file_contents

    def LockedWrite(self, cache_data):
        """Acquire an interprocess lock and write a string.

        This method safely acquires the locks then writes a string
        to the cache file. If the string is written successfully
        the function will return True, if the write fails for any
        reason it will return False.

        Args:
          cache_data: string or bytes to write.

        Returns:
          bool: success
        """
        if isinstance(cache_data, six.text_type):
            cache_data = cache_data.encode(encoding=self._encoding)
        with self._thread_lock:
            if not self._EnsureFileExists():
                return False
            with self._process_lock_getter() as acquired_plock:
                if not acquired_plock:
                    return False
                with open(self._filename, 'wb') as f:
                    f.write(cache_data)
        return True

    def _EnsureFileExists(self):
        """Touches a file; returns False on error, True on success."""
        if not os.path.exists(self._filename):
            # Restrict the new file to owner read/write (umask 0o177).
            old_umask = os.umask(0o177)
            try:
                open(self._filename, 'a+b').close()
            except OSError:
                return False
            finally:
                os.umask(old_umask)
        return True
# TODO(craigcitro): Push this into oauth2client.
def GetUserinfo(credentials, http=None):  # pylint: disable=invalid-name
    """Get the userinfo associated with the given credentials.

    This is dependent on the token having either the userinfo.email or
    userinfo.profile scope for the given token.

    Args:
      credentials: (oauth2client.client.Credentials) incoming credentials
      http: (httplib2.Http, optional) http instance to use

    Returns:
      The email address for this token, or None if the required scopes
      aren't available.
    """
    http = http or httplib2.Http()
    # We ignore communication woes here (i.e. SSL errors, socket
    # timeout), as handling these should be done in a common location.
    response, content = http.request(_GetUserinfoUrl(credentials))
    if response.status == http_client.BAD_REQUEST:
        # Refresh once and rebuild the URL with the new access token.
        credentials.refresh(http)
        response, content = http.request(_GetUserinfoUrl(credentials))
    return json.loads(content or '{}')  # Save ourselves from an empty reply.
def _GetUserinfoUrl(credentials):
    """Return the tokeninfo endpoint URL for the given credentials."""
    encoded_query = urllib.parse.urlencode(
        {'access_token': credentials.access_token})
    return 'https://oauth2.googleapis.com/tokeninfo' + '?' + encoded_query
@_RegisterCredentialsMethod
def _GetServiceAccountCredentials(
        client_info, service_account_name=None, service_account_keyfile=None,
        service_account_json_keyfile=None, **unused_kwds):
    """Build service account credentials from a JSON or .p12 keyfile.

    Returns None when no service account arguments were supplied.
    """
    scopes = client_info['scope'].split()
    user_agent = client_info['user_agent']
    # Use the .json credentials, if provided.
    if service_account_json_keyfile:
        return ServiceAccountCredentialsFromFile(
            service_account_json_keyfile, scopes, user_agent=user_agent)
    # Fall back to .p12; name and keyfile must be supplied together.
    if bool(service_account_name) != bool(service_account_keyfile):
        raise exceptions.CredentialsError(
            'Service account name or keyfile provided without the other')
    if service_account_name is not None:
        return ServiceAccountCredentialsFromP12File(
            service_account_name, service_account_keyfile, scopes, user_agent)
@_RegisterCredentialsMethod
def _GetGaeServiceAccount(client_info, **unused_kwds):
    """Try App Engine assertion credentials for the requested scopes."""
    requested = client_info['scope'].split(' ')
    return GaeAssertionCredentials.Get(scopes=requested)
@_RegisterCredentialsMethod
def _GetGceServiceAccount(client_info, **unused_kwds):
    """Try Compute Engine assertion credentials for the requested scopes."""
    requested = client_info['scope'].split(' ')
    return GceAssertionCredentials.Get(scopes=requested)
@_RegisterCredentialsMethod
def _GetApplicationDefaultCredentials(
        client_info, skip_application_default_credentials=False,
        **unused_kwds):
    """Returns ADC with right scopes.

    Returns None when ADC are unavailable, explicitly skipped, or
    (heuristically) would not cover the requested scopes.
    """
    scopes = client_info['scope'].split()
    if skip_application_default_credentials:
        return None
    gc = oauth2client.client.GoogleCredentials
    # Serialize with other credential-cache file access in this process.
    with cache_file_lock:
        try:
            # pylint: disable=protected-access
            # We've already done our own check for GAE/GCE
            # credentials, we don't want to pay for checking again.
            credentials = gc._implicit_credentials_from_files()
        except oauth2client.client.ApplicationDefaultCredentialsError:
            return None
    # If we got back a non-service account credential, we need to use
    # a heuristic to decide whether or not the application default
    # credential will work for us. We assume that if we're requesting
    # cloud-platform, our scopes are a subset of cloud scopes, and the
    # ADC will work.
    cp = 'https://www.googleapis.com/auth/cloud-platform'
    if credentials is None:
        return None
    if not isinstance(credentials, gc) or cp in scopes:
        return credentials.create_scoped(scopes)
    return None
| python | Apache-2.0 | 1b14f58b3c895f542c724028dd0fb8ae0d816510 | 2026-01-05T07:12:01.390580Z | false |
google/apitools | https://github.com/google/apitools/blob/1b14f58b3c895f542c724028dd0fb8ae0d816510/apitools/base/py/gzip_test.py | apitools/base/py/gzip_test.py | # Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
# 2011, 2012, 2013, 2014, 2015, 2016, 2017 Python Software Foundation; All
# Rights Reserved
#
# This is a backport from python 3.4 into python 2.7. Text and exclusive mode
# support are removed as they're unsupported in 2.7. This backport patches a
# streaming bug that exists in python 2.7.
"""Test script for the gzip module.
"""
import six
from six.moves import range
import unittest
import os
import io
import struct
from apitools.base.py import gzip
from io import open
data1 = b""" int length=DEFAULTALLOC, err = Z_OK;
PyObject *RetVal;
int flushmode = Z_FINISH;
unsigned long start_total_out;
"""
data2 = b"""/* zlibmodule.c -- gzip-compatible data compression */
/* See http://www.gzip.org/zlib/
/* See http://www.winimage.com/zLibDll for Windows */
"""
def unlink(filename):
    """Remove `filename`, silently ignoring OS-level failures.

    The bare `except:` of the original swallowed *every* exception
    (including programming errors such as TypeError); only OS errors
    (missing file, permissions) are expected here.
    """
    try:
        os.unlink(filename)
    except OSError:
        pass
class UnseekableIO(io.BytesIO):
    """A BytesIO variant that rejects all random-access operations."""

    def seekable(self):
        # Advertise ourselves as a pure stream.
        return False

    def seek(self, *args):
        raise io.UnsupportedOperation

    def tell(self):
        raise io.UnsupportedOperation
class BaseTest(unittest.TestCase):
    # All tests share one scratch file in the current working directory.
    filename = "@test"

    def setUp(self):
        # Start from a clean slate; a missing file is ignored.
        unlink(self.filename)

    def tearDown(self):
        unlink(self.filename)
class TestGzip(BaseTest):

    def write_and_read_back(self, data, mode='b'):
        # Round-trip helper: write `data`, then assert an identical read.
        b_data = bytes(data)
        with gzip.GzipFile(self.filename, 'w' + mode) as f:
            l = f.write(data)
        self.assertEqual(l, len(b_data))
        with gzip.GzipFile(self.filename, 'r' + mode) as f:
            self.assertEqual(f.read(), b_data)
    def test_write(self):
        # Basic write path, plus flush/fileno and double-close safety.
        with gzip.GzipFile(self.filename, 'wb') as f:
            f.write(data1 * 50)
            # Try flush and fileno.
            f.flush()
            f.fileno()
            if hasattr(os, 'fsync'):
                os.fsync(f.fileno())
            f.close()
        # Test multiple close() calls.
        f.close()

    # The following test_write_xy methods test that write accepts
    # the corresponding bytes-like object type as input
    # and that the data written equals bytes(xy) in all cases.
    def test_write_memoryview(self):
        data = memoryview(data1 * 50)
        self.write_and_read_back(data.tobytes())
        data = memoryview(bytes(range(256)))
        self.write_and_read_back(data.tobytes())
    def test_write_incompatible_type(self):
        # Test that non-bytes-like types raise TypeError.
        # Issue #21560: attempts to write incompatible types
        # should not affect the state of the fileobject
        with gzip.GzipFile(self.filename, 'wb') as f:
            if six.PY2:
                # On 2.x a non-ASCII unicode char fails encoding instead.
                with self.assertRaises(UnicodeEncodeError):
                    f.write(u'\xff')
            elif six.PY3:
                with self.assertRaises(TypeError):
                    f.write(u'\xff')
            with self.assertRaises(TypeError):
                f.write([1])
            # The file object must still accept valid bytes afterwards.
            f.write(data1)
        with gzip.GzipFile(self.filename, 'rb') as f:
            self.assertEqual(f.read(), data1)

    def test_read(self):
        self.test_write()
        # Try reading.
        with gzip.GzipFile(self.filename, 'r') as f:
            d = f.read()
        self.assertEqual(d, data1 * 50)

    def test_read1(self):
        # read1() in a loop must return all data and keep tell() in sync.
        self.test_write()
        blocks = []
        nread = 0
        with gzip.GzipFile(self.filename, 'r') as f:
            while True:
                d = f.read1()
                if not d:
                    break
                blocks.append(d)
                nread += len(d)
                # Check that position was updated correctly (see issue10791).
                self.assertEqual(f.tell(), nread)
        self.assertEqual(b''.join(blocks), data1 * 50)
    def test_io_on_closed_object(self):
        # Test that I/O operations on closed GzipFile objects raise a
        # ValueError, just like the corresponding functions on file objects.

        # Write to a file, open it for reading, then close it.
        self.test_write()
        f = gzip.GzipFile(self.filename, 'r')
        f.close()
        with self.assertRaises(ValueError):
            f.read(1)
        with self.assertRaises(ValueError):
            f.seek(0)
        with self.assertRaises(ValueError):
            f.tell()
        # Open the file for writing, then close it.
        f = gzip.GzipFile(self.filename, 'w')
        f.close()
        with self.assertRaises(ValueError):
            f.write(b'')
        with self.assertRaises(ValueError):
            f.flush()

    def test_append(self):
        # Appending ('ab') creates a second gzip member; reads see both.
        self.test_write()
        # Append to the previous file
        with gzip.GzipFile(self.filename, 'ab') as f:
            f.write(data2 * 15)
        with gzip.GzipFile(self.filename, 'rb') as f:
            d = f.read()
        self.assertEqual(d, (data1 * 50) + (data2 * 15))
    def test_many_append(self):
        # Bug #1074261 was triggered when reading a file that contained
        # many, many members. Create such a file and verify that reading it
        # works.
        with gzip.GzipFile(self.filename, 'wb', 9) as f:
            f.write(b'a')
        for i in range(0, 200):
            with gzip.GzipFile(self.filename, "ab", 9) as f:  # append
                f.write(b'a')

        # Try reading the file
        with gzip.GzipFile(self.filename, "rb") as zgfile:
            contents = b""
            while 1:
                ztxt = zgfile.read(8192)
                contents += ztxt
                if not ztxt:
                    break
        self.assertEqual(contents, b'a' * 201)

    def test_buffered_reader(self):
        # Issue #7471: a GzipFile can be wrapped in a BufferedReader for
        # performance.
        self.test_write()
        with gzip.GzipFile(self.filename, 'rb') as f:
            with io.BufferedReader(f) as r:
                lines = [line for line in r]
        self.assertEqual(lines, 50 * data1.splitlines(True))
    def test_readline(self):
        self.test_write()
        # Try .readline() with varying line lengths
        with gzip.GzipFile(self.filename, 'rb') as f:
            line_length = 0
            while 1:
                L = f.readline(line_length)
                if not L and line_length != 0:
                    break
                self.assertTrue(len(L) <= line_length)
                line_length = (line_length + 1) % 50

    def test_readlines(self):
        self.test_write()
        # Try .readlines()
        with gzip.GzipFile(self.filename, 'rb') as f:
            L = f.readlines()
        with gzip.GzipFile(self.filename, 'rb') as f:
            # Exercise the sizehint argument until the file is drained.
            while 1:
                L = f.readlines(150)
                if L == []:
                    break

    def test_seek_read(self):
        # Interleave backward seeks, reads and forward seeks line by line.
        self.test_write()
        # Try seek, read test
        with gzip.GzipFile(self.filename) as f:
            while 1:
                oldpos = f.tell()
                line1 = f.readline()
                if not line1:
                    break
                newpos = f.tell()
                f.seek(oldpos)  # negative seek
                if len(line1) > 10:
                    amount = 10
                else:
                    amount = len(line1)
                line2 = f.read(amount)
                self.assertEqual(line1[:amount], line2)
                f.seek(newpos)  # positive seek
    def test_seek_whence(self):
        self.test_write()
        # Try seek(whence=1), read test
        with gzip.GzipFile(self.filename) as f:
            f.read(10)
            f.seek(10, whence=1)
            y = f.read(10)
        self.assertEqual(y, data1[20:30])

    def test_seek_write(self):
        # Try seek, write test
        with gzip.GzipFile(self.filename, 'w') as f:
            for pos in range(0, 256, 16):
                f.seek(pos)
                f.write(b'GZ\n')

    def test_mode(self):
        # The underlying file object is always opened in binary mode.
        self.test_write()
        with gzip.GzipFile(self.filename, 'r') as f:
            self.assertEqual(f.myfileobj.mode, 'rb')

    def test_1647484(self):
        # GzipFile objects expose a .name attribute in both directions.
        for mode in ('wb', 'rb'):
            with gzip.GzipFile(self.filename, mode) as f:
                self.assertTrue(hasattr(f, "name"))
                self.assertEqual(f.name, self.filename)

    def test_paddedfile_getattr(self):
        # Attribute access is delegated through the internal fileobj wrapper.
        self.test_write()
        with gzip.GzipFile(self.filename, 'rb') as f:
            self.assertTrue(hasattr(f.fileobj, "name"))
            self.assertEqual(f.fileobj.name, self.filename)
    def test_mtime(self):
        # An explicit mtime passed at write time is surfaced on read.
        mtime = 123456789
        with gzip.GzipFile(self.filename, 'w', mtime=mtime) as fWrite:
            fWrite.write(data1)
        with gzip.GzipFile(self.filename) as fRead:
            dataRead = fRead.read()
            self.assertEqual(dataRead, data1)
            self.assertTrue(hasattr(fRead, 'mtime'))
            self.assertEqual(fRead.mtime, mtime)

    def test_metadata(self):
        # Byte-level check of the gzip header and trailer fields.
        mtime = 123456789
        with gzip.GzipFile(self.filename, 'w', mtime=mtime) as fWrite:
            fWrite.write(data1)
        with open(self.filename, 'rb') as fRead:
            # see RFC 1952: http://www.faqs.org/rfcs/rfc1952.html
            idBytes = fRead.read(2)
            self.assertEqual(idBytes, b'\x1f\x8b')  # gzip ID
            cmByte = fRead.read(1)
            self.assertEqual(cmByte, b'\x08')  # deflate
            flagsByte = fRead.read(1)
            self.assertEqual(flagsByte, b'\x08')  # only the FNAME flag is set
            mtimeBytes = fRead.read(4)
            self.assertEqual(mtimeBytes, struct.pack(
                '<i', mtime))  # little-endian
            xflByte = fRead.read(1)
            self.assertEqual(xflByte, b'\x02')  # maximum compression
            osByte = fRead.read(1)
            self.assertEqual(osByte, b'\xff')  # OS "unknown" (OS-independent)
            # Since the FNAME flag is set, the zero-terminated filename
            # follows. RFC 1952 specifies that this is the name of the input
            # file, if any. However, the gzip module defaults to storing the
            # name of the output file in this field.
            expected = self.filename.encode('Latin-1') + b'\x00'
            nameBytes = fRead.read(len(expected))
            self.assertEqual(nameBytes, expected)
            # Since no other flags were set, the header ends here. Rather than
            # process the compressed data, let's seek to the trailer.
            fRead.seek(os.stat(self.filename).st_size - 8)
            crc32Bytes = fRead.read(4)  # CRC32 of uncompressed data [data1]
            self.assertEqual(crc32Bytes, b'\xaf\xd7d\x83')
            isizeBytes = fRead.read(4)
            self.assertEqual(isizeBytes, struct.pack('<i', len(data1)))
    def test_with_open(self):
        # GzipFile supports the context management protocol
        with gzip.GzipFile(self.filename, "wb") as f:
            f.write(b"xxx")
        f = gzip.GzipFile(self.filename, "rb")
        f.close()
        try:
            with f:
                pass
        except ValueError:
            pass
        else:
            self.fail("__enter__ on a closed file didn't raise an exception")
        try:
            with gzip.GzipFile(self.filename, "wb") as f:
                1 / 0
        except ZeroDivisionError:
            pass
        else:
            self.fail("1/0 didn't raise an exception")

    def test_zero_padded_file(self):
        # Trailing zero bytes after the last member must be ignored.
        with gzip.GzipFile(self.filename, "wb") as f:
            f.write(data1 * 50)

        # Pad the file with zeroes
        with open(self.filename, "ab") as f:
            f.write(b"\x00" * 50)

        with gzip.GzipFile(self.filename, "rb") as f:
            d = f.read()
        self.assertEqual(d, data1 * 50, "Incorrect data in file")

    def test_non_seekable_file(self):
        # Round-trip through a stream that forbids seek/tell entirely.
        uncompressed = data1 * 50
        buf = UnseekableIO()
        with gzip.GzipFile(fileobj=buf, mode="wb") as f:
            f.write(uncompressed)
        compressed = buf.getvalue()
        buf = UnseekableIO(compressed)
        with gzip.GzipFile(fileobj=buf, mode="rb") as f:
            self.assertEqual(f.read(), uncompressed)
def test_peek(self):
uncompressed = data1 * 200
with gzip.GzipFile(self.filename, "wb") as f:
f.write(uncompressed)
def sizes():
while True:
for n in range(5, 50, 10):
yield n
with gzip.GzipFile(self.filename, "rb") as f:
f.max_read_chunk = 33
nread = 0
for n in sizes():
s = f.peek(n)
if s == b'':
break
self.assertEqual(f.read(len(s)), s)
nread += len(s)
self.assertEqual(f.read(100), b'')
self.assertEqual(nread, len(uncompressed))
def test_textio_readlines(self):
# Issue #10791: TextIOWrapper.readlines() fails when wrapping GzipFile.
lines = (data1 * 50).decode("ascii").splitlines(True)
self.test_write()
with gzip.GzipFile(self.filename, 'r') as f:
with io.TextIOWrapper(f, encoding="ascii") as t:
self.assertEqual(t.readlines(), lines)
def test_fileobj_from_fdopen(self):
# Issue #13781: Opening a GzipFile for writing fails when using a
# fileobj created with os.fdopen().
fd = os.open(self.filename, os.O_WRONLY | os.O_CREAT)
with os.fdopen(fd, "wb") as f:
with gzip.GzipFile(fileobj=f, mode="w") as g:
pass
    def test_bytes_filename(self):
        """A bytes filename must address the same file as its str form."""
        str_filename = self.filename
        try:
            bytes_filename = str_filename.encode("ascii")
        except UnicodeEncodeError:
            # Non-ASCII temp dir: the bytes/str equivalence check below
            # would not be meaningful, so skip.
            self.skipTest("Temporary file name needs to be ASCII")
        with gzip.GzipFile(bytes_filename, "wb") as f:
            f.write(data1 * 50)
        with gzip.GzipFile(bytes_filename, "rb") as f:
            self.assertEqual(f.read(), data1 * 50)
        # Sanity check that we are actually operating on the right file.
        with gzip.GzipFile(str_filename, "rb") as f:
            self.assertEqual(f.read(), data1 * 50)
# Testing compress/decompress shortcut functions
def test_compress(self):
for data in [data1, data2]:
for args in [(), (1,), (6,), (9,)]:
datac = gzip.compress(data, *args)
self.assertEqual(type(datac), bytes)
with gzip.GzipFile(fileobj=io.BytesIO(datac), mode="rb") as f:
self.assertEqual(f.read(), data)
def test_decompress(self):
for data in (data1, data2):
buf = io.BytesIO()
with gzip.GzipFile(fileobj=buf, mode="wb") as f:
f.write(data)
self.assertEqual(gzip.decompress(buf.getvalue()), data)
# Roundtrip with compress
datac = gzip.compress(data)
self.assertEqual(gzip.decompress(datac), data)
    def test_read_truncated(self):
        """Reading past the end of a truncated stream must raise EOFError."""
        data = data1 * 50
        # Drop the CRC (4 bytes) and file size (4 bytes).
        truncated = gzip.compress(data)[:-8]
        # An unbounded read must notice the missing trailer.
        with gzip.GzipFile(fileobj=io.BytesIO(truncated)) as f:
            self.assertRaises(EOFError, f.read)
        # A bounded read of exactly the payload succeeds; only the next
        # read, which needs the trailer, fails.
        with gzip.GzipFile(fileobj=io.BytesIO(truncated)) as f:
            self.assertEqual(f.read(len(data)), data)
            self.assertRaises(EOFError, f.read, 1)
        # Incomplete 10-byte header.
        for i in range(2, 10):
            with gzip.GzipFile(fileobj=io.BytesIO(truncated[:i])) as f:
                self.assertRaises(EOFError, f.read, 1)
    def test_read_with_extra(self):
        # Gzip data with an extra field (FEXTRA flag set in byte 3; the
        # extra field here is the 5-byte payload b'Extra').
        gzdata = (b'\x1f\x8b\x08\x04\xb2\x17cQ\x02\xff'
                  b'\x05\x00Extra'
                  b'\x0bI-.\x01\x002\xd1Mx\x04\x00\x00\x00')
        with gzip.GzipFile(fileobj=io.BytesIO(gzdata)) as f:
            self.assertEqual(f.read(), b'Test')
    def test_prepend_error(self):
        # See issue #20875: calling _PaddedFile.prepend() with no argument
        # on a freshly opened read stream must not corrupt internal state.
        with gzip.open(self.filename, "wb") as f:
            f.write(data1)
        with gzip.open(self.filename, "rb") as f:
            f.fileobj.prepend()
class TestOpen(BaseTest):
    """Tests for the module-level gzip.open() shorthand."""

    def test_binary_modes(self):
        """'wb', 'rb' and 'ab' must behave like binary file modes."""
        uncompressed = data1 * 50
        with gzip.open(self.filename, "wb") as f:
            f.write(uncompressed)
        # Verify the on-disk bytes really are gzip-compressed.
        with open(self.filename, "rb") as f:
            file_data = gzip.decompress(f.read())
            self.assertEqual(file_data, uncompressed)
        with gzip.open(self.filename, "rb") as f:
            self.assertEqual(f.read(), uncompressed)
        # Appending must produce a multi-member file that decompresses
        # to the concatenation of both payloads.
        with gzip.open(self.filename, "ab") as f:
            f.write(uncompressed)
        with open(self.filename, "rb") as f:
            file_data = gzip.decompress(f.read())
            self.assertEqual(file_data, uncompressed * 2)

    def test_implicit_binary_modes(self):
        # Test implicit binary modes (no "b" or "t" in mode string).
        uncompressed = data1 * 50
        with gzip.open(self.filename, "w") as f:
            f.write(uncompressed)
        with open(self.filename, "rb") as f:
            file_data = gzip.decompress(f.read())
            self.assertEqual(file_data, uncompressed)
        with gzip.open(self.filename, "r") as f:
            self.assertEqual(f.read(), uncompressed)
        with gzip.open(self.filename, "a") as f:
            f.write(uncompressed)
        with open(self.filename, "rb") as f:
            file_data = gzip.decompress(f.read())
            self.assertEqual(file_data, uncompressed * 2)
| python | Apache-2.0 | 1b14f58b3c895f542c724028dd0fb8ae0d816510 | 2026-01-05T07:12:01.390580Z | false |
google/apitools | https://github.com/google/apitools/blob/1b14f58b3c895f542c724028dd0fb8ae0d816510/apitools/base/py/util.py | apitools/base/py/util.py | #!/usr/bin/env python
#
# Copyright 2015 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Assorted utilities shared between parts of apitools."""
import os
import random
import six
from six.moves import http_client
import six.moves.urllib.error as urllib_error
import six.moves.urllib.parse as urllib_parse
import six.moves.urllib.request as urllib_request
from apitools.base.protorpclite import messages
from apitools.base.py import encoding_helper as encoding
from apitools.base.py import exceptions
if six.PY3:
from collections.abc import Iterable
else:
from collections import Iterable
__all__ = [
'DetectGae',
'DetectGce',
]
_RESERVED_URI_CHARS = r":/?#[]@!$&'()*+,;="
def DetectGae():
    """Determine whether or not we're running on GAE.

    This is based on:
      https://developers.google.com/appengine/docs/python/#The_Environment

    Returns:
      True iff we're running on GAE.
    """
    software = os.environ.get('SERVER_SOFTWARE', '')
    # GAE sets SERVER_SOFTWARE to one of these two prefixes depending on
    # whether the app runs in the dev server or in production.
    return software.startswith(('Development/', 'Google App Engine/'))
def DetectGce():
    """Determine whether or not we're running on GCE.

    This is based on:
      https://cloud.google.com/compute/docs/metadata#runninggce

    Returns:
      True iff we're running on a GCE instance.
    """
    metadata_url = 'http://{}'.format(
        os.environ.get('GCE_METADATA_ROOT', 'metadata.google.internal'))
    try:
        # Bypass any configured proxy: the metadata server is link-local
        # and must be contacted directly.
        o = urllib_request.build_opener(urllib_request.ProxyHandler({})).open(
            urllib_request.Request(
                metadata_url, headers={'Metadata-Flavor': 'Google'}))
    except urllib_error.URLError:
        # Unreachable metadata server means we're not on GCE.
        return False
    # Require both a 200 response and the Google flavor header so a
    # non-GCE server answering on this hostname is not misdetected.
    return (o.getcode() == http_client.OK and
            o.headers.get('metadata-flavor') == 'Google')
def NormalizeScopes(scope_spec):
    """Normalize scope_spec to a set of strings."""
    if isinstance(scope_spec, six.string_types):
        # A single space-delimited string of scope names.
        return set(six.ensure_str(scope_spec).split(' '))
    if isinstance(scope_spec, Iterable):
        return {six.ensure_str(scope) for scope in scope_spec}
    raise exceptions.TypecheckError(
        'NormalizeScopes expected string or iterable, found %s' % (
            type(scope_spec),))
def Typecheck(arg, arg_type, msg=None):
    """Return arg unchanged if it is an instance of arg_type, else raise.

    Raises:
      exceptions.TypecheckError: if arg is not an instance of arg_type.
    """
    if isinstance(arg, arg_type):
        return arg
    if msg is None:
        if isinstance(arg_type, tuple):
            msg = 'Type of arg is "%s", not one of %r' % (
                type(arg), arg_type)
        else:
            msg = 'Type of arg is "%s", not "%s"' % (type(arg), arg_type)
    raise exceptions.TypecheckError(msg)
def ExpandRelativePath(method_config, params, relative_path=None):
    """Determine the relative path for request.

    Substitutes each {param} / {+param} template in the method's relative
    path with the URL-quoted value from `params`.

    Raises:
      exceptions.InvalidUserInputError: if a path parameter is missing from
        the template, missing from `params`, None, or unquotable.
    """
    path = relative_path or method_config.relative_path or ''
    for param in method_config.path_params:
        param_template = '{%s}' % param
        # For more details about "reserved word expansion", see:
        # http://tools.ietf.org/html/rfc6570#section-3.2.2
        reserved_chars = ''
        reserved_template = '{+%s}' % param
        if reserved_template in path:
            # {+param}: leave RFC 6570 reserved characters unescaped.
            reserved_chars = _RESERVED_URI_CHARS
            path = path.replace(reserved_template, param_template)
        if param_template not in path:
            raise exceptions.InvalidUserInputError(
                'Missing path parameter %s' % param)
        try:
            # TODO(craigcitro): Do we want to support some sophisticated
            # mapping here?
            value = params[param]
        except KeyError:
            raise exceptions.InvalidUserInputError(
                'Request missing required parameter %s' % param)
        if value is None:
            raise exceptions.InvalidUserInputError(
                'Request missing required parameter %s' % param)
        try:
            if not isinstance(value, six.string_types):
                value = str(value)
            path = path.replace(param_template,
                                urllib_parse.quote(value.encode('utf_8'),
                                                   reserved_chars))
        except TypeError as e:
            raise exceptions.InvalidUserInputError(
                'Error setting required parameter %s to value %s: %s' % (
                    param, value, e))
    return path
def CalculateWaitForRetry(retry_attempt, max_wait=60):
    """Calculates amount of time to wait before a retry attempt.

    Wait time grows exponentially with the number of attempts. A
    random amount of jitter is added to spread out retry attempts from
    different clients.

    Args:
      retry_attempt: Retry attempt counter.
      max_wait: Upper bound for wait time [seconds].

    Returns:
      Number of seconds to wait before retrying request.
    """
    base_wait = 2 ** retry_attempt
    # Jitter by up to +/- 25% of the exponential backoff.
    jitter_bound = base_wait / 4.0
    jittered = base_wait + random.uniform(-jitter_bound, jitter_bound)
    # Clamp into [1, max_wait].
    return max(1, min(jittered, max_wait))
def AcceptableMimeType(accept_patterns, mime_type):
    """Return True iff mime_type is acceptable for one of accept_patterns.

    Note that this function assumes that all patterns in accept_patterns
    will be simple types of the form "type/subtype", where one or both
    of these can be "*". We do not support parameters (i.e. "; q=") in
    patterns.

    Args:
      accept_patterns: list of acceptable MIME types.
      mime_type: the mime type we would like to match.

    Returns:
      Whether or not mime_type matches (at least) one of these patterns.
    """
    if '/' not in mime_type:
        raise exceptions.InvalidUserInputError(
            'Invalid MIME type: "%s"' % mime_type)
    unsupported_patterns = [p for p in accept_patterns if ';' in p]
    if unsupported_patterns:
        raise exceptions.GeneratedClientError(
            'MIME patterns with parameter unsupported: "%s"' % ', '.join(
                unsupported_patterns))

    def _Matches(pattern, candidate):
        """True iff candidate matches pattern, honoring '*' wildcards."""
        # Some systems use a single '*' instead of '*/*'.
        if pattern == '*':
            pattern = '*/*'
        pairs = zip(pattern.split('/'), candidate.split('/'))
        return all(accept in ('*', provided) for accept, provided in pairs)

    return any(_Matches(pattern, mime_type) for pattern in accept_patterns)
def MapParamNames(params, request_type):
    """Reverse parameter remappings for URL construction."""
    mapped = []
    for param in params:
        # Fall back to the original name when no custom mapping exists.
        custom = encoding.GetCustomJsonFieldMapping(
            request_type, json_name=param)
        mapped.append(custom or param)
    return mapped
def MapRequestParams(params, request_type):
    """Perform any renames/remappings needed for URL construction.

    Currently, we have several ways to customize JSON encoding, in
    particular of field names and enums. This works fine for JSON
    bodies, but also needs to be applied for path and query parameters
    in the URL.

    This function takes a dictionary from param names to values, and
    performs any registered mappings. We also need the request type (to
    look up the mappings).

    Args:
      params: (dict) Map from param names to values
      request_type: (protorpc.messages.Message) request type for this API call

    Returns:
      A new dict of the same size, with all registered mappings applied.
    """
    new_params = dict(params)
    for param_name, value in params.items():
        # Rename the key if the field has a custom JSON name.
        field_remapping = encoding.GetCustomJsonFieldMapping(
            request_type, python_name=param_name)
        if field_remapping is not None:
            new_params[field_remapping] = new_params.pop(param_name)
            param_name = field_remapping
        # Enum values are serialized via their custom JSON mapping when
        # one is registered, otherwise via their plain string name.
        if isinstance(value, messages.Enum):
            new_params[param_name] = encoding.GetCustomJsonEnumMapping(
                type(value), python_name=str(value)) or str(value)
    return new_params
| python | Apache-2.0 | 1b14f58b3c895f542c724028dd0fb8ae0d816510 | 2026-01-05T07:12:01.390580Z | false |
google/apitools | https://github.com/google/apitools/blob/1b14f58b3c895f542c724028dd0fb8ae0d816510/apitools/base/py/gzip.py | apitools/base/py/gzip.py | # Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
# 2011, 2012, 2013, 2014, 2015, 2016, 2017 Python Software Foundation; All
# Rights Reserved
#
# This is a backport from python 3.4 into python 2.7. Text and exclusive mode
# support are removed as they're unsupported in 2.7. This backport patches a
# streaming bug that exists in python 2.7.
"""Functions that read and write gzipped files.
The user of the file doesn't have to worry about the compression,
but random access is not allowed."""
# based on Andrew Kuchling's minigzip.py distributed with the zlib module
import six
from six.moves import builtins
from six.moves import range
import struct
import sys
import time
import os
import zlib
import io
__all__ = ["GzipFile", "open", "compress", "decompress"]
FTEXT, FHCRC, FEXTRA, FNAME, FCOMMENT = 1, 2, 4, 8, 16
READ, WRITE = 1, 2
def open(filename, mode="rb", compresslevel=9):
    """Shorthand for GzipFile(filename, mode, compresslevel).

    The filename argument is required; mode defaults to 'rb'
    and compresslevel defaults to 9.
    """
    return GzipFile(filename, mode=mode, compresslevel=compresslevel)
def write32u(output, value):
    """Write value to output as a 32-bit little-endian unsigned integer."""
    # struct's "<L" emits the same bit pattern whether the Python int was
    # conceptually signed or unsigned.
    packed = struct.pack("<L", value)
    output.write(packed)
class _PaddedFile(object):
    """Minimal read-only file object that prepends a string to the contents
    of an actual file. Shouldn't be used outside of gzip.py, as it lacks
    essential functionality."""
    # Invariant: self._read is the cursor into self._buffer, or None once
    # all prepended bytes are consumed and reads pass straight through.
    def __init__(self, f, prepend=b''):
        self._buffer = prepend
        self._length = len(prepend)
        self.file = f
        self._read = 0
    def read(self, size):
        # Fast path: prepend buffer exhausted, delegate to the real file.
        if self._read is None:
            return self.file.read(size)
        if self._read + size <= self._length:
            # Entire request satisfiable from the prepend buffer.
            read = self._read
            self._read += size
            return self._buffer[read:self._read]
        else:
            # Drain the rest of the buffer, then read the remainder from
            # the underlying file.
            read = self._read
            self._read = None
            return self._buffer[read:] + \
                self.file.read(size - self._length + read)
    def prepend(self, prepend=b'', readprevious=False):
        if self._read is None:
            self._buffer = prepend
        elif readprevious and len(prepend) <= self._read:
            # The caller is pushing back bytes it just consumed: simply
            # rewind the cursor instead of rebuilding the buffer.
            self._read -= len(prepend)
            return
        else:
            self._buffer = self._buffer[self._read:] + prepend
        self._length = len(self._buffer)
        self._read = 0
    def unused(self):
        # Bytes still pending in the prepend buffer, if any.
        if self._read is None:
            return b''
        return self._buffer[self._read:]
    def seek(self, offset, whence=0):
        # This is only ever called with offset=whence=0
        if whence == 1 and self._read is not None:
            if 0 <= offset + self._read <= self._length:
                # Relative seek stays within the prepend buffer.
                self._read += offset
                return
            else:
                # Translate the buffer-relative offset into a file offset.
                offset += self._length - self._read
        self._read = None
        self._buffer = None
        return self.file.seek(offset, whence)
    def __getattr__(self, name):
        # Everything else is delegated to the wrapped file object.
        return getattr(self.file, name)
class GzipFile(io.BufferedIOBase):
    """The GzipFile class simulates most of the methods of a file object with
    the exception of the readinto() and truncate() methods.

    This class only supports opening files in binary mode. If you need to open
    a compressed file in text mode, use the gzip.open() function.
    """

    myfileobj = None
    max_read_chunk = 10 * 1024 * 1024  # 10Mb

    def __init__(self, filename=None, mode=None,
                 compresslevel=9, fileobj=None, mtime=None):
        """Constructor for the GzipFile class.

        At least one of fileobj and filename must be given a
        non-trivial value.

        The new class instance is based on fileobj, which can be a regular
        file, an io.BytesIO object, or any other object which simulates a file.
        It defaults to None, in which case filename is opened to provide
        a file object.

        When fileobj is not None, the filename argument is only used to be
        included in the gzip file header, which may includes the original
        filename of the uncompressed file. It defaults to the filename of
        fileobj, if discernible; otherwise, it defaults to the empty string,
        and in this case the original filename is not included in the header.

        The mode argument can be any of 'r', 'rb', 'a', 'ab', 'w', or 'wb',
        depending on whether the file will be read or written. The default
        is the mode of fileobj if discernible; otherwise, the default is 'rb'.
        A mode of 'r' is equivalent to one of 'rb', and similarly for 'w' and
        'wb', and 'a' and 'ab'.

        The compresslevel argument is an integer from 0 to 9 controlling the
        level of compression; 1 is fastest and produces the least compression,
        and 9 is slowest and produces the most compression. 0 is no compression
        at all. The default is 9.

        The mtime argument is an optional numeric timestamp to be written
        to the stream when compressing. All gzip compressed streams
        are required to contain a timestamp. If omitted or None, the
        current time is used. This module ignores the timestamp when
        decompressing; however, some programs, such as gunzip, make use
        of it. The format of the timestamp is the same as that of the
        return value of time.time() and of the st_mtime member of the
        object returned by os.stat().
        """
        if mode and ('t' in mode or 'U' in mode):
            raise ValueError("Invalid mode: {!r}".format(mode))
        if mode and 'b' not in mode:
            mode += 'b'
        if fileobj is None:
            fileobj = self.myfileobj = builtins.open(filename, mode or 'rb')
        if filename is None:
            filename = getattr(fileobj, 'name', '')
            if not isinstance(filename, six.string_types):
                filename = ''
        if mode is None:
            mode = getattr(fileobj, 'mode', 'rb')
        if mode.startswith('r'):
            self.mode = READ
            # Set flag indicating start of a new member
            self._new_member = True
            # Buffer data read from gzip file. extrastart is offset in
            # stream where buffer starts. extrasize is number of
            # bytes remaining in buffer from current stream position.
            self.extrabuf = b""
            self.extrasize = 0
            self.extrastart = 0
            self.name = filename
            # Starts small, scales exponentially
            self.min_readsize = 100
            fileobj = _PaddedFile(fileobj)
        elif mode.startswith(('w', 'a')):
            self.mode = WRITE
            self._init_write(filename)
            self.compress = zlib.compressobj(compresslevel,
                                             zlib.DEFLATED,
                                             -zlib.MAX_WBITS,
                                             zlib.DEF_MEM_LEVEL,
                                             0)
        else:
            raise ValueError("Invalid mode: {!r}".format(mode))
        self.fileobj = fileobj
        self.offset = 0
        self.mtime = mtime
        if self.mode == WRITE:
            self._write_gzip_header()

    @property
    def filename(self):
        # Deprecated alias for .name; in write mode it reports the name
        # with a '.gz' suffix appended.
        import warnings
        warnings.warn("use the name attribute", DeprecationWarning, 2)
        if self.mode == WRITE and self.name[-3:] != ".gz":
            return self.name + ".gz"
        return self.name

    def __repr__(self):
        fileobj = self.fileobj
        if isinstance(fileobj, _PaddedFile):
            fileobj = fileobj.file
        s = repr(fileobj)
        return '<gzip ' + s[1:-1] + ' ' + hex(id(self)) + '>'

    def _check_closed(self):
        """Raises a ValueError if the underlying file object has been closed.
        """
        if self.closed:
            raise ValueError('I/O operation on closed file.')

    def _init_write(self, filename):
        self.name = filename
        self.crc = zlib.crc32(b"") & 0xffffffff
        self.size = 0
        self.writebuf = []
        self.bufsize = 0

    def _write_gzip_header(self):
        """Emit the RFC 1952 member header (magic, method, flags, mtime)."""
        self.fileobj.write(b'\037\213')  # magic header
        self.fileobj.write(b'\010')      # compression method (deflate)
        try:
            # RFC 1952 requires the FNAME field to be Latin-1. Do not
            # include filenames that cannot be represented that way.
            fname = os.path.basename(self.name)
            if not isinstance(fname, six.binary_type):
                fname = fname.encode('latin-1')
            if fname.endswith(b'.gz'):
                fname = fname[:-3]
        except UnicodeEncodeError:
            fname = b''
        flags = 0
        if fname:
            flags = FNAME
        self.fileobj.write(six.unichr(flags).encode('latin-1'))
        mtime = self.mtime
        if mtime is None:
            mtime = time.time()
        write32u(self.fileobj, int(mtime))
        self.fileobj.write(b'\002')  # XFL: max compression
        self.fileobj.write(b'\377')  # OS: unknown
        if fname:
            self.fileobj.write(fname + b'\000')

    def _init_read(self):
        self.crc = zlib.crc32(b"") & 0xffffffff
        self.size = 0

    def _read_exact(self, n):
        """Read exactly n bytes, raising EOFError on a truncated stream."""
        data = self.fileobj.read(n)
        while len(data) < n:
            b = self.fileobj.read(n - len(data))
            if not b:
                raise EOFError("Compressed file ended before the "
                               "end-of-stream marker was reached")
            data += b
        return data

    def _read_gzip_header(self):
        """Parse one member header; return False on clean EOF."""
        magic = self.fileobj.read(2)
        if magic == b'':
            return False
        if magic != b'\037\213':
            raise OSError('Not a gzipped file')
        method, flag, self.mtime = struct.unpack("<BBIxx", self._read_exact(8))
        if method != 8:
            raise OSError('Unknown compression method')
        if flag & FEXTRA:
            # Read & discard the extra field, if present
            extra_len, = struct.unpack("<H", self._read_exact(2))
            self._read_exact(extra_len)
        if flag & FNAME:
            # Read and discard a null-terminated string containing the filename
            while True:
                s = self.fileobj.read(1)
                if not s or s == b'\000':
                    break
        if flag & FCOMMENT:
            # Read and discard a null-terminated string containing a comment
            while True:
                s = self.fileobj.read(1)
                if not s or s == b'\000':
                    break
        if flag & FHCRC:
            self._read_exact(2)  # Read & discard the 16-bit header CRC
        # Any bytes pushed back onto the padded file belong to this member;
        # feed them straight to the decompressor.
        unused = self.fileobj.unused()
        if unused:
            uncompress = self.decompress.decompress(unused)
            self._add_read_data(uncompress)
        return True

    def write(self, data):
        self._check_closed()
        if self.mode != WRITE:
            import errno
            raise OSError(errno.EBADF, "write() on read-only GzipFile object")
        if self.fileobj is None:
            raise ValueError("write() on closed GzipFile object")
        # Convert data type if called by io.BufferedWriter.
        if isinstance(data, memoryview):
            data = data.tobytes()
        if len(data) > 0:
            self.fileobj.write(self.compress.compress(data))
            self.size += len(data)
            self.crc = zlib.crc32(data, self.crc) & 0xffffffff
            self.offset += len(data)
        return len(data)

    def read(self, size=-1):
        self._check_closed()
        if self.mode != READ:
            import errno
            raise OSError(errno.EBADF, "read() on write-only GzipFile object")
        if self.extrasize <= 0 and self.fileobj is None:
            return b''
        readsize = 1024
        if size < 0:        # get the whole thing
            while self._read(readsize):
                readsize = min(self.max_read_chunk, readsize * 2)
            size = self.extrasize
        else:               # just get some more of it
            while size > self.extrasize:
                if not self._read(readsize):
                    if size > self.extrasize:
                        size = self.extrasize
                    break
                readsize = min(self.max_read_chunk, readsize * 2)
        offset = self.offset - self.extrastart
        chunk = self.extrabuf[offset: offset + size]
        self.extrasize = self.extrasize - size
        self.offset += size
        return chunk

    def read1(self, size=-1):
        self._check_closed()
        if self.mode != READ:
            import errno
            raise OSError(errno.EBADF, "read1() on write-only GzipFile object")
        if self.extrasize <= 0 and self.fileobj is None:
            return b''
        # For certain input data, a single call to _read() may not return
        # any data. In this case, retry until we get some data or reach EOF.
        while self.extrasize <= 0 and self._read():
            pass
        if size < 0 or size > self.extrasize:
            size = self.extrasize
        offset = self.offset - self.extrastart
        chunk = self.extrabuf[offset: offset + size]
        self.extrasize -= size
        self.offset += size
        return chunk

    def peek(self, n):
        if self.mode != READ:
            import errno
            raise OSError(errno.EBADF, "peek() on write-only GzipFile object")
        # Do not return ridiculously small buffers, for one common idiom
        # is to call peek(1) and expect more bytes in return.
        if n < 100:
            n = 100
        if self.extrasize == 0:
            if self.fileobj is None:
                return b''
            # Ensure that we don't return b"" if we haven't reached EOF.
            # 1024 is the same buffering heuristic used in read()
            while self.extrasize == 0 and self._read(max(n, 1024)):
                pass
        offset = self.offset - self.extrastart
        remaining = self.extrasize
        assert remaining == len(self.extrabuf) - offset
        return self.extrabuf[offset:offset + n]

    def _unread(self, buf):
        # Push already-returned bytes back into the internal buffer.
        self.extrasize = len(buf) + self.extrasize
        self.offset -= len(buf)

    def _read(self, size=1024):
        """Decompress one chunk into the buffer; False at end of stream."""
        if self.fileobj is None:
            return False
        if self._new_member:
            # If the _new_member flag is set, we have to
            # jump to the next member, if there is one.
            self._init_read()
            if not self._read_gzip_header():
                return False
            self.decompress = zlib.decompressobj(-zlib.MAX_WBITS)
            self._new_member = False
        # Read a chunk of data from the file
        buf = self.fileobj.read(size)
        # If the EOF has been reached, flush the decompression object
        # and mark this object as finished.
        if buf == b"":
            uncompress = self.decompress.flush()
            # Prepend the already read bytes to the fileobj to they can be
            # seen by _read_eof()
            self.fileobj.prepend(self.decompress.unused_data, True)
            self._read_eof()
            self._add_read_data(uncompress)
            return False
        uncompress = self.decompress.decompress(buf)
        self._add_read_data(uncompress)
        if self.decompress.unused_data != b"":
            # Ending case: we've come to the end of a member in the file,
            # so seek back to the start of the unused data, finish up
            # this member, and read a new gzip header.
            # Prepend the already read bytes to the fileobj to they can be
            # seen by _read_eof() and _read_gzip_header()
            self.fileobj.prepend(self.decompress.unused_data, True)
            # Check the CRC and file size, and set the flag so we read
            # a new member on the next call
            self._read_eof()
            self._new_member = True
        return True

    def _add_read_data(self, data):
        self.crc = zlib.crc32(data, self.crc) & 0xffffffff
        offset = self.offset - self.extrastart
        self.extrabuf = self.extrabuf[offset:] + data
        self.extrasize = self.extrasize + len(data)
        self.extrastart = self.offset
        self.size = self.size + len(data)

    def _read_eof(self):
        # We've read to the end of the file
        # We check the that the computed CRC and size of the
        # uncompressed data matches the stored values. Note that the size
        # stored is the true file size mod 2**32.
        crc32, isize = struct.unpack("<II", self._read_exact(8))
        if crc32 != self.crc:
            raise OSError("CRC check failed %s != %s" % (hex(crc32),
                                                         hex(self.crc)))
        elif isize != (self.size & 0xffffffff):
            raise OSError("Incorrect length of data produced")
        # Gzip files can be padded with zeroes and still have archives.
        # Consume all zero bytes and set the file position to the first
        # non-zero byte. See http://www.gzip.org/#faq8
        c = b"\x00"
        while c == b"\x00":
            c = self.fileobj.read(1)
        if c:
            self.fileobj.prepend(c, True)

    @property
    def closed(self):
        return self.fileobj is None

    def close(self):
        fileobj = self.fileobj
        if fileobj is None:
            return
        self.fileobj = None
        try:
            if self.mode == WRITE:
                fileobj.write(self.compress.flush())
                write32u(fileobj, self.crc)
                # self.size may exceed 2GB, or even 4GB
                write32u(fileobj, self.size & 0xffffffff)
        finally:
            myfileobj = self.myfileobj
            if myfileobj:
                self.myfileobj = None
                myfileobj.close()

    def flush(self, zlib_mode=zlib.Z_SYNC_FLUSH):
        self._check_closed()
        if self.mode == WRITE:
            # Ensure the compressor's buffer is flushed
            self.fileobj.write(self.compress.flush(zlib_mode))
            self.fileobj.flush()

    def fileno(self):
        """Invoke the underlying file object's fileno() method.

        This will raise AttributeError if the underlying file object
        doesn't support fileno().
        """
        return self.fileobj.fileno()

    def rewind(self):
        '''Return the uncompressed stream file position indicator to the
        beginning of the file'''
        if self.mode != READ:
            raise OSError("Can't rewind in write mode")
        self.fileobj.seek(0)
        self._new_member = True
        self.extrabuf = b""
        self.extrasize = 0
        self.extrastart = 0
        self.offset = 0

    def readable(self):
        return self.mode == READ

    def writable(self):
        return self.mode == WRITE

    def seekable(self):
        return True

    def seek(self, offset, whence=0):
        if whence:
            if whence == 1:
                offset = self.offset + offset
            else:
                raise ValueError('Seek from end not supported')
        if self.mode == WRITE:
            if offset < self.offset:
                raise OSError('Negative seek in write mode')
            count = offset - self.offset
            # BUG FIX: the original wrote bytes(1024) / bytes(count % 1024).
            # On Python 3 that is n NUL bytes, but this module is explicitly
            # a Python 2.7 backport (see module header) and on Python 2
            # `bytes` is `str`, so bytes(1024) is the 4-character string
            # '1024'. Spell the zero padding out explicitly so forward
            # seeks write the correct bytes on both interpreters.
            chunk = b'\x00' * 1024
            for i in range(count // 1024):
                self.write(chunk)
            self.write(b'\x00' * (count % 1024))
        elif self.mode == READ:
            if offset < self.offset:
                # for negative seek, rewind and do positive seek
                self.rewind()
            count = offset - self.offset
            for i in range(count // 1024):
                self.read(1024)
            self.read(count % 1024)
        return self.offset

    def readline(self, size=-1):
        if size < 0:
            # Shortcut common case - newline found in buffer.
            offset = self.offset - self.extrastart
            i = self.extrabuf.find(b'\n', offset) + 1
            if i > 0:
                self.extrasize -= i - offset
                self.offset += i - offset
                return self.extrabuf[offset: i]
            size = sys.maxsize
            readsize = self.min_readsize
        else:
            readsize = size
        bufs = []
        while size != 0:
            c = self.read(readsize)
            i = c.find(b'\n')
            # We set i=size to break out of the loop under two
            # conditions: 1) there's no newline, and the chunk is
            # larger than size, or 2) there is a newline, but the
            # resulting line would be longer than 'size'.
            if (size <= i) or (i == -1 and len(c) > size):
                i = size - 1
            if i >= 0 or c == b'':
                bufs.append(c[:i + 1])    # Add portion of last chunk
                self._unread(c[i + 1:])   # Push back rest of chunk
                break
            # Append chunk to list, decrease 'size',
            bufs.append(c)
            size = size - len(c)
            readsize = min(size, readsize * 2)
        if readsize > self.min_readsize:
            self.min_readsize = min(readsize, self.min_readsize * 2, 512)
        return b''.join(bufs)   # Return resulting line
def compress(data, compresslevel=9):
    """Compress data in one shot and return the compressed string.

    Optional argument is the compression level, in range of 0-9.
    """
    sink = io.BytesIO()
    writer = GzipFile(fileobj=sink, mode='wb', compresslevel=compresslevel)
    with writer:
        writer.write(data)
    return sink.getvalue()
def decompress(data):
    """Decompress a gzip compressed string in one shot.

    Return the decompressed string.
    """
    source = io.BytesIO(data)
    with GzipFile(fileobj=source) as reader:
        return reader.read()
| python | Apache-2.0 | 1b14f58b3c895f542c724028dd0fb8ae0d816510 | 2026-01-05T07:12:01.390580Z | false |
google/apitools | https://github.com/google/apitools/blob/1b14f58b3c895f542c724028dd0fb8ae0d816510/apitools/base/py/base_api_test.py | apitools/base/py/base_api_test.py | #
# Copyright 2015 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import base64
import datetime
import sys
import contextlib
import unittest
import six
from six.moves import http_client
from six.moves import urllib_parse
from apitools.base.protorpclite import message_types
from apitools.base.protorpclite import messages
from apitools.base.py import base_api
from apitools.base.py import encoding
from apitools.base.py import exceptions
from apitools.base.py import http_wrapper
@contextlib.contextmanager
def mock(module, fn_name, patch):
    """Temporarily replace attribute `fn_name` on `module` with `patch`."""
    original = getattr(module, fn_name)
    setattr(module, fn_name, patch)
    try:
        yield
    finally:
        # Restore the original attribute even if the body raised.
        setattr(module, fn_name, original)
class SimpleMessage(messages.Message):
    """Minimal test message: one string field and one bytes field."""
    field = messages.StringField(1)
    bytes_field = messages.BytesField(2)
class MessageWithTime(messages.Message):
    """Test message carrying a single DateTime field."""
    timestamp = message_types.DateTimeField(1)
class MessageWithRemappings(messages.Message):
    """Test message whose field/enum names get custom JSON mappings
    (registered at module level just below this class)."""
    class AnEnum(messages.Enum):
        value_one = 1
        value_two = 2
    str_field = messages.StringField(1)
    enum_field = messages.EnumField('AnEnum', 2)
# Register the custom JSON names exercised by the remapping tests:
# str_field serializes as 'remapped_field', and AnEnum.value_one as 'ONE/TWO'.
encoding.AddCustomJsonFieldMapping(
    MessageWithRemappings, 'str_field', 'remapped_field')
encoding.AddCustomJsonEnumMapping(
    MessageWithRemappings.AnEnum, 'value_one', 'ONE/TWO')
class StandardQueryParameters(messages.Message):
    """Stand-in for the standard query parameters of a generated API."""
    field = messages.StringField(1)
    prettyPrint = messages.BooleanField(
        5, default=True)  # pylint: disable=invalid-name
    pp = messages.BooleanField(6, default=True)
    nextPageToken = messages.BytesField(7)  # pylint:disable=invalid-name
class FakeCredentials(object):
    """Credentials stub whose authorize() is a no-op."""
    def authorize(self, _):  # pylint: disable=invalid-name
        return None
class FakeClient(base_api.BaseApiClient):
    """Minimal concrete BaseApiClient using this test module's messages."""
    MESSAGES_MODULE = sys.modules[__name__]
    _PACKAGE = 'package'
    _SCOPES = ['scope1']
    _CLIENT_ID = 'client_id'
    _CLIENT_SECRET = 'client_secret'
class FakeService(base_api.BaseApiService):
    """Service stub; defaults to a FakeClient with fake credentials."""
    def __init__(self, client=None):
        client = client or FakeClient(
            'http://www.example.com/', credentials=FakeCredentials())
        super(FakeService, self).__init__(client)
class BaseApiTest(unittest.TestCase):
    def __GetFakeClient(self):
        # Fresh client with no base URL and fake credentials.
        return FakeClient('', credentials=FakeCredentials())
    def testUrlNormalization(self):
        # The client must canonicalize its base URL with a trailing slash.
        client = FakeClient('http://www.googleapis.com', get_credentials=False)
        self.assertTrue(client.url.endswith('/'))
    def testNoCredentials(self):
        # get_credentials=False must yield a usable client with no creds.
        client = FakeClient('', get_credentials=False)
        self.assertIsNotNone(client)
        self.assertIsNone(client._credentials)
    def testIncludeEmptyFieldsClient(self):
        """IncludeFields must force unset fields into serialized output."""
        msg = SimpleMessage()
        client = self.__GetFakeClient()
        self.assertEqual('{}', client.SerializeMessage(msg))
        with client.IncludeFields(('field',)):
            self.assertEqual('{"field": null}', client.SerializeMessage(msg))
    def testJsonResponse(self):
        """ProcessHttpResponse decodes JSON to a message, or passes the raw
        JSON through when JsonResponseModel is active."""
        method_config = base_api.ApiMethodInfo(
            response_type_name='SimpleMessage')
        service = FakeService()
        http_response = http_wrapper.Response(
            info={'status': '200'}, content='{"field": "abc"}',
            request_url='http://www.google.com')
        response_message = SimpleMessage(field='abc')
        self.assertEqual(response_message, service.ProcessHttpResponse(
            method_config, http_response))
        with service.client.JsonResponseModel():
            self.assertEqual(
                http_response.content,
                service.ProcessHttpResponse(method_config, http_response))
def testJsonResponseEncoding(self):
# On Python 3, httplib2 always returns bytes, so we need to check that
# we can correctly decode the message content using the given encoding.
method_config = base_api.ApiMethodInfo(
response_type_name='SimpleMessage')
service = FakeService(FakeClient(
'http://www.example.com/', credentials=FakeCredentials(),
response_encoding='utf8'))
http_response = http_wrapper.Response(
info={'status': '200'}, content=b'{"field": "abc"}',
request_url='http://www.google.com')
response_message = SimpleMessage(field=u'abc')
self.assertEqual(response_message, service.ProcessHttpResponse(
method_config, http_response))
with service.client.JsonResponseModel():
self.assertEqual(
http_response.content.decode('utf8'),
service.ProcessHttpResponse(method_config, http_response))
def testAdditionalHeaders(self):
additional_headers = {'Request-Is-Awesome': '1'}
client = self.__GetFakeClient()
# No headers to start
http_request = http_wrapper.Request('http://www.example.com')
new_request = client.ProcessHttpRequest(http_request)
self.assertFalse('Request-Is-Awesome' in new_request.headers)
# Add a new header and ensure it's added to the request.
client.additional_http_headers = additional_headers
http_request = http_wrapper.Request('http://www.example.com')
new_request = client.ProcessHttpRequest(http_request)
self.assertTrue('Request-Is-Awesome' in new_request.headers)
def testCustomCheckResponse(self):
def check_response():
pass
def fakeMakeRequest(*_, **kwargs):
self.assertEqual(check_response, kwargs['check_response_func'])
return http_wrapper.Response(
info={'status': '200'}, content='{"field": "abc"}',
request_url='http://www.google.com')
method_config = base_api.ApiMethodInfo(
request_type_name='SimpleMessage',
response_type_name='SimpleMessage')
client = self.__GetFakeClient()
client.check_response_func = check_response
service = FakeService(client=client)
request = SimpleMessage()
with mock(base_api.http_wrapper, 'MakeRequest', fakeMakeRequest):
service._RunMethod(method_config, request)
def testCustomRetryFunc(self):
def retry_func():
pass
def fakeMakeRequest(*_, **kwargs):
self.assertEqual(retry_func, kwargs['retry_func'])
return http_wrapper.Response(
info={'status': '200'}, content='{"field": "abc"}',
request_url='http://www.google.com')
method_config = base_api.ApiMethodInfo(
request_type_name='SimpleMessage',
response_type_name='SimpleMessage')
client = self.__GetFakeClient()
client.retry_func = retry_func
service = FakeService(client=client)
request = SimpleMessage()
with mock(base_api.http_wrapper, 'MakeRequest', fakeMakeRequest):
service._RunMethod(method_config, request)
def testHttpError(self):
def fakeMakeRequest(*unused_args, **unused_kwargs):
return http_wrapper.Response(
info={'status': http_client.BAD_REQUEST},
content='{"field": "abc"}',
request_url='http://www.google.com')
method_config = base_api.ApiMethodInfo(
request_type_name='SimpleMessage',
response_type_name='SimpleMessage')
client = self.__GetFakeClient()
service = FakeService(client=client)
request = SimpleMessage()
with mock(base_api.http_wrapper, 'MakeRequest', fakeMakeRequest):
with self.assertRaises(exceptions.HttpBadRequestError) as err:
service._RunMethod(method_config, request)
http_error = err.exception
self.assertEqual('http://www.google.com', http_error.url)
self.assertEqual('{"field": "abc"}', http_error.content)
self.assertEqual(method_config, http_error.method_config)
self.assertEqual(request, http_error.request)
def testQueryEncoding(self):
method_config = base_api.ApiMethodInfo(
request_type_name='MessageWithTime', query_params=['timestamp'])
service = FakeService()
request = MessageWithTime(
timestamp=datetime.datetime(2014, 10, 0o7, 12, 53, 13))
http_request = service.PrepareHttpRequest(method_config, request)
url_timestamp = urllib_parse.quote(request.timestamp.isoformat())
self.assertTrue(http_request.url.endswith(url_timestamp))
def testPrettyPrintEncoding(self):
method_config = base_api.ApiMethodInfo(
request_type_name='MessageWithTime', query_params=['timestamp'])
service = FakeService()
request = MessageWithTime(
timestamp=datetime.datetime(2014, 10, 0o7, 12, 53, 13))
global_params = StandardQueryParameters()
http_request = service.PrepareHttpRequest(method_config, request,
global_params=global_params)
self.assertFalse('prettyPrint' in http_request.url)
self.assertFalse('pp' in http_request.url)
global_params.prettyPrint = False # pylint: disable=invalid-name
global_params.pp = False
http_request = service.PrepareHttpRequest(method_config, request,
global_params=global_params)
self.assertTrue('prettyPrint=0' in http_request.url)
self.assertTrue('pp=0' in http_request.url)
def testQueryBytesRequest(self):
method_config = base_api.ApiMethodInfo(
request_type_name='SimpleMessage', query_params=['bytes_field'])
service = FakeService()
non_unicode_message = b''.join((six.int2byte(100),
six.int2byte(200)))
request = SimpleMessage(bytes_field=non_unicode_message)
global_params = StandardQueryParameters()
http_request = service.PrepareHttpRequest(method_config, request,
global_params=global_params)
want = urllib_parse.urlencode({
'bytes_field': base64.urlsafe_b64encode(non_unicode_message),
})
self.assertIn(want, http_request.url)
def testQueryBytesGlobalParams(self):
method_config = base_api.ApiMethodInfo(
request_type_name='SimpleMessage', query_params=['bytes_field'])
service = FakeService()
non_unicode_message = b''.join((six.int2byte(100),
six.int2byte(200)))
request = SimpleMessage()
global_params = StandardQueryParameters(
nextPageToken=non_unicode_message)
http_request = service.PrepareHttpRequest(method_config, request,
global_params=global_params)
want = urllib_parse.urlencode({
'nextPageToken': base64.urlsafe_b64encode(non_unicode_message),
})
self.assertIn(want, http_request.url)
def testQueryRemapping(self):
method_config = base_api.ApiMethodInfo(
request_type_name='MessageWithRemappings',
query_params=['remapped_field', 'enum_field'])
request = MessageWithRemappings(
str_field='foo', enum_field=MessageWithRemappings.AnEnum.value_one)
http_request = FakeService().PrepareHttpRequest(method_config, request)
result_params = urllib_parse.parse_qs(
urllib_parse.urlparse(http_request.url).query)
expected_params = {'enum_field': 'ONE%2FTWO', 'remapped_field': 'foo'}
self.assertTrue(expected_params, result_params)
def testPathRemapping(self):
method_config = base_api.ApiMethodInfo(
relative_path='parameters/{remapped_field}/remap/{enum_field}',
request_type_name='MessageWithRemappings',
path_params=['remapped_field', 'enum_field'])
request = MessageWithRemappings(
str_field='gonna',
enum_field=MessageWithRemappings.AnEnum.value_one)
service = FakeService()
expected_url = service.client.url + 'parameters/gonna/remap/ONE%2FTWO'
http_request = service.PrepareHttpRequest(method_config, request)
self.assertEqual(expected_url, http_request.url)
method_config.relative_path = (
'parameters/{+remapped_field}/remap/{+enum_field}')
expected_url = service.client.url + 'parameters/gonna/remap/ONE/TWO'
http_request = service.PrepareHttpRequest(method_config, request)
self.assertEqual(expected_url, http_request.url)
def testColonInRelativePath(self):
method_config = base_api.ApiMethodInfo(
relative_path='path:withJustColon',
request_type_name='SimpleMessage')
service = FakeService()
request = SimpleMessage()
http_request = service.PrepareHttpRequest(method_config, request)
self.assertEqual('http://www.example.com/path:withJustColon',
http_request.url)
def testOverwritesTransferUrlBase(self):
client = self.__GetFakeClient()
client.overwrite_transfer_urls_with_client_base = True
client._url = 'http://custom.p.googleapis.com/'
observed = client.FinalizeTransferUrl(
'http://normal.googleapis.com/path')
expected = 'http://custom.p.googleapis.com/path'
self.assertEqual(observed, expected)
def testApiVersionSystemParameter(self):
method_config = base_api.ApiMethodInfo(
request_type_name='SimpleMessage', api_version_param='2024-01-01')
service = FakeService()
request = SimpleMessage()
http_request = service.PrepareHttpRequest(method_config, request)
self.assertIn('X-Goog-Api-Version', http_request.headers)
self.assertEqual(
'2024-01-01', http_request.headers['X-Goog-Api-Version'])
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| python | Apache-2.0 | 1b14f58b3c895f542c724028dd0fb8ae0d816510 | 2026-01-05T07:12:01.390580Z | false |
google/apitools | https://github.com/google/apitools/blob/1b14f58b3c895f542c724028dd0fb8ae0d816510/apitools/base/py/base_api.py | apitools/base/py/base_api.py | #!/usr/bin/env python
#
# Copyright 2015 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Base class for api services."""
import base64
import contextlib
import datetime
import logging
import pprint
import six
from six.moves import http_client
from six.moves import urllib
from apitools.base.protorpclite import message_types
from apitools.base.protorpclite import messages
from apitools.base.py import encoding
from apitools.base.py import exceptions
from apitools.base.py import http_wrapper
from apitools.base.py import util
# Public API of this module.
__all__ = [
    'ApiMethodInfo',
    'ApiUploadInfo',
    'BaseApiClient',
    'BaseApiService',
    'NormalizeApiEndpoint',
]

# TODO(craigcitro): Remove this once we quiet the spurious logging in
# oauth2client (or drop oauth2client).
logging.getLogger('oauth2client.util').setLevel(logging.ERROR)

# GET requests with urls longer than this are converted to POST with an
# x-http-method-override header (see BaseApiService's request finalization).
_MAX_URL_LENGTH = 2048
class ApiUploadInfo(messages.Message):

    """Media upload information for a method.

    Fields:
      accept: (repeated) MIME Media Ranges for acceptable media uploads
          to this method.
      max_size: (integer) Maximum size of a media upload, such as 3MB
          or 1TB (converted to an integer).
      resumable_path: Path to use for resumable uploads.
      resumable_multipart: (boolean) Whether or not the resumable endpoint
          supports multipart uploads.
      simple_path: Path to use for simple uploads.
      simple_multipart: (boolean) Whether or not the simple endpoint
          supports multipart uploads.
    """
    # Field numbers identify each field on the wire; keep them stable.
    accept = messages.StringField(1, repeated=True)
    max_size = messages.IntegerField(2)
    resumable_path = messages.StringField(3)
    resumable_multipart = messages.BooleanField(4)
    simple_path = messages.StringField(5)
    simple_multipart = messages.BooleanField(6)
class ApiMethodInfo(messages.Message):

    """Configuration info for an API method.

    All fields are strings unless noted otherwise.

    Fields:
      relative_path: Relative path for this method.
      flat_path: Expanded version (if any) of relative_path.
      method_id: ID for this method.
      http_method: HTTP verb to use for this method.
      path_params: (repeated) path parameters for this method.
      query_params: (repeated) query parameters for this method.
      ordered_params: (repeated) ordered list of parameters for
          this method.
      description: description of this method.
      request_type_name: name of the request type.
      response_type_name: name of the response type.
      request_field: if not null, the field to pass as the body
          of this POST request. may also be the REQUEST_IS_BODY
          value below to indicate the whole message is the body.
      upload_config: (ApiUploadInfo) Information about the upload
          configuration supported by this method.
      supports_download: (boolean) If True, this method supports
          downloading the request via the `alt=media` query
          parameter.
      api_version_param: API version system parameter for this
          method.
    """
    # Field numbers identify each field on the wire; keep them stable.
    relative_path = messages.StringField(1)
    flat_path = messages.StringField(2)
    method_id = messages.StringField(3)
    http_method = messages.StringField(4)
    path_params = messages.StringField(5, repeated=True)
    query_params = messages.StringField(6, repeated=True)
    ordered_params = messages.StringField(7, repeated=True)
    description = messages.StringField(8)
    request_type_name = messages.StringField(9)
    response_type_name = messages.StringField(10)
    request_field = messages.StringField(11, default='')
    upload_config = messages.MessageField(ApiUploadInfo, 12)
    supports_download = messages.BooleanField(13, default=False)
    api_version_param = messages.StringField(14)
REQUEST_IS_BODY = '<request>'
def _LoadClass(name, messages_module):
if name.startswith('message_types.'):
_, _, classname = name.partition('.')
return getattr(message_types, classname)
elif '.' not in name:
return getattr(messages_module, name)
else:
raise exceptions.GeneratedClientError('Unknown class %s' % name)
def _RequireClassAttrs(obj, attrs):
for attr in attrs:
attr_name = attr.upper()
if not hasattr(obj, '%s' % attr_name) or not getattr(obj, attr_name):
msg = 'No %s specified for object of class %s.' % (
attr_name, type(obj).__name__)
raise exceptions.GeneratedClientError(msg)
def NormalizeApiEndpoint(api_endpoint):
    """Return api_endpoint with a trailing '/' appended if absent."""
    return api_endpoint if api_endpoint.endswith('/') else api_endpoint + '/'
def _urljoin(base, url): # pylint: disable=invalid-name
"""Custom urljoin replacement supporting : before / in url."""
# In general, it's unsafe to simply join base and url. However, for
# the case of discovery documents, we know:
# * base will never contain params, query, or fragment
# * url will never contain a scheme or net_loc.
# In general, this means we can safely join on /; we just need to
# ensure we end up with precisely one / joining base and url. The
# exception here is the case of media uploads, where url will be an
# absolute url.
if url.startswith('http://') or url.startswith('https://'):
return urllib.parse.urljoin(base, url)
new_base = base if base.endswith('/') else base + '/'
new_url = url[1:] if url.startswith('/') else url
return new_base + new_url
class _UrlBuilder(object):

    """Convenient container for url data.

    Holds the scheme/netloc, the relative path, and a query-parameter
    dict separately so callers can mutate each piece independently and
    reassemble the final url via the `url` property.
    """

    def __init__(self, base_url, relative_path=None, query_params=None):
        components = urllib.parse.urlsplit(_urljoin(
            base_url, relative_path or ''))
        if components.fragment:
            raise exceptions.ConfigurationValueError(
                'Unexpected url fragment: %s' % components.fragment)
        # Query params from the url itself, overlaid by explicit ones.
        self.query_params = urllib.parse.parse_qs(components.query or '')
        if query_params is not None:
            self.query_params.update(query_params)
        self.__scheme = components.scheme
        self.__netloc = components.netloc
        self.relative_path = components.path or ''

    @classmethod
    def FromUrl(cls, url):
        """Build a _UrlBuilder by splitting a complete url."""
        urlparts = urllib.parse.urlsplit(url)
        query_params = urllib.parse.parse_qs(urlparts.query)
        base_url = urllib.parse.urlunsplit((
            urlparts.scheme, urlparts.netloc, '', None, None))
        relative_path = urlparts.path or ''
        return cls(
            base_url, relative_path=relative_path, query_params=query_params)

    @property
    def base_url(self):
        # Scheme and netloc only; path and query are tracked separately.
        return urllib.parse.urlunsplit(
            (self.__scheme, self.__netloc, '', '', ''))

    @base_url.setter
    def base_url(self, value):
        components = urllib.parse.urlsplit(value)
        if components.path or components.query or components.fragment:
            raise exceptions.ConfigurationValueError(
                'Invalid base url: %s' % value)
        self.__scheme = components.scheme
        self.__netloc = components.netloc

    @property
    def query(self):
        # TODO(craigcitro): In the case that some of the query params are
        # non-ASCII, we may silently fail to encode correctly. We should
        # figure out who is responsible for owning the object -> str
        # conversion.
        return urllib.parse.urlencode(self.query_params, True)

    @property
    def url(self):
        """The assembled url; relative_path must be fully expanded."""
        if '{' in self.relative_path or '}' in self.relative_path:
            raise exceptions.ConfigurationValueError(
                'Cannot create url with relative path %s' % self.relative_path)
        return urllib.parse.urlunsplit((
            self.__scheme, self.__netloc, self.relative_path, self.query, ''))
def _SkipGetCredentials():
    """Hook for skipping credentials. For internal use.

    Always returns False here; BaseApiClient.__init__ consults this
    before attempting to fetch default credentials.
    """
    return False
class BaseApiClient(object):

    """Base class for client libraries."""
    MESSAGES_MODULE = None

    _API_KEY = ''
    _CLIENT_ID = ''
    _CLIENT_SECRET = ''
    _PACKAGE = ''
    _SCOPES = []
    _USER_AGENT = ''

    def __init__(self, url, credentials=None, get_credentials=True, http=None,
                 model=None, log_request=False, log_response=False,
                 num_retries=5, max_retry_wait=60, credentials_args=None,
                 default_global_params=None, additional_http_headers=None,
                 check_response_func=None, retry_func=None,
                 response_encoding=None):
        """Create a new API client.

        Args:
          url: Base url for the API; normalized to end with '/'.
          credentials: (optional) Credentials used to authorize http.
          get_credentials: If True and no credentials were supplied,
              fetch some via _SetCredentials.
          http: (optional) Pre-built http object; defaults to
              http_wrapper.GetHttp().
          model: Deprecated and ignored.
          log_request: If True, log outgoing requests.
          log_response: If True, log incoming responses.
          num_retries: Number of retries for failed requests (>= 0).
          max_retry_wait: Maximum seconds to wait between retries (> 0).
          credentials_args: (dict) Extra args for credential fetching.
          default_global_params: Default StandardQueryParameters message.
          additional_http_headers: (dict) Headers added to every request.
          check_response_func: Custom response-checking callable passed
              through to http_wrapper.MakeRequest.
          retry_func: Custom retry callable passed through to
              http_wrapper.MakeRequest.
          response_encoding: Encoding used to decode response bytes.
        """
        _RequireClassAttrs(self, ('_package', '_scopes', 'messages_module'))
        if default_global_params is not None:
            util.Typecheck(default_global_params, self.params_type)
        self.__default_global_params = default_global_params
        self.log_request = log_request
        self.log_response = log_response
        self.__num_retries = 5
        self.__max_retry_wait = 60
        # We let the @property machinery below do our validation.
        self.num_retries = num_retries
        self.max_retry_wait = max_retry_wait
        self._credentials = credentials
        get_credentials = get_credentials and not _SkipGetCredentials()
        if get_credentials and not credentials:
            credentials_args = credentials_args or {}
            self._SetCredentials(**credentials_args)
        self._url = NormalizeApiEndpoint(url)
        self._http = http or http_wrapper.GetHttp()
        # Note that "no credentials" is totally possible.
        if self._credentials is not None:
            self._http = self._credentials.authorize(self._http)
        # TODO(craigcitro): Remove this field when we switch to proto2.
        self.__include_fields = None
        self.additional_http_headers = additional_http_headers or {}
        self.check_response_func = check_response_func
        self.retry_func = retry_func
        self.response_encoding = response_encoding

        # Since we can't change the init arguments without regenerating
        # clients, offer this hook to affect FinalizeTransferUrl behavior.
        self.overwrite_transfer_urls_with_client_base = False

        # TODO(craigcitro): Finish deprecating these fields.
        _ = model

        self.__response_type_model = 'proto'

    def _SetCredentials(self, **kwds):
        """Fetch credentials, and set them for this client.

        Note that we can't simply return credentials, since creating them
        may involve side-effecting self.

        Args:
          **kwds: Additional keyword arguments are passed on to
              GetCredentials.

        Returns:
          None. Sets self._credentials.
        """
        args = {
            'api_key': self._API_KEY,
            'client': self,
            'client_id': self._CLIENT_ID,
            'client_secret': self._CLIENT_SECRET,
            'package_name': self._PACKAGE,
            'scopes': self._SCOPES,
            'user_agent': self._USER_AGENT,
        }
        args.update(kwds)
        # credentials_lib can be expensive to import so do it only if needed.
        from apitools.base.py import credentials_lib
        # TODO(craigcitro): It's a bit dangerous to pass this
        # still-half-initialized self into this method, but we might need
        # to set attributes on it associated with our credentials.
        # Consider another way around this (maybe a callback?) and whether
        # or not it's worth it.
        self._credentials = credentials_lib.GetCredentials(**args)

    @classmethod
    def ClientInfo(cls):
        """Return OAuth client information for this client class."""
        return {
            'client_id': cls._CLIENT_ID,
            'client_secret': cls._CLIENT_SECRET,
            'scope': ' '.join(sorted(util.NormalizeScopes(cls._SCOPES))),
            'user_agent': cls._USER_AGENT,
        }

    @property
    def base_model_class(self):
        return None

    @property
    def http(self):
        return self._http

    @property
    def url(self):
        return self._url

    @classmethod
    def GetScopes(cls):
        return cls._SCOPES

    @property
    def params_type(self):
        """The StandardQueryParameters class from MESSAGES_MODULE."""
        return _LoadClass('StandardQueryParameters', self.MESSAGES_MODULE)

    @property
    def user_agent(self):
        return self._USER_AGENT

    @property
    def _default_global_params(self):
        # Created lazily on first access.
        if self.__default_global_params is None:
            # pylint: disable=not-callable
            self.__default_global_params = self.params_type()
        return self.__default_global_params

    def AddGlobalParam(self, name, value):
        """Set a default global query parameter for all requests."""
        params = self._default_global_params
        setattr(params, name, value)

    @property
    def global_params(self):
        # Return a copy so callers cannot mutate the defaults.
        return encoding.CopyProtoMessage(self._default_global_params)

    @contextlib.contextmanager
    def IncludeFields(self, include_fields):
        """Context in which the named fields are serialized even if unset."""
        self.__include_fields = include_fields
        yield
        self.__include_fields = None

    @property
    def response_type_model(self):
        return self.__response_type_model

    @contextlib.contextmanager
    def JsonResponseModel(self):
        """In this context, return raw JSON instead of proto."""
        old_model = self.response_type_model
        self.__response_type_model = 'json'
        yield
        self.__response_type_model = old_model

    @property
    def num_retries(self):
        return self.__num_retries

    @num_retries.setter
    def num_retries(self, value):
        util.Typecheck(value, six.integer_types)
        if value < 0:
            raise exceptions.InvalidDataError(
                'Cannot have negative value for num_retries')
        self.__num_retries = value

    @property
    def max_retry_wait(self):
        return self.__max_retry_wait

    @max_retry_wait.setter
    def max_retry_wait(self, value):
        util.Typecheck(value, six.integer_types)
        if value <= 0:
            # Message previously read 'postiive'; typo fixed.
            raise exceptions.InvalidDataError(
                'max_retry_wait must be a positive integer')
        self.__max_retry_wait = value

    @contextlib.contextmanager
    def WithRetries(self, num_retries):
        """Context that temporarily overrides num_retries."""
        old_num_retries = self.num_retries
        self.num_retries = num_retries
        yield
        self.num_retries = old_num_retries

    def ProcessRequest(self, method_config, request):
        """Hook for pre-processing of requests."""
        if self.log_request:
            logging.info(
                'Calling method %s with %s: %s', method_config.method_id,
                method_config.request_type_name, request)
        return request

    def ProcessHttpRequest(self, http_request):
        """Hook for pre-processing of http requests."""
        http_request.headers.update(self.additional_http_headers)
        if self.log_request:
            logging.info('Making http %s to %s',
                         http_request.http_method, http_request.url)
            logging.info('Headers: %s', pprint.pformat(http_request.headers))
            if http_request.body:
                # TODO(craigcitro): Make this safe to print in the case of
                # non-printable body characters.
                logging.info('Body:\n%s',
                             http_request.loggable_body or http_request.body)
            else:
                logging.info('Body: (none)')
        return http_request

    def ProcessResponse(self, method_config, response):
        """Hook for post-processing of responses."""
        if self.log_response:
            logging.info('Response of type %s: %s',
                         method_config.response_type_name, response)
        return response

    # TODO(craigcitro): Decide where these two functions should live.
    def SerializeMessage(self, message):
        """Serialize message to its JSON representation."""
        return encoding.MessageToJson(
            message, include_fields=self.__include_fields)

    def DeserializeMessage(self, response_type, data):
        """Deserialize the given data as method_config.response_type."""
        try:
            message = encoding.JsonToMessage(response_type, data)
        except (exceptions.InvalidDataFromServerError,
                messages.ValidationError, ValueError) as e:
            raise exceptions.InvalidDataFromServerError(
                'Error decoding response "%s" as type %s: %s' % (
                    data, response_type.__name__, e))
        return message

    def FinalizeTransferUrl(self, url):
        """Modify the url for a given transfer, based on auth and version."""
        url_builder = _UrlBuilder.FromUrl(url)
        if getattr(self.global_params, 'key', None):
            url_builder.query_params['key'] = self.global_params.key
        if self.overwrite_transfer_urls_with_client_base:
            client_url_builder = _UrlBuilder.FromUrl(self._url)
            url_builder.base_url = client_url_builder.base_url
        return url_builder.url
class BaseApiService(object):
"""Base class for generated API services."""
def __init__(self, client):
self.__client = client
self._method_configs = {}
self._upload_configs = {}
@property
def _client(self):
return self.__client
@property
def client(self):
return self.__client
def GetMethodConfig(self, method):
"""Returns service cached method config for given method."""
method_config = self._method_configs.get(method)
if method_config:
return method_config
func = getattr(self, method, None)
if func is None:
raise KeyError(method)
method_config = getattr(func, 'method_config', None)
if method_config is None:
raise KeyError(method)
self._method_configs[method] = config = method_config()
return config
@classmethod
def GetMethodsList(cls):
return [f.__name__ for f in six.itervalues(cls.__dict__)
if getattr(f, 'method_config', None)]
def GetUploadConfig(self, method):
return self._upload_configs.get(method)
def GetRequestType(self, method):
method_config = self.GetMethodConfig(method)
return getattr(self.client.MESSAGES_MODULE,
method_config.request_type_name)
def GetResponseType(self, method):
method_config = self.GetMethodConfig(method)
return getattr(self.client.MESSAGES_MODULE,
method_config.response_type_name)
def __CombineGlobalParams(self, global_params, default_params):
"""Combine the given params with the defaults."""
util.Typecheck(global_params, (type(None), self.__client.params_type))
result = self.__client.params_type()
global_params = global_params or self.__client.params_type()
for field in result.all_fields():
value = global_params.get_assigned_value(field.name)
if value is None:
value = default_params.get_assigned_value(field.name)
if value not in (None, [], ()):
setattr(result, field.name, value)
return result
def __EncodePrettyPrint(self, query_info):
# The prettyPrint flag needs custom encoding: it should be encoded
# as 0 if False, and ignored otherwise (True is the default).
if not query_info.pop('prettyPrint', True):
query_info['prettyPrint'] = 0
# The One Platform equivalent of prettyPrint is pp, which also needs
# custom encoding.
if not query_info.pop('pp', True):
query_info['pp'] = 0
return query_info
def __FinalUrlValue(self, value, field):
"""Encode value for the URL, using field to skip encoding for bytes."""
if isinstance(field, messages.BytesField) and value is not None:
return base64.urlsafe_b64encode(value)
elif isinstance(value, six.text_type):
return value.encode('utf8')
elif isinstance(value, six.binary_type):
return value.decode('utf8')
elif isinstance(value, datetime.datetime):
return value.isoformat()
return value
def __ConstructQueryParams(self, query_params, request, global_params):
"""Construct a dictionary of query parameters for this request."""
# First, handle the global params.
global_params = self.__CombineGlobalParams(
global_params, self.__client.global_params)
global_param_names = util.MapParamNames(
[x.name for x in self.__client.params_type.all_fields()],
self.__client.params_type)
global_params_type = type(global_params)
query_info = dict(
(param,
self.__FinalUrlValue(getattr(global_params, param),
getattr(global_params_type, param)))
for param in global_param_names)
# Next, add the query params.
query_param_names = util.MapParamNames(query_params, type(request))
request_type = type(request)
query_info.update(
(param,
self.__FinalUrlValue(getattr(request, param, None),
getattr(request_type, param)))
for param in query_param_names)
query_info = dict((k, v) for k, v in query_info.items()
if v is not None)
query_info = self.__EncodePrettyPrint(query_info)
query_info = util.MapRequestParams(query_info, type(request))
return query_info
def __ConstructRelativePath(self, method_config, request,
relative_path=None):
"""Determine the relative path for request."""
python_param_names = util.MapParamNames(
method_config.path_params, type(request))
params = dict([(param, getattr(request, param, None))
for param in python_param_names])
params = util.MapRequestParams(params, type(request))
return util.ExpandRelativePath(method_config, params,
relative_path=relative_path)
def __FinalizeRequest(self, http_request, url_builder):
"""Make any final general adjustments to the request."""
if (http_request.http_method == 'GET' and
len(http_request.url) > _MAX_URL_LENGTH):
http_request.http_method = 'POST'
http_request.headers['x-http-method-override'] = 'GET'
http_request.headers[
'content-type'] = 'application/x-www-form-urlencoded'
http_request.body = url_builder.query
url_builder.query_params = {}
http_request.url = url_builder.url
def __ProcessHttpResponse(self, method_config, http_response, request):
"""Process the given http response."""
if http_response.status_code not in (http_client.OK,
http_client.CREATED,
http_client.NO_CONTENT):
raise exceptions.HttpError.FromResponse(
http_response, method_config=method_config, request=request)
if http_response.status_code == http_client.NO_CONTENT:
# TODO(craigcitro): Find out why _replace doesn't seem to work
# here.
http_response = http_wrapper.Response(
info=http_response.info, content='{}',
request_url=http_response.request_url)
content = http_response.content
if self._client.response_encoding and isinstance(content, bytes):
content = content.decode(self._client.response_encoding)
if self.__client.response_type_model == 'json':
return content
response_type = _LoadClass(method_config.response_type_name,
self.__client.MESSAGES_MODULE)
return self.__client.DeserializeMessage(response_type, content)
def __SetBaseHeaders(self, http_request, client):
"""Fill in the basic headers on http_request."""
# TODO(craigcitro): Make the default a little better here, and
# include the apitools version.
user_agent = client.user_agent or 'apitools-client/1.0'
http_request.headers['user-agent'] = user_agent
http_request.headers['accept'] = 'application/json'
http_request.headers['accept-encoding'] = 'gzip, deflate'
def __SetBaseSystemParams(self, http_request, method_config):
"""Fill in the system parameters to always set for the method."""
if method_config.api_version_param:
http_request.headers['X-Goog-Api-Version'] = (
method_config.api_version_param)
def __SetBody(self, http_request, method_config, request, upload):
"""Fill in the body on http_request."""
if not method_config.request_field:
return
request_type = _LoadClass(
method_config.request_type_name, self.__client.MESSAGES_MODULE)
if method_config.request_field == REQUEST_IS_BODY:
body_value = request
body_type = request_type
else:
body_value = getattr(request, method_config.request_field)
body_field = request_type.field_by_name(
method_config.request_field)
util.Typecheck(body_field, messages.MessageField)
body_type = body_field.type
# If there was no body provided, we use an empty message of the
# appropriate type.
body_value = body_value or body_type()
if upload and not body_value:
# We're going to fill in the body later.
return
util.Typecheck(body_value, body_type)
http_request.headers['content-type'] = 'application/json'
http_request.body = self.__client.SerializeMessage(body_value)
def PrepareHttpRequest(self, method_config, request, global_params=None,
upload=None, upload_config=None, download=None):
"""Prepares an HTTP request to be sent."""
request_type = _LoadClass(
method_config.request_type_name, self.__client.MESSAGES_MODULE)
util.Typecheck(request, request_type)
request = self.__client.ProcessRequest(method_config, request)
http_request = http_wrapper.Request(
http_method=method_config.http_method)
self.__SetBaseHeaders(http_request, self.__client)
self.__SetBaseSystemParams(http_request, method_config)
self.__SetBody(http_request, method_config, request, upload)
url_builder = _UrlBuilder(
self.__client.url, relative_path=method_config.relative_path)
url_builder.query_params = self.__ConstructQueryParams(
method_config.query_params, request, global_params)
# It's important that upload and download go before we fill in the
# relative path, so that they can replace it.
if upload is not None:
upload.ConfigureRequest(upload_config, http_request, url_builder)
if download is not None:
download.ConfigureRequest(http_request, url_builder)
url_builder.relative_path = self.__ConstructRelativePath(
method_config, request, relative_path=url_builder.relative_path)
self.__FinalizeRequest(http_request, url_builder)
return self.__client.ProcessHttpRequest(http_request)
    def _RunMethod(self, method_config, request, global_params=None,
                   upload=None, upload_config=None, download=None):
        """Call this method with request.

        Builds the HTTP request, hands it to the upload/download object
        when one is provided, otherwise issues it directly, then converts
        the HTTP response into the method's response message.
        """
        if upload is not None and download is not None:
            # TODO(craigcitro): This just involves refactoring the logic
            # below into callbacks that we can pass around; in particular,
            # the order should be that the upload gets the initial request,
            # and then passes its reply to a download if one exists, and
            # then that goes to ProcessResponse and is returned.
            raise exceptions.NotYetImplementedError(
                'Cannot yet use both upload and download at once')
        http_request = self.PrepareHttpRequest(
            method_config, request, global_params, upload, upload_config,
            download)
        # TODO(craigcitro): Make num_retries customizable on Transfer
        # objects, and pass in self.__client.num_retries when initializing
        # an upload or download.
        if download is not None:
            # Downloads own the response cycle entirely; nothing to return.
            # NOTE(review): uses self.client here while the rest of this
            # method uses self.__client -- presumably a public alias on
            # this class; confirm both refer to the same client object.
            download.InitializeDownload(http_request, client=self.client)
            return
        http_response = None
        if upload is not None:
            http_response = upload.InitializeUpload(
                http_request, client=self.client)
        if http_response is None:
            # No transfer produced a response; issue the request ourselves,
            # honoring the client's retry configuration and hooks.
            http = self.__client.http
            if upload and upload.bytes_http:
                http = upload.bytes_http
            opts = {
                'retries': self.__client.num_retries,
                'max_retry_wait': self.__client.max_retry_wait,
            }
            if self.__client.check_response_func:
                opts['check_response_func'] = self.__client.check_response_func
            if self.__client.retry_func:
                opts['retry_func'] = self.__client.retry_func
            http_response = http_wrapper.MakeRequest(
                http, http_request, **opts)
        return self.ProcessHttpResponse(method_config, http_response, request)
    def ProcessHttpResponse(self, method_config, http_response, request=None):
        """Convert an HTTP response to the expected message type."""
        # __ProcessHttpResponse deserializes the payload; the client's
        # ProcessResponse hook then gets the final say on the result.
        return self.__client.ProcessResponse(
            method_config,
            self.__ProcessHttpResponse(method_config, http_response, request))
| python | Apache-2.0 | 1b14f58b3c895f542c724028dd0fb8ae0d816510 | 2026-01-05T07:12:01.390580Z | false |
google/apitools | https://github.com/google/apitools/blob/1b14f58b3c895f542c724028dd0fb8ae0d816510/apitools/base/py/exceptions.py | apitools/base/py/exceptions.py | #!/usr/bin/env python
#
# Copyright 2015 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Exceptions for generated client libraries."""
class Error(Exception):
    """Base class for all exceptions."""


class TypecheckError(Error, TypeError):
    """An object of an incorrect type is provided."""


class NotFoundError(Error):
    """A specified resource could not be found."""


class UserError(Error):
    """Base class for errors related to user input."""


class InvalidDataError(Error):
    """Base class for any invalid data error."""


class CommunicationError(Error):
    """Any communication error talking to an API server."""


class HttpError(CommunicationError):
    """Error making a request. Soon to be HttpError."""

    def __init__(self, response, content, url,
                 method_config=None, request=None):
        # response: mapping of response headers/metadata; must contain a
        #     'status' entry for status_code to work.
        # content: response body (str or bytes).
        # url: the URL that was requested.
        error_message = HttpError._build_message(response, content, url)
        super(HttpError, self).__init__(error_message)
        self.response = response
        self.content = content
        self.url = url
        self.method_config = method_config
        self.request = request

    def __str__(self):
        return HttpError._build_message(self.response, self.content, self.url)

    @staticmethod
    def _build_message(response, content, url):
        # Bodies may arrive as bytes; decode with replacement so rendering
        # the error message can never itself raise.
        if isinstance(content, bytes):
            content = content.decode('ascii', 'replace')
        return 'HttpError accessing <%s>: response: <%s>, content <%s>' % (
            url, response, content)

    @property
    def status_code(self):
        # TODO(craigcitro): Turn this into something better than a
        # KeyError if there is no status.
        return int(self.response['status'])

    @classmethod
    def FromResponse(cls, http_response, **kwargs):
        """Build the most specific registered HttpError for http_response."""
        try:
            status_code = int(http_response.info.get('status'))
            error_cls = _HTTP_ERRORS.get(status_code, cls)
        except (ValueError, TypeError):
            # ValueError: non-numeric status string. TypeError: status
            # header missing entirely, so int(None) is attempted. Fall
            # back to the generic class instead of crashing while
            # constructing the error.
            error_cls = cls
        return error_cls(http_response.info, http_response.content,
                         http_response.request_url, **kwargs)
class HttpBadRequestError(HttpError):
    """HTTP 400 Bad Request."""


class HttpUnauthorizedError(HttpError):
    """HTTP 401 Unauthorized."""


class HttpForbiddenError(HttpError):
    """HTTP 403 Forbidden."""


class HttpNotFoundError(HttpError):
    """HTTP 404 Not Found."""


class HttpConflictError(HttpError):
    """HTTP 409 Conflict."""


# Dispatch table used by HttpError.FromResponse to pick the most specific
# subclass for a response's status code.
_HTTP_ERRORS = {
    400: HttpBadRequestError,
    401: HttpUnauthorizedError,
    403: HttpForbiddenError,
    404: HttpNotFoundError,
    409: HttpConflictError,
}
# Remaining error taxonomy: data validity, batching, configuration,
# credentials, and transfer (upload/download) failures.
class InvalidUserInputError(InvalidDataError):
    """User-provided input is invalid."""


class InvalidDataFromServerError(InvalidDataError, CommunicationError):
    """Data received from the server is malformed."""


class BatchError(Error):
    """Error generated while constructing a batch request."""


class ConfigurationError(Error):
    """Base class for configuration errors."""


class GeneratedClientError(Error):
    """The generated client configuration is invalid."""


class ConfigurationValueError(UserError):
    """Some part of the user-specified client configuration is invalid."""


class ResourceUnavailableError(Error):
    """User requested an unavailable resource."""


class CredentialsError(Error):
    """Errors related to invalid credentials."""


class TransferError(CommunicationError):
    """Errors related to transfers."""


class TransferRetryError(TransferError):
    """Retryable errors related to transfers."""


class TransferInvalidError(TransferError):
    """The given transfer is invalid."""


class RequestError(CommunicationError):
    """The request was not successful."""
class RetryAfterError(HttpError):
    """The response contained a retry-after header."""

    def __init__(self, response, content, url, retry_after, **kwargs):
        # retry_after: the Retry-After value from the response; coerced
        # to an int number of seconds.
        super(RetryAfterError, self).__init__(response, content, url, **kwargs)
        self.retry_after = int(retry_after)

    @classmethod
    def FromResponse(cls, http_response, **kwargs):
        # Overrides HttpError.FromResponse to thread through the
        # retry_after value parsed by the HTTP layer.
        return cls(http_response.info, http_response.content,
                   http_response.request_url, http_response.retry_after,
                   **kwargs)
class BadStatusCodeError(HttpError):
    """The request completed but returned a bad status code."""


class NotYetImplementedError(GeneratedClientError):
    """This functionality is not yet implemented."""


class StreamExhausted(Error):
    """Attempted to read more bytes from a stream than were available."""
| python | Apache-2.0 | 1b14f58b3c895f542c724028dd0fb8ae0d816510 | 2026-01-05T07:12:01.390580Z | false |
google/apitools | https://github.com/google/apitools/blob/1b14f58b3c895f542c724028dd0fb8ae0d816510/apitools/base/py/stream_slice.py | apitools/base/py/stream_slice.py | #!/usr/bin/env python
#
# Copyright 2015 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Small helper class to provide a small slice of a stream."""
from apitools.base.py import exceptions
class StreamSlice(object):
    """Provides a slice-like object for streams.

    Wraps a file-like object and exposes at most max_bytes of it through
    read(); len() and truthiness report the slice size, not the bytes
    remaining.
    """

    def __init__(self, stream, max_bytes):
        # stream: file-like object supporting read(size).
        # max_bytes: total number of bytes this slice may consume.
        self.__stream = stream
        self.__remaining_bytes = max_bytes
        self.__max_bytes = max_bytes

    def __str__(self):
        return 'Slice of stream %s with %s/%s bytes not yet read' % (
            self.__stream, self.__remaining_bytes, self.__max_bytes)

    def __len__(self):
        return self.__max_bytes

    def __nonzero__(self):
        # For 32-bit python2.x, len() cannot exceed a 32-bit number; avoid
        # accidental len() calls from httplib in the form of "if this_object:".
        return bool(self.__max_bytes)

    # Python 3 consults __bool__, not __nonzero__; alias it so the
    # truthiness shortcut above also applies on Python 3 (otherwise
    # Python 3 would fall back to __len__).
    __bool__ = __nonzero__

    @property
    def length(self):
        # For 32-bit python2.x, len() cannot exceed a 32-bit number.
        return self.__max_bytes

    def read(self, size=None):  # pylint: disable=missing-docstring
        """Read at most size bytes from this slice.

        Compared to other streams, there is one case where we may
        unexpectedly raise an exception on read: if the underlying stream
        is exhausted (i.e. returns no bytes on read), and the size of this
        slice indicates we should still be able to read more bytes, we
        raise exceptions.StreamExhausted.

        Args:
          size: If provided, read no more than size bytes from the stream.

        Returns:
          The bytes read from this slice.

        Raises:
          exceptions.StreamExhausted
        """
        if size is not None:
            read_size = min(size, self.__remaining_bytes)
        else:
            read_size = self.__remaining_bytes
        data = self.__stream.read(read_size)
        if read_size > 0 and not data:
            raise exceptions.StreamExhausted(
                'Not enough bytes in stream; expected %d, exhausted '
                'after %d' % (
                    self.__max_bytes,
                    self.__max_bytes - self.__remaining_bytes))
        self.__remaining_bytes -= len(data)
        return data
| python | Apache-2.0 | 1b14f58b3c895f542c724028dd0fb8ae0d816510 | 2026-01-05T07:12:01.390580Z | false |
google/apitools | https://github.com/google/apitools/blob/1b14f58b3c895f542c724028dd0fb8ae0d816510/apitools/base/py/encoding_helper.py | apitools/base/py/encoding_helper.py | #!/usr/bin/env python
#
# Copyright 2018 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Common code for converting proto to other formats, such as JSON."""
import base64
import collections
import datetime
import json
import six
from apitools.base.protorpclite import message_types
from apitools.base.protorpclite import messages
from apitools.base.protorpclite import protojson
from apitools.base.py import exceptions
# Internal pair of callables registered as a custom encoder/decoder.
_Codec = collections.namedtuple('_Codec', ['encoder', 'decoder'])
# Result of a codec invocation: `complete` is True when `value` is final
# and no further (default) processing should run.
CodecResult = collections.namedtuple('CodecResult', ['value', 'complete'])
class EdgeType(object):
    """The type of transition made by an edge."""
    SCALAR = 1
    REPEATED = 2
    MAP = 3


class ProtoEdge(collections.namedtuple('ProtoEdge',
                                       ['type_', 'field', 'index'])):
    """One step from a message down to a directly nested value.

    Because message fields may themselves be message-typed, instances are
    nested to arbitrary depth; a ProtoEdge pins down a single level of
    that nesting without ambiguity.

    Properties:
      type_: EdgeType, which kind of transition this edge makes.
      field: str, name of the message-typed field being traversed.
      index: Any, extra data needed for the step. Its meaning depends on
          "type_": ignored for SCALAR, a list position for REPEATED, and
          a mapping key for MAP.
    """
    __slots__ = ()

    def __str__(self):
        # Non-scalar edges render with their index, e.g. "field[3]".
        if self.type_ != EdgeType.SCALAR:
            return '{}[{}]'.format(self.field, self.index)
        return self.field
# TODO(craigcitro): Make these non-global.
_UNRECOGNIZED_FIELD_MAPPINGS = {}
_CUSTOM_MESSAGE_CODECS = {}
_CUSTOM_FIELD_CODECS = {}
_FIELD_TYPE_CODECS = {}


def MapUnrecognizedFields(field_name):
    """Class decorator: collect unrecognized fields into field_name."""
    def _decorator(cls):
        _UNRECOGNIZED_FIELD_MAPPINGS[cls] = field_name
        return cls
    return _decorator


def RegisterCustomMessageCodec(encoder, decoder):
    """Class decorator: use this encoder/decoder pair for the class."""
    def _decorator(cls):
        _CUSTOM_MESSAGE_CODECS[cls] = _Codec(encoder=encoder, decoder=decoder)
        return cls
    return _decorator


def RegisterCustomFieldCodec(encoder, decoder):
    """Field decorator: use this encoder/decoder pair for the field."""
    def _decorator(field):
        _CUSTOM_FIELD_CODECS[field] = _Codec(encoder=encoder, decoder=decoder)
        return field
    return _decorator


def RegisterFieldTypeCodec(encoder, decoder):
    """Type decorator: use this encoder/decoder for all fields of a type."""
    def _decorator(field_type):
        _FIELD_TYPE_CODECS[field_type] = _Codec(
            encoder=encoder, decoder=decoder)
        return field_type
    return _decorator
def CopyProtoMessage(message):
    """Make a deep copy of a message."""
    # Round-trips through JSON (rather than copying object graphs) so the
    # custom codecs registered with this module are honored.
    return JsonToMessage(type(message), MessageToJson(message))


def MessageToJson(message, include_fields=None):
    """Convert the given message to JSON."""
    result = _ProtoJsonApiTools.Get().encode_message(message)
    # include_fields forces explicitly requested unset fields to appear
    # in the output (as null, or [] for list-valued fields).
    return _IncludeFields(result, message, include_fields)


def JsonToMessage(message_type, message):
    """Convert the given JSON to a message of type message_type."""
    return _ProtoJsonApiTools.Get().decode_message(message_type, message)


# TODO(craigcitro): Do this directly, instead of via JSON.
def DictToMessage(d, message_type):
    """Convert the given dictionary to a message of type message_type."""
    return JsonToMessage(message_type, json.dumps(d))


def MessageToDict(message):
    """Convert the given message to a dictionary."""
    return json.loads(MessageToJson(message))
def DictToAdditionalPropertyMessage(properties, additional_property_type,
                                    sort_items=False):
    """Convert the given dictionary to an AdditionalProperty message.

    Args:
      properties: dict to convert; each entry becomes one
          AdditionalProperty pair.
      additional_property_type: message type exposing a nested
          AdditionalProperty(key=..., value=...) type and an
          additionalProperties field.
      sort_items: if True, emit pairs in sorted key order.

    Returns:
      An additional_property_type instance holding all pairs.
    """
    pairs = properties.items()
    if sort_items:
        pairs = sorted(pairs)
    entries = [
        additional_property_type.AdditionalProperty(key=k, value=v)
        for k, v in pairs
    ]
    return additional_property_type(additionalProperties=entries)
def PyValueToMessage(message_type, value):
    """Convert the given python value to a message of type message_type."""
    # Like DictToMessage, but accepts any JSON-serializable python value.
    return JsonToMessage(message_type, json.dumps(value))


def MessageToPyValue(message):
    """Convert the given message to a python value."""
    return json.loads(MessageToJson(message))
def MessageToRepr(msg, multiline=False, **kwargs):
    """Return a repr-style string for a protorpc message.

    protorpc.Message.__repr__ does not return anything that could be
    considered python code. Adding this function lets us print a protorpc
    message in such a way that it could be pasted into code later, and
    used to compare against other things.

    Args:
      msg: protorpc.Message, the message to be repr'd.
      multiline: bool, True if the returned string should have each field
          assignment on its own line.
      **kwargs: {str:str}, Additional flags for how to format the string.

    Known **kwargs:
      shortstrings: bool, True if all string values should be
          truncated at 100 characters, since when mocking the contents
          typically don't matter except for IDs, and IDs are usually
          less than 100 characters.
      no_modules: bool, True if the long module name should not be printed
          with each type.

    Returns:
      str, A string of valid python (assuming the right imports have been
      made) that recreates the message passed into this function.
    """
    # TODO(jasmuth): craigcitro suggests a pretty-printer from apitools/gen.
    indent = kwargs.get('indent', 0)

    def IndentKwargs(kwargs):
        # Copy kwargs with the indent level bumped for one nesting level.
        kwargs = dict(kwargs)
        kwargs['indent'] = kwargs.get('indent', 0) + 4
        return kwargs

    if isinstance(msg, list):
        # Recurse per element; multiline puts each on its own line.
        s = '['
        for item in msg:
            if multiline:
                s += '\n' + ' ' * (indent + 4)
            s += MessageToRepr(
                item, multiline=multiline, **IndentKwargs(kwargs)) + ','
        if multiline:
            s += '\n' + ' ' * indent
        s += ']'
        return s
    if isinstance(msg, messages.Message):
        # Render as Type(field=..., ...) with fields in sorted name order
        # so output is deterministic.
        s = type(msg).__name__ + '('
        if not kwargs.get('no_modules'):
            s = msg.__module__ + '.' + s
        names = sorted([field.name for field in msg.all_fields()])
        for name in names:
            field = msg.field_by_name(name)
            if multiline:
                s += '\n' + ' ' * (indent + 4)
            value = getattr(msg, field.name)
            s += field.name + '=' + MessageToRepr(
                value, multiline=multiline, **IndentKwargs(kwargs)) + ','
        if multiline:
            s += '\n' + ' ' * indent
        s += ')'
        return s
    if isinstance(msg, six.string_types):
        # Optionally truncate long strings; falls through to repr() below.
        if kwargs.get('shortstrings') and len(msg) > 100:
            msg = msg[:100]
    if isinstance(msg, datetime.datetime):

        # tzinfo whose repr() prints as the protorpclite TimeZoneOffset
        # constructor, so the emitted code round-trips.
        class SpecialTZInfo(datetime.tzinfo):

            def __init__(self, offset):
                super(SpecialTZInfo, self).__init__()
                self.offset = offset

            def __repr__(self):
                s = 'TimeZoneOffset(' + repr(self.offset) + ')'
                if not kwargs.get('no_modules'):
                    s = 'apitools.base.protorpclite.util.' + s
                return s
        msg = datetime.datetime(
            msg.year, msg.month, msg.day, msg.hour, msg.minute, msg.second,
            msg.microsecond, SpecialTZInfo(msg.tzinfo.utcoffset(0)))
    return repr(msg)
def _GetField(message, field_path):
    """Walk field_path (a list of attribute names) down from message.

    Raises KeyError when any step names an attribute that does not exist.
    """
    current = message
    for name in field_path:
        # dir() membership mirrors the attribute lookup that follows.
        if name not in dir(current):
            raise KeyError('no field "%s"' % name)
        current = getattr(current, name)
    return current
def _SetField(dictblob, field_path, value):
    """Store value at the nested location field_path within dictblob.

    Intermediate dictionaries are created on demand via setdefault.
    """
    node = dictblob
    leaf = field_path[-1]
    for key in field_path[:-1]:
        node = node.setdefault(key, {})
    node[leaf] = value
def _IncludeFields(encoded_message, message, include_fields):
    """Add the requested fields to the encoded message.

    Each name in include_fields (dotted paths allowed) is forced to
    appear in the JSON output as null, or [] for list-valued fields.
    """
    if include_fields is None:
        return encoded_message
    result = json.loads(encoded_message)
    for field_name in include_fields:
        try:
            value = _GetField(message, field_name.split('.'))
            nullvalue = None
            if isinstance(value, list):
                nullvalue = []
        except KeyError:
            raise exceptions.InvalidDataError(
                'No field named %s in message of type %s' % (
                    field_name, type(message)))
        _SetField(result, field_name.split('.'), nullvalue)
    return json.dumps(result)
def _GetFieldCodecs(field, attr):
    # attr is 'encoder' or 'decoder'. Per-field registrations take
    # precedence over per-field-type ones; missing entries are dropped.
    result = [
        getattr(_CUSTOM_FIELD_CODECS.get(field), attr, None),
        getattr(_FIELD_TYPE_CODECS.get(type(field)), attr, None),
    ]
    return [x for x in result if x is not None]
class _ProtoJsonApiTools(protojson.ProtoJson):
    """JSON encoder used by apitools clients.

    Extends the stock protojson codec with: custom message/field codecs,
    custom JSON field and enum name mappings, and preservation of fields
    and enum values the message definition does not know about.
    """
    _INSTANCE = None  # process-wide singleton; see Get().

    @classmethod
    def Get(cls):
        # Lazily construct and reuse a single shared instance.
        if cls._INSTANCE is None:
            cls._INSTANCE = cls()
        return cls._INSTANCE

    def decode_message(self, message_type, encoded_message):
        # A custom codec registered for the type takes over completely.
        if message_type in _CUSTOM_MESSAGE_CODECS:
            return _CUSTOM_MESSAGE_CODECS[
                message_type].decoder(encoded_message)
        # Otherwise: undo custom JSON names, decode normally, then salvage
        # unknown enums/messages/fields from the raw JSON.
        result = _DecodeCustomFieldNames(message_type, encoded_message)
        result = super(_ProtoJsonApiTools, self).decode_message(
            message_type, result)
        result = _ProcessUnknownEnums(result, encoded_message)
        result = _ProcessUnknownMessages(result, encoded_message)
        return _DecodeUnknownFields(result, encoded_message)

    def decode_field(self, field, value):
        """Decode the given JSON value.

        Args:
          field: a messages.Field for the field we're decoding.
          value: a python value we'd like to decode.

        Returns:
          A value suitable for assignment to field.
        """
        # Registered codecs run first and may fully handle the value.
        for decoder in _GetFieldCodecs(field, 'decoder'):
            result = decoder(field, value)
            value = result.value
            if result.complete:
                return value
        if isinstance(field, messages.MessageField):
            field_value = self.decode_message(
                field.message_type, json.dumps(value))
        elif isinstance(field, messages.EnumField):
            # Map a custom JSON enum name back to its python name first.
            value = GetCustomJsonEnumMapping(
                field.type, json_name=value) or value
            try:
                field_value = super(
                    _ProtoJsonApiTools, self).decode_field(field, value)
            except messages.DecodeError:
                if not isinstance(value, six.string_types):
                    raise
                # Unknown enum string: decode to None here; the raw value
                # is preserved later by _ProcessUnknownEnums.
                field_value = None
        else:
            field_value = super(
                _ProtoJsonApiTools, self).decode_field(field, value)
        return field_value

    def encode_message(self, message):
        if isinstance(message, messages.FieldList):
            return '[%s]' % (', '.join(self.encode_message(x)
                                       for x in message))
        # pylint: disable=unidiomatic-typecheck
        if type(message) in _CUSTOM_MESSAGE_CODECS:
            return _CUSTOM_MESSAGE_CODECS[type(message)].encoder(message)
        message = _EncodeUnknownFields(message)
        result = super(_ProtoJsonApiTools, self).encode_message(message)
        result = _EncodeCustomFieldNames(message, result)
        # Re-serialize with sorted keys for deterministic output.
        return json.dumps(json.loads(result), sort_keys=True)

    def encode_field(self, field, value):
        """Encode the given value as JSON.

        Args:
          field: a messages.Field for the field we're encoding.
          value: a value for field.

        Returns:
          A python value suitable for json.dumps.
        """
        # Registered codecs run first and may fully handle the value.
        for encoder in _GetFieldCodecs(field, 'encoder'):
            result = encoder(field, value)
            value = result.value
            if result.complete:
                return value
        if isinstance(field, messages.EnumField):
            # Apply custom JSON names for enum values, where registered.
            if field.repeated:
                remapped_value = [GetCustomJsonEnumMapping(
                    field.type, python_name=e.name) or e.name for e in value]
            else:
                remapped_value = GetCustomJsonEnumMapping(
                    field.type, python_name=value.name)
            if remapped_value:
                return remapped_value
        if (isinstance(field, messages.MessageField) and
                not isinstance(field, message_types.DateTimeField)):
            value = json.loads(self.encode_message(value))
        return super(_ProtoJsonApiTools, self).encode_field(field, value)
# TODO(craigcitro): Fold this and _IncludeFields in as codecs.
def _DecodeUnknownFields(message, encoded_message):
    """Rewrite unknown fields in message into message.destination."""
    destination = _UNRECOGNIZED_FIELD_MAPPINGS.get(type(message))
    if destination is None:
        return message
    pair_field = message.field_by_name(destination)
    if not isinstance(pair_field, messages.MessageField):
        raise exceptions.InvalidDataFromServerError(
            'Unrecognized fields must be mapped to a compound '
            'message type.')
    pair_type = pair_field.message_type
    # TODO(craigcitro): Add more error checking around the pair
    # type being exactly what we suspect (field names, etc).
    if isinstance(pair_type.value, messages.MessageField):
        new_values = _DecodeUnknownMessages(
            message, json.loads(encoded_message), pair_type)
    else:
        new_values = _DecodeUnrecognizedFields(message, pair_type)
    setattr(message, destination, new_values)
    # We could probably get away with not setting this, but
    # why not clear it?
    # (Reaches into protorpc's name-mangled private store directly.)
    setattr(message, '_Message__unrecognized_fields', {})
    return message
def _DecodeUnknownMessages(message, encoded_message, pair_type):
    """Process unknown fields in encoded_message of a message type."""
    field_type = pair_type.value.type
    new_values = []
    all_field_names = [x.name for x in message.all_fields()]
    for name, value_dict in six.iteritems(encoded_message):
        # Only names the schema does not declare count as "unknown".
        if name in all_field_names:
            continue
        value = PyValueToMessage(field_type, value_dict)
        if pair_type.value.repeated:
            value = _AsMessageList(value)
        new_pair = pair_type(key=name, value=value)
        new_values.append(new_pair)
    return new_values
def _DecodeUnrecognizedFields(message, pair_type):
    """Process unrecognized fields in message."""
    new_values = []
    codec = _ProtoJsonApiTools.Get()
    for unknown_field in message.all_unrecognized_fields():
        # TODO(craigcitro): Consider validating the variant if
        # the assignment below doesn't take care of it. It may
        # also be necessary to check it in the case that the
        # type has multiple encodings.
        value, _ = message.get_unrecognized_field_info(unknown_field)
        value_type = pair_type.field_by_name('value')
        if isinstance(value_type, messages.MessageField):
            decoded_value = DictToMessage(value, pair_type.value.message_type)
        else:
            decoded_value = codec.decode_field(
                pair_type.value, value)
        try:
            new_pair_key = str(unknown_field)
        except UnicodeEncodeError:
            # Python 2 str() fails on non-ASCII keys; decode the key
            # through the codec instead.
            new_pair_key = protojson.ProtoJson().decode_field(
                pair_type.key, unknown_field)
        new_pair = pair_type(key=new_pair_key, value=decoded_value)
        new_values.append(new_pair)
    return new_values
def _CopyProtoMessageVanillaProtoJson(message):
    # Round-trip through the stock protojson codec (not _ProtoJsonApiTools)
    # to avoid recursing back into the helpers that call this function.
    codec = protojson.ProtoJson()
    return codec.decode_message(type(message), codec.encode_message(message))
def _EncodeUnknownFields(message):
    """Remap unknown fields in message out of message.source."""
    source = _UNRECOGNIZED_FIELD_MAPPINGS.get(type(message))
    if source is None:
        return message
    # CopyProtoMessage uses _ProtoJsonApiTools, which uses this message. Use
    # the vanilla protojson-based copy function to avoid infinite recursion.
    result = _CopyProtoMessageVanillaProtoJson(message)
    pairs_field = message.field_by_name(source)
    if not isinstance(pairs_field, messages.MessageField):
        raise exceptions.InvalidUserInputError(
            'Invalid pairs field %s' % pairs_field)
    pairs_type = pairs_field.message_type
    value_field = pairs_type.field_by_name('value')
    value_variant = value_field.variant
    pairs = getattr(message, source)
    codec = _ProtoJsonApiTools.Get()
    # Move each key/value pair into the copy's unrecognized-field store,
    # then clear the source field on the copy.
    for pair in pairs:
        encoded_value = codec.encode_field(value_field, pair.value)
        result.set_unrecognized_field(pair.key, encoded_value, value_variant)
    setattr(result, source, [])
    return result
def _SafeEncodeBytes(field, value):
    """Encode the bytes in value as urlsafe base64."""
    try:
        if field.repeated:
            encoded = [base64.urlsafe_b64encode(b) for b in value]
        else:
            encoded = base64.urlsafe_b64encode(value)
    except TypeError:
        # Not byte-like; pass the value through for default handling.
        return CodecResult(value=value, complete=False)
    return CodecResult(value=encoded, complete=True)
def _SafeDecodeBytes(unused_field, value):
    """Decode the urlsafe base64 value into bytes."""
    try:
        decoded = base64.urlsafe_b64decode(str(value))
    except TypeError:
        # Not valid base64; pass the value through for default handling.
        return CodecResult(value=value, complete=False)
    return CodecResult(value=decoded, complete=True)
def _ProcessUnknownEnums(message, encoded_message):
    """Add unknown enum values from encoded_message as unknown fields.

    ProtoRPC diverges from the usual protocol buffer behavior here and
    doesn't allow unknown fields. Throwing on unknown fields makes it
    impossible to let servers add new enum values and stay compatible
    with older clients, which isn't reasonable for us. We simply store
    unrecognized enum values as unknown fields, and all is well.

    Args:
      message: Proto message we've decoded thus far.
      encoded_message: JSON string we're decoding.

    Returns:
      message, with any unknown enums stored as unrecognized fields.
    """
    if not encoded_message:
        return message
    decoded_message = json.loads(six.ensure_str(encoded_message))
    for field in message.all_fields():
        if (isinstance(field, messages.EnumField) and
                field.name in decoded_message):
            value = message.get_assigned_value(field.name)
            # A scalar enum that failed to decode is None; a repeated enum
            # that dropped values is shorter than the JSON list. Either
            # way, keep the raw JSON value as an unrecognized field.
            if ((field.repeated and
                 len(value) != len(decoded_message[field.name])) or
                    value is None):
                message.set_unrecognized_field(
                    field.name, decoded_message[field.name],
                    messages.Variant.ENUM)
    return message
def _ProcessUnknownMessages(message, encoded_message):
    """Store any remaining unknown fields as strings.

    ProtoRPC currently ignores unknown values for which no type can be
    determined (and logs a "No variant found" message). For the purposes
    of reserializing, this is quite harmful (since it throws away
    information). Here we simply add those as unknown fields of type
    string (so that they can easily be reserialized).

    Args:
      message: Proto message we've decoded thus far.
      encoded_message: JSON string we're decoding.

    Returns:
      message, with any remaining unrecognized fields saved.
    """
    if not encoded_message:
        return message
    decoded_message = json.loads(six.ensure_str(encoded_message))
    # Anything in the JSON that is neither a declared field nor already
    # recorded as unrecognized gets stored as a string variant.
    message_fields = [x.name for x in message.all_fields()] + list(
        message.all_unrecognized_fields())
    missing_fields = [x for x in decoded_message.keys()
                      if x not in message_fields]
    for field_name in missing_fields:
        message.set_unrecognized_field(field_name, decoded_message[field_name],
                                       messages.Variant.STRING)
    return message
# Register the base64 codec for every BytesField so raw bytes survive the
# JSON round trip.
RegisterFieldTypeCodec(_SafeEncodeBytes, _SafeDecodeBytes)(messages.BytesField)


# Note that these could share a dictionary, since they're keyed by
# distinct types, but it's not really worth it.
_JSON_ENUM_MAPPINGS = {}
_JSON_FIELD_MAPPINGS = {}
def AddCustomJsonEnumMapping(enum_type, python_name, json_name,
                             package=None):  # pylint: disable=unused-argument
    """Add a custom wire encoding for a given enum value.

    This is primarily used in generated code, to handle enum values
    which happen to be Python keywords.

    Args:
      enum_type: (messages.Enum) An enum type
      python_name: (basestring) Python name for this value.
      json_name: (basestring) JSON name to be used on the wire.
      package: (NoneType, optional) No effect, exists for legacy
          compatibility.
    """
    if not issubclass(enum_type, messages.Enum):
        raise exceptions.TypecheckError(
            'Cannot set JSON enum mapping for non-enum "%s"' % enum_type)
    if python_name not in enum_type.names():
        raise exceptions.InvalidDataError(
            'Enum value %s not a value for type %s' % (python_name, enum_type))
    field_mappings = _JSON_ENUM_MAPPINGS.setdefault(enum_type, {})
    # Re-registering the identical mapping is a no-op; a conflicting
    # mapping raises inside _CheckForExistingMappings.
    _CheckForExistingMappings('enum', enum_type, python_name, json_name)
    field_mappings[python_name] = json_name
def AddCustomJsonFieldMapping(message_type, python_name, json_name,
                              package=None):  # pylint: disable=unused-argument
    """Add a custom wire encoding for a given message field.

    This is primarily used in generated code, to handle enum values
    which happen to be Python keywords.

    Args:
      message_type: (messages.Message) A message type
      python_name: (basestring) Python name for this value.
      json_name: (basestring) JSON name to be used on the wire.
      package: (NoneType, optional) No effect, exists for legacy
          compatibility.
    """
    if not issubclass(message_type, messages.Message):
        raise exceptions.TypecheckError(
            'Cannot set JSON field mapping for '
            'non-message "%s"' % message_type)
    try:
        _ = message_type.field_by_name(python_name)
    except KeyError:
        raise exceptions.InvalidDataError(
            'Field %s not recognized for type %s' % (
                python_name, message_type))
    field_mappings = _JSON_FIELD_MAPPINGS.setdefault(message_type, {})
    # Re-registering the identical mapping is a no-op; a conflicting
    # mapping raises inside _CheckForExistingMappings.
    _CheckForExistingMappings('field', message_type, python_name, json_name)
    field_mappings[python_name] = json_name
def GetCustomJsonEnumMapping(enum_type, python_name=None, json_name=None):
    """Return the appropriate remapping for the given enum, or None."""
    # Exactly one of python_name/json_name must be given; _FetchRemapping
    # enforces this.
    return _FetchRemapping(enum_type, 'enum',
                           python_name=python_name, json_name=json_name,
                           mappings=_JSON_ENUM_MAPPINGS)


def GetCustomJsonFieldMapping(message_type, python_name=None, json_name=None):
    """Return the appropriate remapping for the given field, or None."""
    # Exactly one of python_name/json_name must be given; _FetchRemapping
    # enforces this.
    return _FetchRemapping(message_type, 'field',
                           python_name=python_name, json_name=json_name,
                           mappings=_JSON_FIELD_MAPPINGS)
def _FetchRemapping(type_name, mapping_type, python_name=None, json_name=None,
                    mappings=None):
    """Common code for fetching a key or value from a remapping dict.

    Looks up python_name -> json_name (forward) or json_name ->
    python_name (reverse) in mappings[type_name]; returns None when no
    remapping is registered.
    """
    if python_name and json_name:
        raise exceptions.InvalidDataError(
            'Cannot specify both python_name and json_name '
            'for %s remapping' % mapping_type)
    if not (python_name or json_name):
        raise exceptions.InvalidDataError(
            'Must specify either python_name or json_name for %s remapping' % (
                mapping_type,))
    remap = mappings.get(type_name, {})
    if python_name:
        return remap.get(python_name)
    # Reverse lookup: first key (in insertion order) whose value matches.
    for key in remap:
        if remap[key] == json_name:
            return key
    return None
def _CheckForExistingMappings(mapping_type, message_type,
                              python_name, json_name):
    """Validate that no mappings exist for the given values.

    Re-registering an identical mapping passes silently; a mapping that
    conflicts with an existing one (in either direction) raises.
    """
    if mapping_type == 'field':
        getter = GetCustomJsonFieldMapping
    elif mapping_type == 'enum':
        getter = GetCustomJsonEnumMapping
    remapping = getter(message_type, python_name=python_name)
    if remapping is not None and remapping != json_name:
        raise exceptions.InvalidDataError(
            'Cannot add mapping for %s "%s", already mapped to "%s"' % (
                mapping_type, python_name, remapping))
    remapping = getter(message_type, json_name=json_name)
    if remapping is not None and remapping != python_name:
        raise exceptions.InvalidDataError(
            'Cannot add mapping for %s "%s", already mapped to "%s"' % (
                mapping_type, json_name, remapping))
def _EncodeCustomFieldNames(message, encoded_value):
    """Rename custom-mapped field names in the encoded JSON string.

    Args:
      message: message instance whose type may have registered
          python_name -> json_name field remappings.
      encoded_value: str, JSON-encoded message.

    Returns:
      str, JSON with remapped keys renamed to their wire (JSON) names.
    """
    field_remappings = list(_JSON_FIELD_MAPPINGS.get(type(message), {})
                            .items())
    if field_remappings:
        decoded_value = json.loads(encoded_value)
        for python_name, json_name in field_remappings:
            # Test membership against the decoded dict, not the raw JSON
            # string: a substring match could hit a value (or part of
            # another key), and .pop() would then raise KeyError. This
            # also mirrors _DecodeCustomFieldNames below.
            if python_name in decoded_value:
                decoded_value[json_name] = decoded_value.pop(python_name)
        encoded_value = json.dumps(decoded_value)
    return encoded_value
def _DecodeCustomFieldNames(message_type, encoded_message):
    # Inverse of _EncodeCustomFieldNames: rename wire (JSON) keys back to
    # their python names before protojson decoding.
    field_remappings = _JSON_FIELD_MAPPINGS.get(message_type, {})
    if field_remappings:
        decoded_message = json.loads(encoded_message)
        for python_name, json_name in list(field_remappings.items()):
            if json_name in decoded_message:
                decoded_message[python_name] = decoded_message.pop(json_name)
        encoded_message = json.dumps(decoded_message)
    return encoded_message
def _AsMessageList(msg):
    """Convert the provided list-as-JsonValue to a list."""
    # This really needs to live in extra_types, but extra_types needs
    # to import this file to be able to register codecs.
    # TODO(craigcitro): Split out a codecs module and fix this ugly
    # import.
    from apitools.base.py import extra_types

    def _LooksLikeJsonArray(value):
        """True for a JsonArray, or a JsonValue wrapping a non-empty one."""
        if isinstance(value, extra_types.JsonArray):
            return True
        return bool(isinstance(value, extra_types.JsonValue) and
                    value.array_value)

    if not _LooksLikeJsonArray(msg):
        raise ValueError('invalid argument to _AsMessageList')
    result = msg
    # Unwrap JsonValue -> JsonArray -> plain list of entries.
    if isinstance(result, extra_types.JsonValue):
        result = result.array_value
    if isinstance(result, extra_types.JsonArray):
        result = result.entries
    return result
def _IsMap(message, field):
    """Returns whether the "field" is actually a map-type."""
    candidate = message.get_assigned_value(field.name)
    if not isinstance(candidate, messages.Message):
        return False
    try:
        props_field = candidate.field_by_name('additionalProperties')
    except KeyError:
        # No additionalProperties field at all -> not a map message.
        return False
    return props_field.repeated
def _MapItems(message, field):
    """Yields the (key, value) pair of the map values."""
    assert _IsMap(message, field)
    entries = message.get_assigned_value(field.name).get_assigned_value(
        'additionalProperties')
    for entry in entries:
        yield entry.key, entry.value
def UnrecognizedFieldIter(message, _edges=()):  # pylint: disable=invalid-name
    """Yields the locations of unrecognized fields within "message".

    If a sub-message is found to have unrecognized fields, that sub-message
    will not be searched any further. We prune the search of the sub-message
    because we assume it is malformed and further checks will not yield
    productive errors.

    Args:
      message: The Message instance to search.
      _edges: Internal arg for passing state.

    Yields:
      (edges_to_message, field_names):
        edges_to_message: List[ProtoEdge], The edges (relative to "message")
            describing the path to the sub-message where the unrecognized
            fields were found.
        field_names: List[Str], The names of the field(s) that were
            unrecognized in the sub-message.
    """
    if not isinstance(message, messages.Message):
        # Primitive leaf: nothing to report down this path.
        return
    unknown_names = message.all_unrecognized_fields()
    if unknown_names:
        # Malformed message: report it and prune the search here.
        yield _edges, unknown_names
        return
    for field in message.all_fields():
        value = message.get_assigned_value(field.name)
        # Compute the (edge, child) pairs to recurse into for this field.
        if field.repeated:
            children = ((ProtoEdge(EdgeType.REPEATED, field.name, i), item)
                        for i, item in enumerate(value))
        elif _IsMap(message, field):
            children = ((ProtoEdge(EdgeType.MAP, field.name, key), item)
                        for key, item in _MapItems(message, field))
        else:
            children = ((ProtoEdge(EdgeType.SCALAR, field.name, None),
                         value),)
        for edge, child in children:
            for hit in UnrecognizedFieldIter(child, _edges + (edge,)):
                yield hit
| python | Apache-2.0 | 1b14f58b3c895f542c724028dd0fb8ae0d816510 | 2026-01-05T07:12:01.390580Z | false |
google/apitools | https://github.com/google/apitools/blob/1b14f58b3c895f542c724028dd0fb8ae0d816510/apitools/base/py/compression.py | apitools/base/py/compression.py | #!/usr/bin/env python
#
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Compression support for apitools."""
from collections import deque
from apitools.base.py import gzip
__all__ = [
'CompressStream',
]
# pylint: disable=invalid-name
# Note: Apitools only uses the default chunksize when compressing.
def CompressStream(in_stream, length=None, compresslevel=2,
                   chunksize=16777216):
    """Compresses an input stream into a file-like buffer.

    Reads from `in_stream` until either at least `length` compressed bytes
    have been buffered or the input is exhausted, so streams of unknown
    size are supported.

    Args:
      in_stream: The input stream to read from.
      length: The target number of compressed bytes to buffer in the output
          stream. If None, the input stream is compressed until exhausted.
          The actual output size can vary from the target: it may be smaller
          if the input runs out early, and for incompressible data it can
          exceed the target by up to:
            chunksize + 5 * (floor((chunksize - 1) / 16383) + 1) + 17
          which accounts for gzip header overhead. For the default 16MiB
          chunksize that is length + 16MiB + 5142 bytes.
      compresslevel: Optional, defaults to 2. The desired compression level.
      chunksize: Optional, defaults to 16MiB. The number of bytes read from
          the input stream per iteration.

    Returns:
      A tuple of (file-like buffer of compressed bytes, number of bytes
      read from the input stream, flag denoting whether the input stream
      was exhausted).
    """
    bytes_read = 0
    exhausted = False
    out_stream = StreamingBuffer()
    with gzip.GzipFile(mode='wb',
                       fileobj=out_stream,
                       compresslevel=compresslevel) as gz_out:
        # Keep compressing until the output reaches the target (if any).
        while not length or out_stream.length < length:
            chunk = in_stream.read(chunksize)
            gz_out.write(chunk)
            bytes_read += len(chunk)
            # A short read means the input stream is exhausted.
            if len(chunk) < chunksize:
                exhausted = True
                break
    return out_stream, bytes_read, exhausted
class StreamingBuffer(object):

    """Provides a file-like object that writes to a temporary buffer.

    When data is read from the buffer, it is permanently removed. This is
    useful when there are memory constraints preventing the entire buffer
    from being stored in memory.
    """

    def __init__(self):
        # The buffer of byte arrays.
        self.__buf = deque()
        # The number of bytes in __buf.
        self.__size = 0

    def __len__(self):
        return self.__size

    def __nonzero__(self):
        # For 32-bit python2.x, len() cannot exceed a 32-bit number; avoid
        # accidental len() calls from httplib in the form of
        # "if this_object:".
        return bool(self.__size)

    @property
    def length(self):
        # For 32-bit python2.x, len() cannot exceed a 32-bit number.
        return self.__size

    def write(self, data):
        # Gzip can write many 0 byte chunks for highly compressible data.
        # Prevent them from being added internally. A plain truthiness test
        # covers both None and empty chunks (the previous
        # `data is not None and data` check was redundant).
        if data:
            self.__buf.append(data)
            self.__size += len(data)

    def read(self, size=None):
        """Read at most size bytes from this buffer.

        Bytes read from this buffer are consumed and are permanently removed.

        Args:
          size: If provided, read no more than size bytes from the buffer.
              Otherwise, this reads the entire buffer.

        Returns:
          The bytes read from this buffer.
        """
        if size is None:
            size = self.__size
        ret_list = []
        while size > 0 and self.__buf:
            data = self.__buf.popleft()
            size -= len(data)
            ret_list.append(data)
        # If the last chunk overshot the requested size, split it and push
        # the unread remainder back onto the front of the buffer.
        if size < 0:
            ret_list[-1], remainder = ret_list[-1][:size], ret_list[-1][size:]
            self.__buf.appendleft(remainder)
        ret = b''.join(ret_list)
        self.__size -= len(ret)
        return ret
| python | Apache-2.0 | 1b14f58b3c895f542c724028dd0fb8ae0d816510 | 2026-01-05T07:12:01.390580Z | false |
google/apitools | https://github.com/google/apitools/blob/1b14f58b3c895f542c724028dd0fb8ae0d816510/apitools/base/py/http_wrapper.py | apitools/base/py/http_wrapper.py | #!/usr/bin/env python
#
# Copyright 2015 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""HTTP wrapper for apitools.
This library wraps the underlying http library we use, which is
currently httplib2.
"""
import collections
import contextlib
import logging
import socket
import time
import httplib2
import six
from six.moves import http_client
from six.moves.urllib import parse
from apitools.base.py import exceptions
from apitools.base.py import util
# pylint: disable=ungrouped-imports
try:
from oauth2client.client import HttpAccessTokenRefreshError as TokenRefreshError # noqa
except ImportError:
from oauth2client.client import AccessTokenRefreshError as TokenRefreshError # noqa
__all__ = [
'CheckResponse',
'GetHttp',
'HandleExceptionsAndRebuildHttpConnections',
'MakeRequest',
'RebuildHttpConnections',
'Request',
'Response',
'RethrowExceptionHandler',
]
# 308 and 429 don't have names in httplib.
# 308 is used here for resumable uploads ("Resume Incomplete").
RESUME_INCOMPLETE = 308
# 429 is "Too Many Requests" (RFC 6585); treated as retryable below.
TOO_MANY_REQUESTS = 429
# Status codes Response.is_redirect recognizes as redirects.
_REDIRECT_STATUS_CODES = (
    http_client.MOVED_PERMANENTLY,
    http_client.FOUND,
    http_client.SEE_OTHER,
    http_client.TEMPORARY_REDIRECT,
    RESUME_INCOMPLETE,
)
# State bundle handed to retry handlers (e.g.
# HandleExceptionsAndRebuildHttpConnections):
# http: An httplib2.Http instance.
# http_request: A http_wrapper.Request.
# exc: Exception being raised.
# num_retries: Number of retries consumed; used for exponential backoff.
# max_retry_wait: Upper bound (seconds) on a single retry sleep.
# total_wait_sec: Seconds elapsed since the first attempt.
ExceptionRetryArgs = collections.namedtuple(
    'ExceptionRetryArgs', ['http', 'http_request', 'exc', 'num_retries',
                           'max_retry_wait', 'total_wait_sec'])
@contextlib.contextmanager
def _Httplib2Debuglevel(http_request, level, http=None):
    """Temporarily change the value of httplib2.debuglevel, if necessary.

    If http_request has a `loggable_body` distinct from `body`, then we
    need to prevent httplib2 from logging the full body. This sets
    httplib2.debuglevel for the duration of the `with` block; however,
    that alone won't change the value of existing HTTP connections. If
    an httplib2.Http object is provided, we'll also change the level on
    any cached connections attached to it.

    Args:
      http_request: a Request we're logging.
      level: (int) the debuglevel for logging.
      http: (optional) an httplib2.Http whose connections we should
          set the debuglevel on.

    Yields:
      None.
    """
    if http_request.loggable_body is None:
        # No separate loggable body, so there is nothing to hide; leave the
        # global debuglevel untouched.
        yield
        return
    old_level = httplib2.debuglevel
    http_levels = {}  # saved per-connection debuglevels, for restoration
    httplib2.debuglevel = level
    if http is not None:
        for connection_key, connection in http.connections.items():
            # httplib2 stores two kinds of values in this dict, connection
            # classes and instances. Since the connection types are all
            # old-style classes, we can't easily distinguish by connection
            # type -- so instead we use the key pattern.
            if ':' not in connection_key:
                continue
            http_levels[connection_key] = connection.debuglevel
            connection.set_debuglevel(level)
    yield
    # Restore the global level and any per-connection levels we changed.
    httplib2.debuglevel = old_level
    if http is not None:
        for connection_key, old_level in http_levels.items():
            # Skip connections that were dropped while the block ran.
            if connection_key in http.connections:
                http.connections[connection_key].set_debuglevel(old_level)
class Request(object):

    """Class encapsulating the data for an HTTP request."""

    def __init__(self, url='', http_method='GET', headers=None, body=''):
        self.url = url
        self.http_method = http_method
        self.headers = headers or {}
        self.__body = None
        self.__loggable_body = None
        # Assign through the property so content-length is maintained.
        self.body = body

    @property
    def loggable_body(self):
        """A safe-to-log stand-in for the body, if one was set."""
        return self.__loggable_body

    @loggable_body.setter
    def loggable_body(self, value):
        if self.body is None:
            raise exceptions.RequestError(
                'Cannot set loggable body on request with no body')
        self.__loggable_body = value

    @property
    def body(self):
        return self.__body

    @body.setter
    def body(self, value):
        """Sets the request body; handles logging and length measurement."""
        self.__body = value
        if value is None:
            # No body: drop any stale content-length header.
            self.headers.pop('content-length', None)
        else:
            # Avoid calling len() which cannot exceed 4GiB in 32-bit python.
            body_length = getattr(value, 'length', None) or len(value)
            self.headers['content-length'] = str(body_length)
        # This line ensures we don't try to print large requests.
        if not isinstance(value, (type(None), six.string_types)):
            self.loggable_body = '<media body>'
# Note: currently the order of fields here is important, since we want
# to be able to pass in the result from httplib2.request.
class Response(collections.namedtuple(
        'HttpResponse', ['info', 'content', 'request_url'])):

    """Class encapsulating data for an HTTP response."""
    __slots__ = ()

    def __len__(self):
        return self.length

    @property
    def length(self):
        """Return the length of this response.

        We expose this as an attribute since using len() directly can fail
        for responses larger than sys.maxint.

        Returns:
          Response length (as int or long)
        """
        def _RangeLength(content_range):
            # "bytes a-b/total" -> (b - a) + 1
            _, _, range_spec = content_range.partition(' ')
            byte_range = range_spec.partition('/')[0]
            start, _, end = byte_range.partition('-')
            return int(end) - int(start) + 1

        if '-content-encoding' in self.info and 'content-range' in self.info:
            # httplib2 rewrites content-length in the case of a compressed
            # transfer; we can't trust the content-length header in that
            # case, but we *can* trust content-range, if it's present.
            return _RangeLength(self.info['content-range'])
        if 'content-length' in self.info:
            return int(self.info.get('content-length'))
        if 'content-range' in self.info:
            return _RangeLength(self.info['content-range'])
        return len(self.content)

    @property
    def status_code(self):
        return int(self.info['status'])

    @property
    def retry_after(self):
        if 'retry-after' in self.info:
            return int(self.info['retry-after'])

    @property
    def is_redirect(self):
        return (self.status_code in _REDIRECT_STATUS_CODES and
                'location' in self.info)
def CheckResponse(response):
    """Raise if the response is missing or indicates a retryable failure.

    Args:
      response: A Response, or None.

    Raises:
      exceptions.RequestError: if response is None.
      exceptions.BadStatusCodeError: for 5XX or 429 status codes.
      exceptions.RetryAfterError: if the response has a retry-after value.
    """
    if response is None:
        # Caller shouldn't call us if the response is None, but handle
        # anyway. NOTE: the previous message interpolated
        # response.request_url, which raised AttributeError on None instead
        # of the intended RequestError.
        raise exceptions.RequestError(
            'Request did not return a response.')
    elif (response.status_code >= 500 or
          response.status_code == TOO_MANY_REQUESTS):
        raise exceptions.BadStatusCodeError.FromResponse(response)
    elif response.retry_after:
        raise exceptions.RetryAfterError.FromResponse(response)
def RebuildHttpConnections(http):
    """Rebuilds all http connections in the httplib2.Http instance.

    httplib2 overloads the map in http.connections to contain two different
    types of values:
    { scheme string:  connection class } and
    { scheme + authority string : actual http connection }
    Here we remove all of the entries for actual connections so that on the
    next request httplib2 will rebuild them from the connection types.

    Args:
      http: An httplib2.Http instance.
    """
    connections = getattr(http, 'connections', None)
    if not connections:
        return
    # Keys containing ':' name live connections; bare keys name classes.
    stale_keys = [key for key in connections if ':' in key]
    for key in stale_keys:
        del connections[key]
def RethrowExceptionHandler(*unused_args):
    """Retry handler that re-raises the exception currently being handled.

    Must be invoked from within an `except` block: the bare `raise`
    re-raises the active exception, preserving its traceback.
    """
    # pylint: disable=misplaced-bare-raise
    raise
def HandleExceptionsAndRebuildHttpConnections(retry_args):
    """Exception handler for http failures.

    This catches known failures and rebuilds the underlying HTTP connections.
    Unknown exceptions are re-raised.

    Args:
      retry_args: An ExceptionRetryArgs tuple.
    """
    # If the server indicates how long to wait, use that value.  Otherwise,
    # calculate the wait time on our own.
    retry_after = None

    # Transport failures
    if isinstance(retry_args.exc, (http_client.BadStatusLine,
                                   http_client.IncompleteRead,
                                   http_client.ResponseNotReady)):
        logging.debug('Caught HTTP error %s, retrying: %s',
                      type(retry_args.exc).__name__, retry_args.exc)
    # NOTE: socket.gaierror and socket.timeout are subclasses of
    # socket.error, so they must be tested *before* it; previously these
    # two branches were unreachable and their messages never logged.
    elif isinstance(retry_args.exc, socket.gaierror):
        logging.debug(
            'Caught socket address error, retrying: %s', retry_args.exc)
    elif isinstance(retry_args.exc, socket.timeout):
        logging.debug(
            'Caught socket timeout error, retrying: %s', retry_args.exc)
    elif isinstance(retry_args.exc, socket.error):
        logging.debug('Caught socket error, retrying: %s', retry_args.exc)
    elif isinstance(retry_args.exc, httplib2.ServerNotFoundError):
        logging.debug(
            'Caught server not found error, retrying: %s', retry_args.exc)
    elif isinstance(retry_args.exc, ValueError):
        # oauth2client tries to JSON-decode the response, which can result
        # in a ValueError if the response was invalid. Until that is fixed in
        # oauth2client, need to handle it here.
        logging.debug('Response content was invalid (%s), retrying',
                      retry_args.exc)
    elif (isinstance(retry_args.exc, TokenRefreshError) and
          hasattr(retry_args.exc, 'status') and
          (retry_args.exc.status == TOO_MANY_REQUESTS or
           retry_args.exc.status >= 500)):
        logging.debug(
            'Caught transient credential refresh error (%s), retrying',
            retry_args.exc)
    elif isinstance(retry_args.exc, exceptions.RequestError):
        logging.debug('Request returned no response, retrying')
    # API-level failures
    elif isinstance(retry_args.exc, exceptions.BadStatusCodeError):
        logging.debug('Response returned status %s, retrying',
                      retry_args.exc.status_code)
    elif isinstance(retry_args.exc, exceptions.RetryAfterError):
        logging.debug('Response returned a retry-after header, retrying')
        retry_after = retry_args.exc.retry_after
    else:
        raise retry_args.exc

    # The exception is retryable: reset cached connections and back off.
    RebuildHttpConnections(retry_args.http)
    logging.debug('Retrying request to url %s after exception %s',
                  retry_args.http_request.url, retry_args.exc)
    time.sleep(
        retry_after or util.CalculateWaitForRetry(
            retry_args.num_retries, max_wait=retry_args.max_retry_wait))
def MakeRequest(http, http_request, retries=7, max_retry_wait=60,
                redirections=5,
                retry_func=HandleExceptionsAndRebuildHttpConnections,
                check_response_func=CheckResponse):
    """Send http_request via the given http, performing error/retry handling.

    Args:
      http: An httplib2.Http instance, or a http multiplexer that delegates to
          an underlying http, for example, HTTPMultiplexer.
      http_request: A Request to send.
      retries: (int, default 7) Number of retries to attempt on retryable
          replies (such as 429 or 5XX).
      max_retry_wait: (int, default 60) Maximum number of seconds to wait
          when retrying.
      redirections: (int, default 5) Number of redirects to follow.
      retry_func: Function to handle retries on exceptions. Argument is an
          ExceptionRetryArgs tuple.
      check_response_func: Function to validate the HTTP response.
          Arguments are (Response, response content, url).

    Raises:
      InvalidDataFromServerError: if there is no response after retries.

    Returns:
      A Response object.
    """
    attempt = 0
    start_time = time.time()
    # Provide compatibility for breaking change in httplib2 0.16.0+:
    # https://github.com/googleapis/google-api-python-client/issues/803
    if hasattr(http, 'redirect_codes'):
        http.redirect_codes = set(http.redirect_codes) - {308}
    while True:
        try:
            return _MakeRequestNoRetry(
                http, http_request, redirections=redirections,
                check_response_func=check_response_func)
        # retry_func will consume the exception types it handles and raise.
        # pylint: disable=broad-except
        except Exception as exc:
            attempt += 1
            if attempt >= retries:
                raise
            elapsed = time.time() - start_time
            retry_func(ExceptionRetryArgs(http, http_request, exc, attempt,
                                          max_retry_wait, elapsed))
def _MakeRequestNoRetry(http, http_request, redirections=5,
                        check_response_func=CheckResponse):
    """Send http_request via the given http, with no retry logic.

    This wrapper exists to handle translation between the plain httplib2
    request/response types and the Request and Response types above.

    Args:
      http: An httplib2.Http instance, or a http multiplexer that delegates to
          an underlying http, for example, HTTPMultiplexer.
      http_request: A Request to send.
      redirections: (int, default 5) Number of redirects to follow.
      check_response_func: Function to validate the HTTP response.
          Arguments are (Response, response content, url).

    Returns:
      A Response object.

    Raises:
      RequestError if no response could be parsed.
    """
    connection_type = None
    # Handle overrides for connection types. This is used if the caller
    # wants control over the underlying connection for managing callbacks
    # or hash digestion.
    connections = getattr(http, 'connections', None)
    if connections:
        scheme = parse.urlsplit(http_request.url).scheme
        if scheme and scheme in connections:
            connection_type = connections[scheme]

    # Custom printing only at debuglevel 4
    new_debuglevel = 4 if httplib2.debuglevel == 4 else 0
    with _Httplib2Debuglevel(http_request, new_debuglevel, http=http):
        info, content = http.request(
            str(http_request.url), method=str(http_request.http_method),
            body=http_request.body, headers=http_request.headers,
            redirections=redirections, connection_type=connection_type)

    if info is None:
        raise exceptions.RequestError()

    response = Response(info, content, http_request.url)
    check_response_func(response)
    return response
_HTTP_FACTORIES = []
def _RegisterHttpFactory(factory):
_HTTP_FACTORIES.append(factory)
def GetHttp(**kwds):
for factory in _HTTP_FACTORIES:
http = factory(**kwds)
if http is not None:
return http
return httplib2.Http(**kwds)
| python | Apache-2.0 | 1b14f58b3c895f542c724028dd0fb8ae0d816510 | 2026-01-05T07:12:01.390580Z | false |
google/apitools | https://github.com/google/apitools/blob/1b14f58b3c895f542c724028dd0fb8ae0d816510/apitools/base/py/extra_types.py | apitools/base/py/extra_types.py | #!/usr/bin/env python
#
# Copyright 2015 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Extra types understood by apitools."""
import datetime
import json
import numbers
import six
from apitools.base.protorpclite import message_types
from apitools.base.protorpclite import messages
from apitools.base.protorpclite import protojson
from apitools.base.py import encoding_helper as encoding
from apitools.base.py import exceptions
from apitools.base.py import util
if six.PY3:
from collections.abc import Iterable
else:
from collections import Iterable
__all__ = [
'DateField',
'DateTimeMessage',
'JsonArray',
'JsonObject',
'JsonValue',
'JsonProtoEncoder',
'JsonProtoDecoder',
]
# pylint:disable=invalid-name
# Re-exported alias so clients can import the DateTime message type from
# this module (listed in __all__ above).
DateTimeMessage = message_types.DateTimeMessage
# pylint:enable=invalid-name
# We insert our own metaclass here to avoid letting ProtoRPC
# register this as the default field type for strings.
#   * since ProtoRPC does this via metaclasses, we don't have any
#     choice but to use one ourselves
#   * since a subclass's metaclass must inherit from its superclass's
#     metaclass, we're forced to have this hard-to-read inheritance.
#
# pylint: disable=protected-access
class _FieldMeta(messages._FieldMeta):
    """Metaclass that skips ProtoRPC's default-field-type registration."""
    def __init__(cls, name, bases, dct):  # pylint: disable=no-self-argument
        # Deliberately call type.__init__ (not the parent metaclass's
        # __init__) so messages._FieldMeta's registration never runs.
        # pylint: disable=super-init-not-called,non-parent-init-called
        type.__init__(cls, name, bases, dct)
# pylint: enable=protected-access
class DateField(six.with_metaclass(_FieldMeta, messages.Field)):
    """Field definition for Date values."""
    # Dates travel on the wire as strings (see _EncodeDateField below,
    # which serializes via date.isoformat()).
    VARIANTS = frozenset([messages.Variant.STRING])
    DEFAULT_VARIANT = messages.Variant.STRING
    type = datetime.date
def _ValidateJsonValue(json_value):
    """Raise InvalidDataError unless exactly one field of json_value is set."""
    assigned = [f for f in json_value.all_fields()
                if json_value.get_assigned_value(f.name) is not None]
    if len(assigned) != 1:
        raise exceptions.InvalidDataError(
            'Malformed JsonValue: %s' % json_value)
def _JsonValueToPythonValue(json_value):
    """Convert the given JsonValue message to a native python value."""
    util.Typecheck(json_value, JsonValue)
    _ValidateJsonValue(json_value)
    if json_value.is_null:
        return None
    # Exactly one field is assigned (validated above); locate it.
    assigned = [(f, json_value.get_assigned_value(f.name))
                for f in json_value.all_fields()
                if json_value.get_assigned_value(f.name) is not None]
    field, value = assigned[0]
    if not isinstance(field, messages.MessageField):
        return value
    if field.message_type is JsonObject:
        return _JsonObjectToPythonValue(value)
    if field.message_type is JsonArray:
        return _JsonArrayToPythonValue(value)


def _JsonObjectToPythonValue(json_value):
    """Convert a JsonObject message to a python dict."""
    util.Typecheck(json_value, JsonObject)
    return {prop.key: _JsonValueToPythonValue(prop.value)
            for prop in json_value.properties}


def _JsonArrayToPythonValue(json_value):
    """Convert a JsonArray message to a python list."""
    util.Typecheck(json_value, JsonArray)
    return [_JsonValueToPythonValue(entry) for entry in json_value.entries]
# Inclusive bounds of the int64 range. NOTE: the previous expressions,
# `2 << 63 - 1` and `-(2 << 63)`, evaluated to 2**63 and -2**64 because
# `-` binds tighter than `<<`; the lower bound therefore admitted values
# far below int64 min, which were then encoded as integer_value.
_MAXINT64 = (1 << 63) - 1
_MININT64 = -(1 << 63)


def _PythonValueToJsonValue(py_value):
    """Convert the given python value to a JsonValue message.

    Args:
      py_value: None, a bool, string, number, dict, or iterable.

    Returns:
      The corresponding JsonValue.

    Raises:
      exceptions.InvalidDataError: if py_value cannot be represented.
    """
    if py_value is None:
        return JsonValue(is_null=True)
    # bool must be tested before Number, since bool subclasses int.
    if isinstance(py_value, bool):
        return JsonValue(boolean_value=py_value)
    if isinstance(py_value, six.string_types):
        return JsonValue(string_value=py_value)
    if isinstance(py_value, numbers.Number):
        if isinstance(py_value, six.integer_types):
            # Use int64 when the value fits exactly; otherwise fall through
            # to a (possibly lossy) double.
            if _MININT64 <= py_value <= _MAXINT64:
                return JsonValue(integer_value=py_value)
        return JsonValue(double_value=float(py_value))
    if isinstance(py_value, dict):
        return JsonValue(object_value=_PythonValueToJsonObject(py_value))
    if isinstance(py_value, Iterable):
        return JsonValue(array_value=_PythonValueToJsonArray(py_value))
    raise exceptions.InvalidDataError(
        'Cannot convert "%s" to JsonValue' % py_value)
def _PythonValueToJsonObject(py_value):
    """Convert a python dict into a JsonObject message."""
    util.Typecheck(py_value, dict)
    props = [JsonObject.Property(key=k, value=_PythonValueToJsonValue(v))
             for k, v in py_value.items()]
    return JsonObject(properties=props)


def _PythonValueToJsonArray(py_value):
    """Convert a python iterable into a JsonArray message."""
    return JsonArray(entries=[_PythonValueToJsonValue(item)
                              for item in py_value])
class JsonValue(messages.Message):
    """Any valid JSON value.

    At most one value field should be assigned per instance; this is
    enforced by _ValidateJsonValue at conversion time.
    """
    # Is this JSON object `null`?
    is_null = messages.BooleanField(1, default=False)
    # Exactly one of the following is provided if is_null is False; none
    # should be provided if is_null is True.
    boolean_value = messages.BooleanField(2)
    string_value = messages.StringField(3)
    # We keep two numeric fields to keep int64 round-trips exact.
    double_value = messages.FloatField(4, variant=messages.Variant.DOUBLE)
    integer_value = messages.IntegerField(5, variant=messages.Variant.INT64)
    # Compound types. These reference JsonObject/JsonArray by name because
    # those classes are defined after this one.
    object_value = messages.MessageField('JsonObject', 6)
    array_value = messages.MessageField('JsonArray', 7)
class JsonObject(messages.Message):
    """A JSON object value, represented as a list of key/value properties.

    Messages:
      Property: A property of a JsonObject.

    Fields:
      properties: A list of properties of a JsonObject.
    """
    class Property(messages.Message):
        """A property of a JSON object.

        Fields:
          key: Name of the property.
          value: A JsonValue attribute.
        """
        key = messages.StringField(1)
        value = messages.MessageField(JsonValue, 2)
    properties = messages.MessageField(Property, 1, repeated=True)
class JsonArray(messages.Message):
    """A JSON array value, represented as a repeated list of JsonValues."""
    entries = messages.MessageField(JsonValue, 1, repeated=True)
# Dispatch table from JSON proto message type to its python converter.
_JSON_PROTO_TO_PYTHON_MAP = {
    JsonArray: _JsonArrayToPythonValue,
    JsonObject: _JsonObjectToPythonValue,
    JsonValue: _JsonValueToPythonValue,
}
_JSON_PROTO_TYPES = tuple(_JSON_PROTO_TO_PYTHON_MAP.keys())


def _JsonProtoToPythonValue(json_proto):
    """Convert a JSON proto message to a python value via the dispatch map."""
    util.Typecheck(json_proto, _JSON_PROTO_TYPES)
    convert = _JSON_PROTO_TO_PYTHON_MAP[type(json_proto)]
    return convert(json_proto)
def _PythonValueToJsonProto(py_value):
    """Convert a python value to the most specific JSON proto message."""
    if isinstance(py_value, dict):
        return _PythonValueToJsonObject(py_value)
    # Strings are iterable but must be treated as scalar values.
    listlike = (isinstance(py_value, Iterable) and
                not isinstance(py_value, six.string_types))
    if listlike:
        return _PythonValueToJsonArray(py_value)
    return _PythonValueToJsonValue(py_value)


def _JsonProtoToJson(json_proto, unused_encoder=None):
    """Serialize a JSON proto message to a JSON string."""
    return json.dumps(_JsonProtoToPythonValue(json_proto))


def _JsonToJsonProto(json_data, unused_decoder=None):
    """Parse a JSON string into the most specific JSON proto message."""
    return _PythonValueToJsonProto(json.loads(json_data))


def _JsonToJsonValue(json_data, unused_decoder=None):
    """Parse a JSON string, always wrapping the result in a JsonValue."""
    result = _PythonValueToJsonProto(json.loads(json_data))
    if isinstance(result, JsonValue):
        return result
    if isinstance(result, JsonObject):
        return JsonValue(object_value=result)
    if isinstance(result, JsonArray):
        return JsonValue(array_value=result)
    raise exceptions.InvalidDataError(
        'Malformed JsonValue: %s' % json_data)
# Public aliases for the JSON codec functions (listed in __all__ above).
# pylint:disable=invalid-name
JsonProtoEncoder = _JsonProtoToJson
JsonProtoDecoder = _JsonToJsonProto
# pylint:enable=invalid-name
# Register custom codecs so these three message types are encoded to and
# decoded from raw JSON. JsonValue uses the stricter decoder that always
# wraps the parsed result in a JsonValue.
encoding.RegisterCustomMessageCodec(
    encoder=JsonProtoEncoder, decoder=_JsonToJsonValue)(JsonValue)
encoding.RegisterCustomMessageCodec(
    encoder=JsonProtoEncoder, decoder=JsonProtoDecoder)(JsonObject)
encoding.RegisterCustomMessageCodec(
    encoder=JsonProtoEncoder, decoder=JsonProtoDecoder)(JsonArray)
def _EncodeDateTimeField(field, value):
    """Encode a DateTimeField value via the stock protojson encoder."""
    encoded = protojson.ProtoJson().encode_field(field, value)
    return encoding.CodecResult(value=encoded, complete=True)


def _DecodeDateTimeField(unused_field, value):
    """Decode a datetime string via the stock protojson decoder."""
    decoded = protojson.ProtoJson().decode_field(
        message_types.DateTimeField(1), value)
    return encoding.CodecResult(value=decoded, complete=True)


encoding.RegisterFieldTypeCodec(_EncodeDateTimeField, _DecodeDateTimeField)(
    message_types.DateTimeField)
def _EncodeInt64Field(field, value):
    """Handle the special case of int64 as a string."""
    if field.variant not in (messages.Variant.INT64,
                             messages.Variant.UINT64):
        # Not a 64-bit integer variant; let the default codec handle it.
        return encoding.CodecResult(value=value, complete=False)
    if field.repeated:
        encoded = [str(item) for item in value]
    else:
        encoded = str(value)
    return encoding.CodecResult(value=encoded, complete=True)


def _DecodeInt64Field(unused_field, value):
    # Don't need to do anything special, they're decoded just fine
    return encoding.CodecResult(value=value, complete=False)


encoding.RegisterFieldTypeCodec(_EncodeInt64Field, _DecodeInt64Field)(
    messages.IntegerField)
def _EncodeDateField(field, value):
    """Encoder for datetime.date objects, emitting ISO-8601 strings."""
    if field.repeated:
        encoded = [day.isoformat() for day in value]
    else:
        encoded = value.isoformat()
    return encoding.CodecResult(value=encoded, complete=True)


def _DecodeDateField(unused_field, value):
    """Decoder for ISO-8601 (YYYY-MM-DD) date strings."""
    parsed = datetime.datetime.strptime(value, '%Y-%m-%d').date()
    return encoding.CodecResult(value=parsed, complete=True)


encoding.RegisterFieldTypeCodec(_EncodeDateField, _DecodeDateField)(DateField)
| python | Apache-2.0 | 1b14f58b3c895f542c724028dd0fb8ae0d816510 | 2026-01-05T07:12:01.390580Z | false |
google/apitools | https://github.com/google/apitools/blob/1b14f58b3c895f542c724028dd0fb8ae0d816510/apitools/base/py/encoding_test.py | apitools/base/py/encoding_test.py | # -*- coding: utf-8 -*-
# Copyright 2015 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import base64
import datetime
import json
import sys
import unittest
from apitools.base.protorpclite import message_types
from apitools.base.protorpclite import messages
from apitools.base.protorpclite import util
from apitools.base.py import encoding
from apitools.base.py import exceptions
from apitools.base.py import extra_types
class SimpleMessage(messages.Message):
    """Test fixture: one optional and one repeated string field."""
    field = messages.StringField(1)
    repfield = messages.StringField(2, repeated=True)
class BytesMessage(messages.Message):
    """Test fixture: one optional and one repeated bytes field."""
    field = messages.BytesField(1)
    repfield = messages.BytesField(2, repeated=True)
class TimeMessage(messages.Message):
    """Test fixture: a single DateTimeField (field number 3)."""
    timefield = message_types.DateTimeField(3)
@encoding.MapUnrecognizedFields('additionalProperties')
class AdditionalPropertiesMessage(messages.Message):
    """Test fixture: string->string map carried in additionalProperties."""
    class AdditionalProperty(messages.Message):
        """One key/value entry of the map."""
        key = messages.StringField(1)
        value = messages.StringField(2)
    additionalProperties = messages.MessageField(
        'AdditionalProperty', 1, repeated=True)
@encoding.MapUnrecognizedFields('additionalProperties')
class AdditionalIntPropertiesMessage(messages.Message):
    """Test fixture: string->integer map carried in additionalProperties."""
    class AdditionalProperty(messages.Message):
        """One key/value entry of the map."""
        key = messages.StringField(1)
        value = messages.IntegerField(2)
    additionalProperties = messages.MessageField(
        'AdditionalProperty', 1, repeated=True)
@encoding.MapUnrecognizedFields('additionalProperties')
class UnrecognizedEnumMessage(messages.Message):
class ThisEnum(messages.Enum):
VALUE_ONE = 1
VALUE_TWO = 2
class AdditionalProperty(messages.Message):
key = messages.StringField(1)
value = messages.EnumField('UnrecognizedEnumMessage.ThisEnum', 2)
additionalProperties = messages.MessageField(
AdditionalProperty, 1, repeated=True)
class CompoundPropertyType(messages.Message):
index = messages.IntegerField(1)
name = messages.StringField(2)
class MessageWithEnum(messages.Message):
class ThisEnum(messages.Enum):
VALUE_ONE = 1
VALUE_TWO = 2
field_one = messages.EnumField(ThisEnum, 1)
field_two = messages.EnumField(ThisEnum, 2, default=ThisEnum.VALUE_TWO)
field_three = messages.EnumField(ThisEnum, 3, repeated=True)
ignored_field = messages.EnumField(ThisEnum, 4)
@encoding.MapUnrecognizedFields('additionalProperties')
class AdditionalMessagePropertiesMessage(messages.Message):
class AdditionalProperty(messages.Message):
key = messages.StringField(1)
value = messages.MessageField(CompoundPropertyType, 2)
additionalProperties = messages.MessageField(
'AdditionalProperty', 1, repeated=True)
@encoding.MapUnrecognizedFields('additionalProperties')
class MapToMessageWithEnum(messages.Message):
class AdditionalProperty(messages.Message):
key = messages.StringField(1)
value = messages.MessageField(MessageWithEnum, 2)
additionalProperties = messages.MessageField(
'AdditionalProperty', 1, repeated=True)
@encoding.MapUnrecognizedFields('additionalProperties')
class NestedAdditionalPropertiesWithEnumMessage(messages.Message):
class AdditionalProperty(messages.Message):
key = messages.StringField(1)
value = messages.MessageField(
MapToMessageWithEnum, 2)
additionalProperties = messages.MessageField(
'AdditionalProperty', 1, repeated=True)
@encoding.MapUnrecognizedFields('additionalProperties')
class AdditionalPropertiesWithEnumMessage(messages.Message):
class AdditionalProperty(messages.Message):
key = messages.StringField(1)
value = messages.MessageField(MessageWithEnum, 2)
additionalProperties = messages.MessageField(
'AdditionalProperty', 1, repeated=True)
class NestedMapMessage(messages.Message):
msg_field = messages.MessageField(AdditionalPropertiesWithEnumMessage, 1)
class RepeatedNestedMapMessage(messages.Message):
map_field = messages.MessageField(NestedMapMessage, 1, repeated=True)
class NestedWithEnumMessage(messages.Message):
class ThisEnum(messages.Enum):
VALUE_ONE = 1
VALUE_TWO = 2
msg_field = messages.MessageField(MessageWithEnum, 1)
enum_field = messages.EnumField(ThisEnum, 2)
class RepeatedNestedMessage(messages.Message):
msg_field = messages.MessageField(SimpleMessage, 1, repeated=True)
@encoding.MapUnrecognizedFields('additionalProperties')
class MapToBytesValue(messages.Message):
class AdditionalProperty(messages.Message):
key = messages.StringField(1)
value = messages.BytesField(2)
additionalProperties = messages.MessageField('AdditionalProperty', 1,
repeated=True)
@encoding.MapUnrecognizedFields('additionalProperties')
class MapToDateTimeValue(messages.Message):
class AdditionalProperty(messages.Message):
key = messages.StringField(1)
value = message_types.DateTimeField(2)
additionalProperties = messages.MessageField('AdditionalProperty', 1,
repeated=True)
class HasNestedMessage(messages.Message):
nested = messages.MessageField(AdditionalPropertiesMessage, 1)
nested_list = messages.StringField(2, repeated=True)
class ExtraNestedMessage(messages.Message):
nested = messages.MessageField(HasNestedMessage, 1)
class MessageWithRemappings(messages.Message):
class SomeEnum(messages.Enum):
enum_value = 1
second_value = 2
enum_field = messages.EnumField(SomeEnum, 1)
double_encoding = messages.EnumField(SomeEnum, 2)
another_field = messages.StringField(3)
repeated_enum = messages.EnumField(SomeEnum, 4, repeated=True)
repeated_field = messages.StringField(5, repeated=True)
class MessageWithPackageAndRemappings(messages.Message):
class SomeEnum(messages.Enum):
enum_value = 1
second_value = 2
enum_field = messages.EnumField(SomeEnum, 1)
another_field = messages.StringField(2)
@encoding.MapUnrecognizedFields('additionalProperties')
class RepeatedJsonValueMessage(messages.Message):
class AdditionalProperty(messages.Message):
key = messages.StringField(1)
value = messages.MessageField(extra_types.JsonValue, 2, repeated=True)
additionalProperties = messages.MessageField('AdditionalProperty', 1,
repeated=True)
encoding.AddCustomJsonEnumMapping(MessageWithRemappings.SomeEnum,
'enum_value', 'wire_name')
encoding.AddCustomJsonFieldMapping(MessageWithRemappings,
'double_encoding', 'doubleEncoding')
encoding.AddCustomJsonFieldMapping(MessageWithRemappings,
'another_field', 'anotherField')
encoding.AddCustomJsonFieldMapping(MessageWithRemappings,
'repeated_field', 'repeatedField')
class EncodingTest(unittest.TestCase):
def testCopyProtoMessage(self):
msg = SimpleMessage(field='abc')
new_msg = encoding.CopyProtoMessage(msg)
self.assertEqual(msg.field, new_msg.field)
msg.field = 'def'
self.assertNotEqual(msg.field, new_msg.field)
def testCopyProtoMessageInvalidEnum(self):
json_msg = '{"field_one": "BAD_VALUE"}'
orig_msg = encoding.JsonToMessage(MessageWithEnum, json_msg)
new_msg = encoding.CopyProtoMessage(orig_msg)
for msg in (orig_msg, new_msg):
self.assertEqual(msg.all_unrecognized_fields(), ['field_one'])
self.assertEqual(
msg.get_unrecognized_field_info('field_one',
value_default=None),
('BAD_VALUE', messages.Variant.ENUM))
def testCopyProtoMessageInvalidRepeatedEnum(self):
json_msg = '{"field_three": ["VALUE_ONE", "BAD_VALUE"]}'
orig_msg = encoding.JsonToMessage(MessageWithEnum, json_msg)
new_msg = encoding.CopyProtoMessage(orig_msg)
for msg in (orig_msg, new_msg):
self.assertEqual(msg.all_unrecognized_fields(), ['field_three'])
self.assertEqual(
msg.get_unrecognized_field_info('field_three',
value_default=None),
(['VALUE_ONE', 'BAD_VALUE'], messages.Variant.ENUM))
def testCopyProtoMessageAdditionalProperties(self):
msg = AdditionalPropertiesMessage(additionalProperties=[
AdditionalPropertiesMessage.AdditionalProperty(
key='key', value='value')])
new_msg = encoding.CopyProtoMessage(msg)
self.assertEqual(len(new_msg.additionalProperties), 1)
self.assertEqual(new_msg.additionalProperties[0].key, 'key')
self.assertEqual(new_msg.additionalProperties[0].value, 'value')
def testCopyProtoMessageMappingInvalidEnum(self):
json_msg = '{"key_one": {"field_one": "BAD_VALUE"}}'
orig_msg = encoding.JsonToMessage(MapToMessageWithEnum, json_msg)
new_msg = encoding.CopyProtoMessage(orig_msg)
for msg in (orig_msg, new_msg):
self.assertEqual(
msg.additionalProperties[0].value.all_unrecognized_fields(),
['field_one'])
self.assertEqual(
msg.additionalProperties[0].value.get_unrecognized_field_info(
'field_one', value_default=None),
('BAD_VALUE', messages.Variant.ENUM))
def testCopyProtoMessageMappingInvalidRepeatedEnum(self):
json_msg = '{"key_one": {"field_three": ["VALUE_ONE", "BAD_VALUE"]}}'
orig_msg = encoding.JsonToMessage(MapToMessageWithEnum, json_msg)
new_msg = encoding.CopyProtoMessage(orig_msg)
for msg in (orig_msg, new_msg):
self.assertEqual(
msg.additionalProperties[0].value.all_unrecognized_fields(),
['field_three'])
self.assertEqual(
msg.additionalProperties[0].value.get_unrecognized_field_info(
'field_three', value_default=None),
(['VALUE_ONE', 'BAD_VALUE'], messages.Variant.ENUM))
def testBytesEncoding(self):
b64_str = 'AAc+'
b64_msg = '{"field": "%s"}' % b64_str
urlsafe_b64_str = 'AAc-'
urlsafe_b64_msg = '{"field": "%s"}' % urlsafe_b64_str
data = base64.b64decode(b64_str)
msg = BytesMessage(field=data)
self.assertEqual(
msg, encoding.JsonToMessage(BytesMessage, urlsafe_b64_msg))
self.assertEqual(msg, encoding.JsonToMessage(BytesMessage, b64_msg))
self.assertEqual(urlsafe_b64_msg, encoding.MessageToJson(msg))
enc_rep_msg = '{"repfield": ["%(b)s", "%(b)s"]}' % {
'b': urlsafe_b64_str}
rep_msg = BytesMessage(repfield=[data, data])
self.assertEqual(
rep_msg, encoding.JsonToMessage(BytesMessage, enc_rep_msg))
self.assertEqual(enc_rep_msg, encoding.MessageToJson(rep_msg))
def testBase64RoundtripForMapFields(self):
raw_data = b'\xFF\x0F\x80'
encoded_data = '/w+A' # Has url-unsafe base64 characters
safe_encoded_data = base64.urlsafe_b64encode(raw_data).decode("utf-8")
self.assertEqual(raw_data, base64.b64decode(encoded_data))
# Use unsafe encoding, make sure we can load it.
json_data = '{"1st": "%s"}' % encoded_data
msg = encoding.JsonToMessage(MapToBytesValue, json_data)
self.assertEqual(raw_data, msg.additionalProperties[0].value)
# Now back to json and again to message
from_msg_json_data = encoding.MessageToJson(msg)
# Make sure now it is safe url encoded
self.assertEqual(safe_encoded_data,
json.loads(from_msg_json_data)['1st'])
# Make sure we can also load url safe encoded bytes.
redone_msg = encoding.JsonToMessage(MapToBytesValue,
from_msg_json_data)
# Still matches
self.assertEqual(raw_data, redone_msg.additionalProperties[0].value)
def testBytesEncodingInAMap(self):
# Leading bit is 1 should not be interpreted as unicode.
data1 = b'\xF0\x11\x0F'
data2 = b'\xFF\xFF\xFF'
msg = MapToBytesValue(
additionalProperties=[
MapToBytesValue.AdditionalProperty(key='1st', value=data1),
MapToBytesValue.AdditionalProperty(key='2nd', value=data2)
])
self.assertEqual(
'{"1st": "%s", "2nd": "%s"}' % (
base64.b64encode(data1, b'-_').decode("utf-8"),
base64.b64encode(data2, b'-_').decode("utf-8")),
encoding.MessageToJson(msg))
def testDateTimeEncodingInAMap(self):
msg = MapToDateTimeValue(
additionalProperties=[
MapToDateTimeValue.AdditionalProperty(
key='1st',
value=datetime.datetime(
2014, 7, 2, 23, 33, 25, 541000,
tzinfo=util.TimeZoneOffset(datetime.timedelta(0)))),
MapToDateTimeValue.AdditionalProperty(
key='2nd',
value=datetime.datetime(
2015, 7, 2, 23, 33, 25, 541000,
tzinfo=util.TimeZoneOffset(datetime.timedelta(0))))
])
self.assertEqual(
'{"1st": "2014-07-02T23:33:25.541000+00:00",'
' "2nd": "2015-07-02T23:33:25.541000+00:00"}',
encoding.MessageToJson(msg))
def testInvalidEnumEncodingInAMap(self):
json_msg = '{"key_one": {"field_one": "BAD_VALUE"}}'
msg = encoding.JsonToMessage(MapToMessageWithEnum, json_msg)
new_msg = encoding.MessageToJson(msg)
self.assertEqual('{"key_one": {"field_one": "BAD_VALUE"}}', new_msg)
def testInvalidRepeatedEnumEncodingInAMap(self):
json_msg = '{"key_one": {"field_three": ["VALUE_ONE", "BAD_VALUE"]}}'
msg = encoding.JsonToMessage(MapToMessageWithEnum, json_msg)
new_msg = encoding.MessageToJson(msg)
self.assertEqual(
'{"key_one": {"field_three": ["VALUE_ONE", "BAD_VALUE"]}}', new_msg)
def testIncludeFields(self):
msg = SimpleMessage()
self.assertEqual('{}', encoding.MessageToJson(msg))
self.assertEqual(
'{"field": null}',
encoding.MessageToJson(msg, include_fields=['field']))
self.assertEqual(
'{"repfield": []}',
encoding.MessageToJson(msg, include_fields=['repfield']))
def testNestedIncludeFields(self):
msg = HasNestedMessage(
nested=AdditionalPropertiesMessage(
additionalProperties=[]))
self.assertEqual(
'{"nested": null}',
encoding.MessageToJson(msg, include_fields=['nested']))
self.assertEqual(
'{"nested": {"additionalProperties": []}}',
encoding.MessageToJson(
msg, include_fields=['nested.additionalProperties']))
msg = ExtraNestedMessage(nested=msg)
self.assertEqual(
'{"nested": {"nested": null}}',
encoding.MessageToJson(msg, include_fields=['nested.nested']))
# When clearing 'nested.nested_list', its sibling ('nested.nested')
# should remain unaffected.
self.assertIn(
encoding.MessageToJson(msg, include_fields=['nested.nested_list']),
['{"nested": {"nested": {}, "nested_list": []}}',
'{"nested": {"nested_list": [], "nested": {}}}'])
self.assertEqual(
'{"nested": {"nested": {"additionalProperties": []}}}',
encoding.MessageToJson(
msg, include_fields=['nested.nested.additionalProperties']))
def testAdditionalPropertyMapping(self):
msg = AdditionalPropertiesMessage()
msg.additionalProperties = [
AdditionalPropertiesMessage.AdditionalProperty(
key='key_one', value='value_one'),
AdditionalPropertiesMessage.AdditionalProperty(
key=u'key_twð', value='value_two'),
]
encoded_msg = encoding.MessageToJson(msg)
self.assertEqual(
{'key_one': 'value_one', u'key_twð': 'value_two'},
json.loads(encoded_msg))
new_msg = encoding.JsonToMessage(type(msg), encoded_msg)
self.assertEqual(
set(('key_one', u'key_twð')),
set([x.key for x in new_msg.additionalProperties]))
self.assertIsNot(msg, new_msg)
new_msg.additionalProperties.pop()
self.assertEqual(1, len(new_msg.additionalProperties))
self.assertEqual(2, len(msg.additionalProperties))
def testNumericPropertyName(self):
json_msg = '{"nested": {"123": "def"}}'
msg = encoding.JsonToMessage(HasNestedMessage, json_msg)
self.assertEqual(1, len(msg.nested.additionalProperties))
def testNumericPropertyValue(self):
json_msg = '{"key_one": "123"}'
msg = encoding.JsonToMessage(AdditionalIntPropertiesMessage, json_msg)
self.assertEqual(
AdditionalIntPropertiesMessage(
additionalProperties=[
AdditionalIntPropertiesMessage.AdditionalProperty(
key='key_one', value=123)]),
msg)
def testAdditionalMessageProperties(self):
json_msg = '{"input": {"index": 0, "name": "output"}}'
result = encoding.JsonToMessage(
AdditionalMessagePropertiesMessage, json_msg)
self.assertEqual(1, len(result.additionalProperties))
self.assertEqual(0, result.additionalProperties[0].value.index)
def testUnrecognizedEnum(self):
json_msg = '{"input": "VALUE_ONE"}'
result = encoding.JsonToMessage(
UnrecognizedEnumMessage, json_msg)
self.assertEqual(1, len(result.additionalProperties))
self.assertEqual(UnrecognizedEnumMessage.ThisEnum.VALUE_ONE,
result.additionalProperties[0].value)
def testNestedFieldMapping(self):
nested_msg = AdditionalPropertiesMessage()
nested_msg.additionalProperties = [
AdditionalPropertiesMessage.AdditionalProperty(
key='key_one', value='value_one'),
AdditionalPropertiesMessage.AdditionalProperty(
key='key_two', value='value_two'),
]
msg = HasNestedMessage(nested=nested_msg)
encoded_msg = encoding.MessageToJson(msg)
self.assertEqual(
{'nested': {'key_one': 'value_one', 'key_two': 'value_two'}},
json.loads(encoded_msg))
new_msg = encoding.JsonToMessage(type(msg), encoded_msg)
self.assertEqual(
set(('key_one', 'key_two')),
set([x.key for x in new_msg.nested.additionalProperties]))
new_msg.nested.additionalProperties.pop()
self.assertEqual(1, len(new_msg.nested.additionalProperties))
self.assertEqual(2, len(msg.nested.additionalProperties))
def testValidEnums(self):
message_json = '{"field_one": "VALUE_ONE"}'
message = encoding.JsonToMessage(MessageWithEnum, message_json)
self.assertEqual(MessageWithEnum.ThisEnum.VALUE_ONE, message.field_one)
self.assertEqual(MessageWithEnum.ThisEnum.VALUE_TWO, message.field_two)
self.assertEqual(json.loads(message_json),
json.loads(encoding.MessageToJson(message)))
def testIgnoredEnums(self):
json_with_typo = '{"field_one": "VALUE_OEN"}'
message = encoding.JsonToMessage(MessageWithEnum, json_with_typo)
self.assertEqual(None, message.field_one)
self.assertEqual(('VALUE_OEN', messages.Variant.ENUM),
message.get_unrecognized_field_info('field_one'))
self.assertEqual(json.loads(json_with_typo),
json.loads(encoding.MessageToJson(message)))
empty_json = ''
message = encoding.JsonToMessage(MessageWithEnum, empty_json)
self.assertEqual(None, message.field_one)
def testIgnoredEnumsWithDefaults(self):
json_with_typo = '{"field_two": "VALUE_OEN"}'
message = encoding.JsonToMessage(MessageWithEnum, json_with_typo)
self.assertEqual(MessageWithEnum.ThisEnum.VALUE_TWO, message.field_two)
self.assertEqual(json.loads(json_with_typo),
json.loads(encoding.MessageToJson(message)))
def testUnknownNestedRoundtrip(self):
json_message = '{"field": "abc", "submessage": {"a": 1, "b": "foo"}}'
message = encoding.JsonToMessage(SimpleMessage, json_message)
self.assertEqual(json.loads(json_message),
json.loads(encoding.MessageToJson(message)))
def testUnknownEnumNestedRoundtrip(self):
json_with_typo = ('{"outer_key": {"key_one": {"field_one": '
'"VALUE_OEN", "field_two": "VALUE_OEN", '
'"field_three": ["VALUE_ONE", "BAD_VALUE"]}}}')
msg = encoding.JsonToMessage(NestedAdditionalPropertiesWithEnumMessage,
json_with_typo)
self.assertEqual(json.loads(json_with_typo),
json.loads(encoding.MessageToJson(msg)))
def testJsonDatetime(self):
msg = TimeMessage(timefield=datetime.datetime(
2014, 7, 2, 23, 33, 25, 541000,
tzinfo=util.TimeZoneOffset(datetime.timedelta(0))))
self.assertEqual(
'{"timefield": "2014-07-02T23:33:25.541000+00:00"}',
encoding.MessageToJson(msg))
def testEnumRemapping(self):
msg = MessageWithRemappings(
enum_field=MessageWithRemappings.SomeEnum.enum_value)
json_message = encoding.MessageToJson(msg)
self.assertEqual('{"enum_field": "wire_name"}', json_message)
self.assertEqual(
msg, encoding.JsonToMessage(MessageWithRemappings, json_message))
def testRepeatedEnumRemapping(self):
msg = MessageWithRemappings(
repeated_enum=[
MessageWithRemappings.SomeEnum.enum_value,
MessageWithRemappings.SomeEnum.second_value,
])
json_message = encoding.MessageToJson(msg)
self.assertEqual('{"repeated_enum": ["wire_name", "second_value"]}',
json_message)
self.assertEqual(
msg, encoding.JsonToMessage(MessageWithRemappings, json_message))
def testFieldRemapping(self):
msg = MessageWithRemappings(another_field='abc')
json_message = encoding.MessageToJson(msg)
self.assertEqual('{"anotherField": "abc"}', json_message)
self.assertEqual(
msg, encoding.JsonToMessage(MessageWithRemappings, json_message))
def testFieldRemappingWithPackage(self):
this_module = sys.modules[__name__]
package_name = 'my_package'
try:
setattr(this_module, 'package', package_name)
encoding.AddCustomJsonFieldMapping(
MessageWithPackageAndRemappings,
'another_field', 'wire_field_name', package=package_name)
msg = MessageWithPackageAndRemappings(another_field='my value')
json_message = encoding.MessageToJson(msg)
self.assertEqual('{"wire_field_name": "my value"}', json_message)
self.assertEqual(
msg,
encoding.JsonToMessage(MessageWithPackageAndRemappings,
json_message))
finally:
delattr(this_module, 'package')
def testEnumRemappingWithPackage(self):
this_module = sys.modules[__name__]
package_name = 'my_package'
try:
setattr(this_module, 'package', package_name)
encoding.AddCustomJsonEnumMapping(
MessageWithPackageAndRemappings.SomeEnum,
'enum_value', 'other_wire_name', package=package_name)
msg = MessageWithPackageAndRemappings(
enum_field=MessageWithPackageAndRemappings.SomeEnum.enum_value)
json_message = encoding.MessageToJson(msg)
self.assertEqual('{"enum_field": "other_wire_name"}', json_message)
self.assertEqual(
msg,
encoding.JsonToMessage(MessageWithPackageAndRemappings,
json_message))
finally:
delattr(this_module, 'package')
def testRepeatedFieldRemapping(self):
msg = MessageWithRemappings(repeated_field=['abc', 'def'])
json_message = encoding.MessageToJson(msg)
self.assertEqual('{"repeatedField": ["abc", "def"]}', json_message)
self.assertEqual(
msg, encoding.JsonToMessage(MessageWithRemappings, json_message))
def testMultipleRemapping(self):
msg = MessageWithRemappings(
double_encoding=MessageWithRemappings.SomeEnum.enum_value)
json_message = encoding.MessageToJson(msg)
self.assertEqual('{"doubleEncoding": "wire_name"}', json_message)
self.assertEqual(
msg, encoding.JsonToMessage(MessageWithRemappings, json_message))
def testRepeatedRemapping(self):
# Should allow remapping if the mapping remains the same.
encoding.AddCustomJsonEnumMapping(MessageWithRemappings.SomeEnum,
'enum_value', 'wire_name')
encoding.AddCustomJsonFieldMapping(MessageWithRemappings,
'double_encoding', 'doubleEncoding')
encoding.AddCustomJsonFieldMapping(MessageWithRemappings,
'another_field', 'anotherField')
encoding.AddCustomJsonFieldMapping(MessageWithRemappings,
'repeated_field', 'repeatedField')
# Should raise errors if the remapping changes the mapping.
self.assertRaises(
exceptions.InvalidDataError,
encoding.AddCustomJsonFieldMapping,
MessageWithRemappings, 'double_encoding', 'something_else')
self.assertRaises(
exceptions.InvalidDataError,
encoding.AddCustomJsonFieldMapping,
MessageWithRemappings, 'enum_field', 'anotherField')
self.assertRaises(
exceptions.InvalidDataError,
encoding.AddCustomJsonEnumMapping,
MessageWithRemappings.SomeEnum, 'enum_value', 'another_name')
self.assertRaises(
exceptions.InvalidDataError,
encoding.AddCustomJsonEnumMapping,
MessageWithRemappings.SomeEnum, 'second_value', 'wire_name')
def testMessageToRepr(self):
# Using the same string returned by MessageToRepr, with the
# module names fixed.
# pylint: disable=bad-whitespace
msg = SimpleMessage(field='field', repfield=['field', 'field', ],)
# pylint: enable=bad-whitespace
self.assertEqual(
encoding.MessageToRepr(msg),
r"%s.SimpleMessage(field='field',repfield=['field','field',],)" % (
__name__,))
self.assertEqual(
encoding.MessageToRepr(msg, no_modules=True),
r"SimpleMessage(field='field',repfield=['field','field',],)")
def testMessageToReprWithTime(self):
msg = TimeMessage(timefield=datetime.datetime(
2014, 7, 2, 23, 33, 25, 541000,
tzinfo=util.TimeZoneOffset(datetime.timedelta(0))))
self.assertEqual(
encoding.MessageToRepr(msg, multiline=True),
('%s.TimeMessage(\n '
'timefield=datetime.datetime(2014, 7, 2, 23, 33, 25, 541000, '
'tzinfo=apitools.base.protorpclite.util.TimeZoneOffset('
'datetime.timedelta(0))),\n)') % __name__)
self.assertEqual(
encoding.MessageToRepr(msg, multiline=True, no_modules=True),
'TimeMessage(\n '
'timefield=datetime.datetime(2014, 7, 2, 23, 33, 25, 541000, '
'tzinfo=TimeZoneOffset(datetime.timedelta(0))),\n)')
def testRepeatedJsonValuesAsRepeatedProperty(self):
encoded_msg = '{"a": [{"one": 1}]}'
msg = encoding.JsonToMessage(RepeatedJsonValueMessage, encoded_msg)
self.assertEqual(encoded_msg, encoding.MessageToJson(msg))
def testDictToAdditionalPropertyMessage(self):
dict_ = {'key': 'value'}
encoded_msg = encoding.DictToAdditionalPropertyMessage(
dict_, AdditionalPropertiesMessage)
expected_msg = AdditionalPropertiesMessage()
expected_msg.additionalProperties = [
AdditionalPropertiesMessage.AdditionalProperty(
key='key', value='value')
]
self.assertEqual(encoded_msg, expected_msg)
def testDictToAdditionalPropertyMessageSorted(self):
tuples = [('key{0:02}'.format(i), 'value') for i in range(100)]
dict_ = dict(tuples)
encoded_msg = encoding.DictToAdditionalPropertyMessage(
dict_, AdditionalPropertiesMessage, sort_items=True)
expected_msg = AdditionalPropertiesMessage()
expected_msg.additionalProperties = [
AdditionalPropertiesMessage.AdditionalProperty(
key=key, value=value)
for key, value in tuples
]
self.assertEqual(encoded_msg, expected_msg)
def testDictToAdditionalPropertyMessageNumeric(self):
dict_ = {'key': 1}
encoded_msg = encoding.DictToAdditionalPropertyMessage(
dict_, AdditionalIntPropertiesMessage)
expected_msg = AdditionalIntPropertiesMessage()
expected_msg.additionalProperties = [
AdditionalIntPropertiesMessage.AdditionalProperty(
key='key', value=1)
]
self.assertEqual(encoded_msg, expected_msg)
def testUnrecognizedFieldIter(self):
m = encoding.DictToMessage({
'nested': {
'nested': {'a': 'b'},
'nested_list': ['foo'],
'extra_field': 'foo',
}
}, ExtraNestedMessage)
results = list(encoding.UnrecognizedFieldIter(m))
self.assertEqual(1, len(results))
edges, fields = results[0]
expected_edge = encoding.ProtoEdge(
encoding.EdgeType.SCALAR, 'nested', None)
self.assertEqual((expected_edge,), edges)
self.assertEqual(['extra_field'], fields)
def testUnrecognizedFieldIterRepeated(self):
m = encoding.DictToMessage({
'msg_field': [
{'field': 'foo'},
{'not_a_field': 'bar'}
]
}, RepeatedNestedMessage)
results = list(encoding.UnrecognizedFieldIter(m))
self.assertEqual(1, len(results))
edges, fields = results[0]
expected_edge = encoding.ProtoEdge(
encoding.EdgeType.REPEATED, 'msg_field', 1)
self.assertEqual((expected_edge,), edges)
self.assertEqual(['not_a_field'], fields)
def testUnrecognizedFieldIterNestedMap(self):
m = encoding.DictToMessage({
'map_field': [{
'msg_field': {
'foo': {'field_one': 1},
'bar': {'not_a_field': 1},
}
}]
}, RepeatedNestedMapMessage)
results = list(encoding.UnrecognizedFieldIter(m))
self.assertEqual(1, len(results))
edges, fields = results[0]
expected_edges = (
encoding.ProtoEdge(encoding.EdgeType.REPEATED, 'map_field', 0),
encoding.ProtoEdge(encoding.EdgeType.MAP, 'msg_field', 'bar'),
)
self.assertEqual(expected_edges, edges)
self.assertEqual(['not_a_field'], fields)
def testUnrecognizedFieldIterAbortAfterFirstError(self):
m = encoding.DictToMessage({
'msg_field': {'field_one': 3},
'enum_field': 3,
}, NestedWithEnumMessage)
self.assertEqual(1, len(list(encoding.UnrecognizedFieldIter(m))))
| python | Apache-2.0 | 1b14f58b3c895f542c724028dd0fb8ae0d816510 | 2026-01-05T07:12:01.390580Z | false |
google/apitools | https://github.com/google/apitools/blob/1b14f58b3c895f542c724028dd0fb8ae0d816510/apitools/base/py/transfer_test.py | apitools/base/py/transfer_test.py | # -*- coding: utf-8 -*-
#
# Copyright 2015 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for transfer.py."""
import string
import unittest
import httplib2
import json
import mock
import six
from six.moves import http_client
from apitools.base.py import base_api
from apitools.base.py import exceptions
from apitools.base.py import gzip
from apitools.base.py import http_wrapper
from apitools.base.py import transfer
class TransferTest(unittest.TestCase):
def assertRangeAndContentRangeCompatible(self, request, response):
request_prefix = 'bytes='
self.assertIn('range', request.headers)
self.assertTrue(request.headers['range'].startswith(request_prefix))
request_range = request.headers['range'][len(request_prefix):]
response_prefix = 'bytes '
self.assertIn('content-range', response.info)
response_header = response.info['content-range']
self.assertTrue(response_header.startswith(response_prefix))
response_range = (
response_header[len(response_prefix):].partition('/')[0])
msg = ('Request range ({0}) not a prefix of '
'response_range ({1})').format(
request_range, response_range)
self.assertTrue(response_range.startswith(request_range), msg=msg)
def testComputeEndByte(self):
total_size = 100
chunksize = 10
download = transfer.Download.FromStream(
six.StringIO(), chunksize=chunksize, total_size=total_size)
self.assertEqual(chunksize - 1,
download._Download__ComputeEndByte(0, end=50))
def testComputeEndByteReturnNone(self):
download = transfer.Download.FromStream(six.StringIO())
self.assertIsNone(
download._Download__ComputeEndByte(0, use_chunks=False))
def testComputeEndByteNoChunks(self):
total_size = 100
download = transfer.Download.FromStream(
six.StringIO(), chunksize=10, total_size=total_size)
for end in (None, 1000):
self.assertEqual(
total_size - 1,
download._Download__ComputeEndByte(0, end=end,
use_chunks=False),
msg='Failed on end={0}'.format(end))
def testComputeEndByteNoTotal(self):
download = transfer.Download.FromStream(six.StringIO())
default_chunksize = download.chunksize
for chunksize in (100, default_chunksize):
download.chunksize = chunksize
for start in (0, 10):
self.assertEqual(
download.chunksize + start - 1,
download._Download__ComputeEndByte(start),
msg='Failed on start={0}, chunksize={1}'.format(
start, chunksize))
def testComputeEndByteSmallTotal(self):
total_size = 100
download = transfer.Download.FromStream(six.StringIO(),
total_size=total_size)
for start in (0, 10):
self.assertEqual(total_size - 1,
download._Download__ComputeEndByte(start),
msg='Failed on start={0}'.format(start))
def testDownloadThenStream(self):
bytes_http = object()
http = object()
download_stream = six.StringIO()
download = transfer.Download.FromStream(download_stream,
total_size=26)
download.bytes_http = bytes_http
base_url = 'https://part.one/'
with mock.patch.object(http_wrapper, 'MakeRequest',
autospec=True) as make_request:
make_request.return_value = http_wrapper.Response(
info={
'content-range': 'bytes 0-25/26',
'status': http_client.OK,
},
content=string.ascii_lowercase,
request_url=base_url,
)
request = http_wrapper.Request(url='https://part.one/')
download.InitializeDownload(request, http=http)
self.assertEqual(1, make_request.call_count)
received_request = make_request.call_args[0][1]
self.assertEqual(base_url, received_request.url)
self.assertRangeAndContentRangeCompatible(
received_request, make_request.return_value)
with mock.patch.object(http_wrapper, 'MakeRequest',
autospec=True) as make_request:
make_request.return_value = http_wrapper.Response(
info={
'status': http_client.REQUESTED_RANGE_NOT_SATISFIABLE,
},
content='error',
request_url=base_url,
)
download.StreamInChunks()
self.assertEqual(1, make_request.call_count)
received_request = make_request.call_args[0][1]
self.assertEqual('bytes=26-', received_request.headers['range'])
def testGetRange(self):
for (start_byte, end_byte) in [(0, 25), (5, 15), (0, 0), (25, 25)]:
bytes_http = object()
http = object()
download_stream = six.StringIO()
download = transfer.Download.FromStream(download_stream,
total_size=26,
auto_transfer=False)
download.bytes_http = bytes_http
base_url = 'https://part.one/'
with mock.patch.object(http_wrapper, 'MakeRequest',
autospec=True) as make_request:
make_request.return_value = http_wrapper.Response(
info={
'content-range': 'bytes %d-%d/26' %
(start_byte, end_byte),
'status': http_client.OK,
},
content=string.ascii_lowercase[start_byte:end_byte + 1],
request_url=base_url,
)
request = http_wrapper.Request(url='https://part.one/')
download.InitializeDownload(request, http=http)
download.GetRange(start_byte, end_byte)
self.assertEqual(1, make_request.call_count)
received_request = make_request.call_args[0][1]
self.assertEqual(base_url, received_request.url)
self.assertRangeAndContentRangeCompatible(
received_request, make_request.return_value)
def testNonChunkedDownload(self):
bytes_http = object()
http = object()
download_stream = six.StringIO()
download = transfer.Download.FromStream(download_stream, total_size=52)
download.bytes_http = bytes_http
base_url = 'https://part.one/'
with mock.patch.object(http_wrapper, 'MakeRequest',
autospec=True) as make_request:
make_request.return_value = http_wrapper.Response(
info={
'content-range': 'bytes 0-51/52',
'status': http_client.OK,
},
content=string.ascii_lowercase * 2,
request_url=base_url,
)
request = http_wrapper.Request(url='https://part.one/')
download.InitializeDownload(request, http=http)
self.assertEqual(1, make_request.call_count)
received_request = make_request.call_args[0][1]
self.assertEqual(base_url, received_request.url)
self.assertRangeAndContentRangeCompatible(
received_request, make_request.return_value)
download_stream.seek(0)
self.assertEqual(string.ascii_lowercase * 2,
download_stream.getvalue())
    def testChunkedDownload(self):
        """A 52-byte download with chunksize=26 takes two range requests,
        following the content-location redirect between chunks."""
        bytes_http = object()
        http = object()
        download_stream = six.StringIO()
        download = transfer.Download.FromStream(
            download_stream, chunksize=26, total_size=52)
        download.bytes_http = bytes_http

        # Setting autospec on a mock with an iterable side_effect is
        # currently broken (http://bugs.python.org/issue17826), so
        # instead we write a little function.
        def _ReturnBytes(unused_http, http_request,
                         *unused_args, **unused_kwds):
            # Serve the first chunk from part.one (with a redirect via
            # content-location) and the second chunk from part.two.
            url = http_request.url
            if url == 'https://part.one/':
                return http_wrapper.Response(
                    info={
                        'content-location': 'https://part.two/',
                        'content-range': 'bytes 0-25/52',
                        'status': http_client.PARTIAL_CONTENT,
                    },
                    content=string.ascii_lowercase,
                    request_url='https://part.one/',
                )
            elif url == 'https://part.two/':
                return http_wrapper.Response(
                    info={
                        'content-range': 'bytes 26-51/52',
                        'status': http_client.OK,
                    },
                    content=string.ascii_uppercase,
                    request_url='https://part.two/',
                )
            else:
                self.fail('Unknown URL requested: %s' % url)

        with mock.patch.object(http_wrapper, 'MakeRequest',
                               autospec=True) as make_request:
            make_request.side_effect = _ReturnBytes
            request = http_wrapper.Request(url='https://part.one/')
            download.InitializeDownload(request, http=http)
            # Two chunks -> two requests.
            self.assertEqual(2, make_request.call_count)
            for call in make_request.call_args_list:
                # Re-run the fake to get the response each request received.
                self.assertRangeAndContentRangeCompatible(
                    call[0][1], _ReturnBytes(*call[0]))
        # The stream must hold both chunks in order.
        download_stream.seek(0)
        self.assertEqual(string.ascii_lowercase + string.ascii_uppercase,
                         download_stream.getvalue())
    # NOTE(review): stale commented-out decorator, apparently copied from the
    # Upload variant of this test (see UploadTest); confirm it can be deleted.
    # @mock.patch.object(transfer.Upload, 'RefreshResumableUploadState',
    #                    new=mock.Mock())
    def testFinalizesTransferUrlIfClientPresent(self):
        """Tests download's enforcement of client custom endpoints.

        FromData with a client must route the serialized 'url' through
        client.FinalizeTransferUrl so custom endpoints are honored.
        """
        mock_client = mock.Mock()
        fake_json_data = json.dumps({
            'auto_transfer': False,
            'progress': 0,
            'total_size': 0,
            'url': 'url',
        })
        transfer.Download.FromData(six.BytesIO(), fake_json_data,
                                   client=mock_client)
        mock_client.FinalizeTransferUrl.assert_called_once_with('url')
    def testMultipartEncoding(self):
        """Upload payloads must survive MIME/media encoding byte-for-byte."""
        # This is really a table test for various issues we've seen in
        # the past; see notes below for particular histories.
        test_cases = [
            # Python's mime module by default encodes lines that start
            # with "From " as ">From ", which we need to make sure we
            # don't run afoul of when sending content that isn't
            # intended to be so encoded. This test calls out that we
            # get this right. We test for both the multipart and
            # non-multipart case.
            'line one\nFrom \nline two',
            # We had originally used a `six.StringIO` to hold the http
            # request body in the case of a multipart upload; for
            # bytes being uploaded in Python3, however, this causes
            # issues like this:
            # https://github.com/GoogleCloudPlatform/gcloud-python/issues/1760
            # We test below to ensure that we don't end up mangling
            # the body before sending.
            u'name,main_ingredient\nRäksmörgås,Räkor\nBaguette,Bröd',
        ]
        for upload_contents in test_cases:
            multipart_body = '{"body_field_one": 7}'
            upload_bytes = upload_contents.encode('ascii', 'backslashreplace')
            upload_config = base_api.ApiUploadInfo(
                accept=['*/*'],
                max_size=None,
                resumable_multipart=True,
                resumable_path=u'/resumable/upload',
                simple_multipart=True,
                simple_path=u'/upload',
            )
            url_builder = base_api._UrlBuilder('http://www.uploads.com')

            # Test multipart: having a body argument in http_request forces
            # multipart here.
            upload = transfer.Upload.FromStream(
                six.BytesIO(upload_bytes),
                'text/plain',
                total_size=len(upload_bytes))
            http_request = http_wrapper.Request(
                'http://www.uploads.com',
                headers={'content-type': 'text/plain'},
                body=multipart_body)
            upload.ConfigureRequest(upload_config, http_request, url_builder)
            self.assertEqual(
                'multipart', url_builder.query_params['uploadType'])
            # The payload is the second MIME part: drop its header line
            # after the boundary and rejoin the remaining lines.
            rewritten_upload_contents = b'\n'.join(
                http_request.body.split(b'--')[2].splitlines()[1:])
            self.assertTrue(rewritten_upload_contents.endswith(upload_bytes))

            # Test non-multipart (aka media): no body argument means this is
            # sent as media.
            upload = transfer.Upload.FromStream(
                six.BytesIO(upload_bytes),
                'text/plain',
                total_size=len(upload_bytes))
            http_request = http_wrapper.Request(
                'http://www.uploads.com',
                headers={'content-type': 'text/plain'})
            upload.ConfigureRequest(upload_config, http_request, url_builder)
            self.assertEqual(url_builder.query_params['uploadType'], 'media')
            rewritten_upload_contents = http_request.body
            self.assertTrue(rewritten_upload_contents.endswith(upload_bytes))
class UploadTest(unittest.TestCase):
    """Tests for transfer.Upload: compression, chunking, retries, endpoints."""

    def setUp(self):
        # Sample highly compressible data.
        self.sample_data = b'abc' * 200
        # Stream of the sample data.
        self.sample_stream = six.BytesIO(self.sample_data)
        # Sample url_builder.
        self.url_builder = base_api._UrlBuilder('http://www.uploads.com')
        # Sample request.
        self.request = http_wrapper.Request(
            'http://www.uploads.com',
            headers={'content-type': 'text/plain'})
        # Sample successful response.
        self.response = http_wrapper.Response(
            info={'status': http_client.OK,
                  'location': 'http://www.uploads.com'},
            content='',
            request_url='http://www.uploads.com',)
        # Sample failure response.
        self.fail_response = http_wrapper.Response(
            info={'status': http_client.SERVICE_UNAVAILABLE,
                  'location': 'http://www.uploads.com'},
            content='',
            request_url='http://www.uploads.com',)

    def testStreamInChunksCompressed(self):
        """Test that StreamInChunks will handle compression correctly."""
        # Create and configure the upload object.
        upload = transfer.Upload(
            stream=self.sample_stream,
            mime_type='text/plain',
            total_size=len(self.sample_data),
            close_stream=False,
            gzip_encoded=True)
        upload.strategy = transfer.RESUMABLE_UPLOAD
        # Set the chunk size so the entire stream is uploaded.
        upload.chunksize = len(self.sample_data)
        # Mock the upload to return the sample response.
        with mock.patch.object(transfer.Upload,
                               '_Upload__SendMediaRequest') as mock_result, \
                mock.patch.object(http_wrapper,
                                  'MakeRequest') as make_request:
            mock_result.return_value = self.response
            make_request.return_value = self.response

            # Initialization.
            upload.InitializeUpload(self.request, 'http')
            upload.StreamInChunks()
            # Get the uploaded request and end position of the stream.
            (request, _), _ = mock_result.call_args_list[0]
            # Ensure the mock was called.
            self.assertTrue(mock_result.called)
            # Ensure the correct content encoding was set.
            self.assertEqual(request.headers['Content-Encoding'], 'gzip')
            # Ensure the stream was compressed.
            self.assertLess(len(request.body), len(self.sample_data))

    def testStreamMediaCompressedFail(self):
        """Test that non-chunked uploads raise an exception.

        Ensure uploads with the compressed and resumable flags set called from
        StreamMedia raise an exception. Those uploads are unsupported.
        """
        # Create the upload object.
        upload = transfer.Upload(
            stream=self.sample_stream,
            mime_type='text/plain',
            total_size=len(self.sample_data),
            close_stream=False,
            auto_transfer=True,
            gzip_encoded=True)
        upload.strategy = transfer.RESUMABLE_UPLOAD
        # Mock the upload to return the sample response.
        with mock.patch.object(http_wrapper,
                               'MakeRequest') as make_request:
            make_request.return_value = self.response

            # Initialization.
            upload.InitializeUpload(self.request, 'http')

            # Ensure stream media raises an exception when the upload is
            # compressed. Compression is not supported on non-chunked uploads.
            with self.assertRaises(exceptions.InvalidUserInputError):
                upload.StreamMedia()

    def testAutoTransferCompressed(self):
        """Test that automatic transfers are compressed.

        Ensure uploads with the compressed, resumable, and automatic transfer
        flags set call StreamInChunks. StreamInChunks is tested in an earlier
        test.
        """
        # Create the upload object.
        upload = transfer.Upload(
            stream=self.sample_stream,
            mime_type='text/plain',
            total_size=len(self.sample_data),
            close_stream=False,
            gzip_encoded=True)
        upload.strategy = transfer.RESUMABLE_UPLOAD
        # Mock the upload to return the sample response.
        with mock.patch.object(transfer.Upload,
                               'StreamInChunks') as mock_result, \
                mock.patch.object(http_wrapper,
                                  'MakeRequest') as make_request:
            mock_result.return_value = self.response
            make_request.return_value = self.response

            # Initialization.
            upload.InitializeUpload(self.request, 'http')

            # Ensure the mock was called.
            self.assertTrue(mock_result.called)

    def testMultipartCompressed(self):
        """Test that multipart uploads are compressed."""
        # Create the multipart configuration.
        upload_config = base_api.ApiUploadInfo(
            accept=['*/*'],
            max_size=None,
            simple_multipart=True,
            simple_path=u'/upload',)
        # Create the upload object.
        upload = transfer.Upload(
            stream=self.sample_stream,
            mime_type='text/plain',
            total_size=len(self.sample_data),
            close_stream=False,
            gzip_encoded=True)
        # Set a body to trigger multipart configuration.
        self.request.body = '{"body_field_one": 7}'
        # Configure the request.
        upload.ConfigureRequest(upload_config, self.request, self.url_builder)
        # Ensure the request is a multipart request now.
        self.assertEqual(
            self.url_builder.query_params['uploadType'], 'multipart')
        # Ensure the request is gzip encoded.
        self.assertEqual(self.request.headers['Content-Encoding'], 'gzip')
        # Ensure data is compressed
        self.assertLess(len(self.request.body), len(self.sample_data))
        # Ensure uncompressed data includes the sample data.
        with gzip.GzipFile(fileobj=six.BytesIO(self.request.body)) as f:
            original = f.read()
            self.assertTrue(self.sample_data in original)

    def testMediaCompressed(self):
        """Test that media uploads are compressed."""
        # Create the media configuration.
        upload_config = base_api.ApiUploadInfo(
            accept=['*/*'],
            max_size=None,
            simple_multipart=True,
            simple_path=u'/upload',)
        # Create the upload object.
        upload = transfer.Upload(
            stream=self.sample_stream,
            mime_type='text/plain',
            total_size=len(self.sample_data),
            close_stream=False,
            gzip_encoded=True)
        # Configure the request.
        upload.ConfigureRequest(upload_config, self.request, self.url_builder)
        # Ensure the request is a media request now.
        self.assertEqual(self.url_builder.query_params['uploadType'], 'media')
        # Ensure the request is gzip encoded.
        self.assertEqual(self.request.headers['Content-Encoding'], 'gzip')
        # Ensure data is compressed
        self.assertLess(len(self.request.body), len(self.sample_data))
        # Ensure uncompressed data includes the sample data.
        with gzip.GzipFile(fileobj=six.BytesIO(self.request.body)) as f:
            original = f.read()
            self.assertTrue(self.sample_data in original)

    def HttpRequestSideEffect(self, responses=None):
        """Build an httplib2.Http.request side effect serving `responses`.

        Each canned response is consumed in order; the side effect also
        verifies that each request's content-length header matches its body.
        """
        # NOTE(review): the responses=None default is unusable — iterating
        # None below raises TypeError; all callers pass a list. Confirm
        # whether the default can be dropped.
        responses = [(response.info, response.content)
                     for response in responses]

        def _side_effect(uri, **kwargs):  # pylint: disable=unused-argument
            body = kwargs['body']
            read_func = getattr(body, 'read', None)
            if read_func:
                # If the body is a stream, consume the stream.
                body = read_func()
            self.assertEqual(int(kwargs['headers']['content-length']),
                             len(body))
            return responses.pop(0)
        return _side_effect

    def testRetryRequestChunks(self):
        """Test that StreamInChunks will retry correctly."""
        refresh_response = http_wrapper.Response(
            info={'status': http_wrapper.RESUME_INCOMPLETE,
                  'location': 'http://www.uploads.com'},
            content='',
            request_url='http://www.uploads.com',)

        # Create and configure the upload object.
        bytes_http = httplib2.Http()
        upload = transfer.Upload(
            stream=self.sample_stream,
            mime_type='text/plain',
            total_size=len(self.sample_data),
            close_stream=False,
            http=bytes_http)
        upload.strategy = transfer.RESUMABLE_UPLOAD
        # Set the chunk size so the entire stream is uploaded.
        upload.chunksize = len(self.sample_data)
        # Mock the upload to return the sample response.
        with mock.patch.object(bytes_http,
                               'request') as make_request:
            # This side effect also checks the request body.
            responses = [
                self.response,  # Initial request in InitializeUpload().
                self.fail_response,  # 503 status code from server.
                refresh_response,  # Refresh upload progress.
                self.response,  # Successful request.
            ]
            make_request.side_effect = self.HttpRequestSideEffect(responses)

            # Initialization.
            upload.InitializeUpload(self.request, bytes_http)
            upload.StreamInChunks()

            # Ensure the mock was called the correct number of times.
            self.assertEqual(make_request.call_count, len(responses))

    def testStreamInChunks(self):
        """Test StreamInChunks."""
        resume_incomplete_responses = [http_wrapper.Response(
            info={'status': http_wrapper.RESUME_INCOMPLETE,
                  'location': 'http://www.uploads.com',
                  'range': '0-{}'.format(end)},
            content='',
            request_url='http://www.uploads.com',) for end in [199, 399, 599]]
        responses = [
            self.response  # Initial request in InitializeUpload().
        ] + resume_incomplete_responses + [
            self.response,  # Successful request.
        ]
        # Create and configure the upload object.
        bytes_http = httplib2.Http()
        upload = transfer.Upload(
            stream=self.sample_stream,
            mime_type='text/plain',
            total_size=len(self.sample_data),
            close_stream=False,
            http=bytes_http)
        upload.strategy = transfer.RESUMABLE_UPLOAD
        # Upload in 200-byte chunks (600-byte sample -> three chunks plus
        # the final request).
        upload.chunksize = 200
        # Mock the upload to return the sample response.
        with mock.patch.object(bytes_http,
                               'request') as make_request:
            # This side effect also checks the request body.
            make_request.side_effect = self.HttpRequestSideEffect(responses)

            # Initialization.
            upload.InitializeUpload(self.request, bytes_http)
            upload.StreamInChunks()

            # Ensure the mock was called the correct number of times.
            self.assertEqual(make_request.call_count, len(responses))

    @mock.patch.object(transfer.Upload, 'RefreshResumableUploadState',
                       new=mock.Mock())
    def testFinalizesTransferUrlIfClientPresent(self):
        """Tests upload's enforcement of client custom endpoints."""
        mock_client = mock.Mock()
        mock_http = mock.Mock()
        fake_json_data = json.dumps({
            'auto_transfer': False,
            'mime_type': '',
            'total_size': 0,
            'url': 'url',
        })
        transfer.Upload.FromData(self.sample_stream, fake_json_data, mock_http,
                                 client=mock_client)
        mock_client.FinalizeTransferUrl.assert_called_once_with('url')
| python | Apache-2.0 | 1b14f58b3c895f542c724028dd0fb8ae0d816510 | 2026-01-05T07:12:01.390580Z | false |
google/apitools | https://github.com/google/apitools/blob/1b14f58b3c895f542c724028dd0fb8ae0d816510/apitools/base/py/credentials_lib_test.py | apitools/base/py/credentials_lib_test.py | #
# Copyright 2015 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import os.path
import shutil
import tempfile
import unittest
import mock
import six
from apitools.base.py import credentials_lib
from apitools.base.py import util
class MetadataMock(object):
    """Callable standing in for credentials_lib._GceMetadataRequest.

    Maps the metadata-server URL suffixes used by GceAssertionCredentials
    to canned file-like responses.
    """

    def __init__(self, scopes=None, service_account_name=None):
        # Defaults mirror a default service account with a single scope.
        self._scopes = scopes or ['scope1']
        self._sa = service_account_name or 'default'

    def __call__(self, request_url):
        if request_url.endswith('scopes'):
            return six.StringIO(''.join(self._scopes))
        elif request_url.endswith('service-accounts'):
            return six.StringIO(self._sa)
        elif request_url.endswith(
                '/service-accounts/%s/token' % self._sa):
            return six.StringIO('{"access_token": "token"}')
        # Bug fix: this class is not a TestCase, so the original
        # `self.fail(...)` here raised AttributeError instead of reporting
        # the unexpected URL. Raise explicitly with the intended message.
        raise ValueError('Unexpected HTTP request to %s' % request_url)
class CredentialsLibTest(unittest.TestCase):
    """Tests for GceAssertionCredentials against a mocked metadata server."""

    def _RunGceAssertionCredentials(
            self, service_account_name=None, scopes=None, cache_filename=None):
        # Only pass explicitly-supplied options so the constructor's own
        # defaults are exercised otherwise.
        kwargs = {}
        if service_account_name is not None:
            kwargs['service_account_name'] = service_account_name
        if cache_filename is not None:
            kwargs['cache_filename'] = cache_filename
        service_account_name = service_account_name or 'default'
        credentials = credentials_lib.GceAssertionCredentials(
            scopes, **kwargs)
        self.assertIsNone(credentials._refresh(None))
        return credentials

    def _GetServiceCreds(self, service_account_name=None, scopes=None):
        # Force GCE detection and route metadata requests to MetadataMock.
        metadatamock = MetadataMock(scopes, service_account_name)
        with mock.patch.object(util, 'DetectGce', autospec=True) as gce_detect:
            gce_detect.return_value = True
            with mock.patch.object(credentials_lib,
                                   '_GceMetadataRequest',
                                   side_effect=metadatamock,
                                   autospec=True) as opener_mock:
                credentials = self._RunGceAssertionCredentials(
                    service_account_name=service_account_name,
                    scopes=scopes)
                # Creating + refreshing credentials hits the metadata
                # server exactly three times.
                self.assertEqual(3, opener_mock.call_count)
                return credentials

    def testGceServiceAccounts(self):
        scopes = ['scope1']
        # Exercise default/explicit account and scope combinations.
        self._GetServiceCreds(service_account_name=None,
                              scopes=None)
        self._GetServiceCreds(service_account_name=None,
                              scopes=scopes)
        self._GetServiceCreds(
            service_account_name='my_service_account',
            scopes=scopes)

    def testGceAssertionCredentialsToJson(self):
        scopes = ['scope1']
        service_account_name = 'my_service_account'
        # Ensure that we can obtain a JSON representation of
        # GceAssertionCredentials to put in a credential Storage object, and
        # that the JSON representation is valid.
        original_creds = self._GetServiceCreds(
            service_account_name=service_account_name,
            scopes=scopes)
        original_creds_json_str = original_creds.to_json()
        json.loads(original_creds_json_str)

    @mock.patch.object(util, 'DetectGce', autospec=True)
    def testGceServiceAccountsCached(self, mock_detect):
        mock_detect.return_value = True
        tempd = tempfile.mkdtemp()
        tempname = os.path.join(tempd, 'creds')
        scopes = ['scope1']
        service_account_name = 'some_service_account_name'
        metadatamock = MetadataMock(scopes, service_account_name)
        with mock.patch.object(credentials_lib,
                               '_GceMetadataRequest',
                               side_effect=metadatamock,
                               autospec=True) as opener_mock:
            try:
                # First run populates the cache file; second run should
                # read it back instead of re-querying the metadata server.
                creds1 = self._RunGceAssertionCredentials(
                    service_account_name=service_account_name,
                    cache_filename=tempname,
                    scopes=scopes)
                pre_cache_call_count = opener_mock.call_count
                creds2 = self._RunGceAssertionCredentials(
                    service_account_name=service_account_name,
                    cache_filename=tempname,
                    scopes=None)
            finally:
                shutil.rmtree(tempd)
            self.assertEqual(creds1.client_id, creds2.client_id)
            self.assertEqual(pre_cache_call_count, 3)
            # Caching obviates the need for extra metadata server requests.
            # Only one metadata request is made if the cache is hit.
            self.assertEqual(opener_mock.call_count, 4)

    def testGetServiceAccount(self):
        # We'd also like to test the metadata calls, which requires
        # having some knowledge about how HTTP calls are made (so that
        # we can mock them). It's unfortunate, but there's no way
        # around it.
        creds = self._GetServiceCreds()
        opener = mock.MagicMock()
        opener.open = mock.MagicMock()
        opener.open.return_value = six.StringIO('default/\nanother')
        with mock.patch.object(six.moves.urllib.request, 'build_opener',
                               return_value=opener,
                               autospec=True) as build_opener:
            creds.GetServiceAccount('default')
            self.assertEqual(1, build_opener.call_count)
            self.assertEqual(1, opener.open.call_count)
            req = opener.open.call_args[0][0]
            self.assertTrue(req.get_full_url().startswith(
                'http://metadata.google.internal/'))
            # The urllib module does weird things with header case.
            self.assertEqual('Google', req.get_header('Metadata-flavor'))

    def testGetAdcNone(self):
        # Tests that we correctly return None when ADC aren't present in
        # the well-known file.
        creds = credentials_lib._GetApplicationDefaultCredentials(
            client_info={'scope': ''})
        self.assertIsNone(creds)
class TestGetRunFlowFlags(unittest.TestCase):
    """Tests for _GetRunFlowFlags with and without gflags configured."""

    def setUp(self):
        # Stash the module-level FLAGS so tests can freely replace it.
        self._flags_actual = credentials_lib.FLAGS

    def tearDown(self):
        # Put the real FLAGS object back.
        credentials_lib.FLAGS = self._flags_actual

    def test_with_gflags(self):
        """With gflags present, argv values override the flag defaults."""
        host = 'myhostname'
        port = '144169'

        class MockFlags(object):
            auth_host_name = host
            auth_host_port = port
            auth_local_webserver = False

        credentials_lib.FLAGS = MockFlags
        parsed = credentials_lib._GetRunFlowFlags([
            '--auth_host_name=' + host,
            '--auth_host_port=' + port,
            '--noauth_local_webserver',
        ])
        self.assertEqual(host, parsed.auth_host_name)
        self.assertEqual(port, parsed.auth_host_port)
        self.assertEqual('ERROR', parsed.logging_level)
        self.assertEqual(True, parsed.noauth_local_webserver)

    def test_without_gflags(self):
        """Without gflags (FLAGS is None), built-in defaults are used."""
        credentials_lib.FLAGS = None
        parsed = credentials_lib._GetRunFlowFlags([])
        self.assertEqual('localhost', parsed.auth_host_name)
        self.assertEqual([8080, 8090], parsed.auth_host_port)
        self.assertEqual('ERROR', parsed.logging_level)
        self.assertEqual(False, parsed.noauth_local_webserver)
| python | Apache-2.0 | 1b14f58b3c895f542c724028dd0fb8ae0d816510 | 2026-01-05T07:12:01.390580Z | false |
google/apitools | https://github.com/google/apitools/blob/1b14f58b3c895f542c724028dd0fb8ae0d816510/apitools/base/py/list_pager_test.py | apitools/base/py/list_pager_test.py | #
# Copyright 2015 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for list_pager."""
import unittest
from apitools.base.py import list_pager
from apitools.base.py.testing import mock
from samples.fusiontables_sample.fusiontables_v1 \
import fusiontables_v1_client as fusiontables
from samples.fusiontables_sample.fusiontables_v1 \
import fusiontables_v1_messages as messages
from samples.iam_sample.iam_v1 import iam_v1_client as iam_client
from samples.iam_sample.iam_v1 import iam_v1_messages as iam_messages
class Example(object):
    """Simple attribute holder used by the nested getter/setter tests."""

    def __init__(self):
        # Each attribute's value is its name tripled: a -> 'aaa', etc.
        for attr in ('a', 'b', 'c'):
            setattr(self, attr, attr * 3)
class GetterSetterTest(unittest.TestCase):
    """Tests for list_pager's nested attribute helper functions."""

    def testGetattrNested(self):
        obj = Example()
        # A bare string and a one-element tuple address the same attribute.
        self.assertEqual('aaa', list_pager._GetattrNested(obj, 'a'))
        self.assertEqual('aaa', list_pager._GetattrNested(obj, ('a',)))
        # A two-element tuple walks into the nested object.
        obj.b = Example()
        self.assertEqual('ccc', list_pager._GetattrNested(obj, ('b', 'c')))

    def testSetattrNested(self):
        obj = Example()
        list_pager._SetattrNested(obj, 'b', Example())
        self.assertEqual('aaa', obj.b.a)
        list_pager._SetattrNested(obj, ('b', 'a'), 'AAA')
        self.assertEqual('AAA', obj.b.a)
        list_pager._SetattrNested(obj, ('c',), 'CCC')
        self.assertEqual('CCC', obj.c)
class ListPagerTest(unittest.TestCase):
    """Tests for list_pager.YieldFromList against a mocked fusiontables API."""

    def _AssertInstanceSequence(self, results, n):
        # Consume the generator and verify names run c0 .. c(n-1).
        counter = 0
        for instance in results:
            self.assertEqual(instance.name, 'c' + str(counter))
            counter += 1

        self.assertEqual(counter, n)

    def setUp(self):
        self.mocked_client = mock.Client(fusiontables.FusiontablesV1)
        self.mocked_client.Mock()
        self.addCleanup(self.mocked_client.Unmock)

    def testYieldFromList(self):
        # Two full pages linked by nextPageToken 'x'.
        self.mocked_client.column.List.Expect(
            messages.FusiontablesColumnListRequest(
                maxResults=100,
                pageToken=None,
                tableId='mytable',
            ),
            messages.ColumnList(
                items=[
                    messages.Column(name='c0'),
                    messages.Column(name='c1'),
                    messages.Column(name='c2'),
                    messages.Column(name='c3'),
                ],
                nextPageToken='x',
            ))
        self.mocked_client.column.List.Expect(
            messages.FusiontablesColumnListRequest(
                maxResults=100,
                pageToken='x',
                tableId='mytable',
            ),
            messages.ColumnList(
                items=[
                    messages.Column(name='c4'),
                    messages.Column(name='c5'),
                    messages.Column(name='c6'),
                    messages.Column(name='c7'),
                ],
            ))

        client = fusiontables.FusiontablesV1(get_credentials=False)
        request = messages.FusiontablesColumnListRequest(tableId='mytable')

        results = list_pager.YieldFromList(client.column, request)

        self._AssertInstanceSequence(results, 8)

    def testYieldNoRecords(self):
        # No Expect() is registered: with limit=False the pager should
        # yield nothing without issuing a request.
        client = fusiontables.FusiontablesV1(get_credentials=False)
        request = messages.FusiontablesColumnListRequest(tableId='mytable')

        results = list_pager.YieldFromList(client.column, request, limit=False)
        self.assertEqual(0, len(list(results)))

    def testYieldFromListPartial(self):
        # limit=6: first page asks for 6, second for the remaining 2.
        self.mocked_client.column.List.Expect(
            messages.FusiontablesColumnListRequest(
                maxResults=6,
                pageToken=None,
                tableId='mytable',
            ),
            messages.ColumnList(
                items=[
                    messages.Column(name='c0'),
                    messages.Column(name='c1'),
                    messages.Column(name='c2'),
                    messages.Column(name='c3'),
                ],
                nextPageToken='x',
            ))
        self.mocked_client.column.List.Expect(
            messages.FusiontablesColumnListRequest(
                maxResults=2,
                pageToken='x',
                tableId='mytable',
            ),
            messages.ColumnList(
                items=[
                    messages.Column(name='c4'),
                    messages.Column(name='c5'),
                    messages.Column(name='c6'),
                    messages.Column(name='c7'),
                ],
            ))

        client = fusiontables.FusiontablesV1(get_credentials=False)
        request = messages.FusiontablesColumnListRequest(tableId='mytable')

        results = list_pager.YieldFromList(client.column, request, limit=6)

        self._AssertInstanceSequence(results, 6)

    def testYieldFromListPaging(self):
        # batch_size=5 with limit=9: pages of 5 then the remaining 4.
        self.mocked_client.column.List.Expect(
            messages.FusiontablesColumnListRequest(
                maxResults=5,
                pageToken=None,
                tableId='mytable',
            ),
            messages.ColumnList(
                items=[
                    messages.Column(name='c0'),
                    messages.Column(name='c1'),
                    messages.Column(name='c2'),
                    messages.Column(name='c3'),
                    messages.Column(name='c4'),
                ],
                nextPageToken='x',
            ))
        self.mocked_client.column.List.Expect(
            messages.FusiontablesColumnListRequest(
                maxResults=4,
                pageToken='x',
                tableId='mytable',
            ),
            messages.ColumnList(
                items=[
                    messages.Column(name='c5'),
                    messages.Column(name='c6'),
                    messages.Column(name='c7'),
                    messages.Column(name='c8'),
                ],
            ))

        client = fusiontables.FusiontablesV1(get_credentials=False)
        request = messages.FusiontablesColumnListRequest(tableId='mytable')

        results = list_pager.YieldFromList(client.column,
                                           request,
                                           limit=9,
                                           batch_size=5)

        self._AssertInstanceSequence(results, 9)

    def testYieldFromListBatchSizeNone(self):
        # batch_size=None leaves maxResults unset; limit=5 truncates the
        # 7-item page client-side.
        self.mocked_client.column.List.Expect(
            messages.FusiontablesColumnListRequest(
                maxResults=None,
                pageToken=None,
                tableId='mytable',
            ),
            messages.ColumnList(
                items=[
                    messages.Column(name='c0'),
                    messages.Column(name='c1'),
                    messages.Column(name='c2'),
                    messages.Column(name='c3'),
                    messages.Column(name='c4'),
                    messages.Column(name='c5'),
                    messages.Column(name='c6'),
                ],
                nextPageToken='x',
            ))

        client = fusiontables.FusiontablesV1(get_credentials=False)
        request = messages.FusiontablesColumnListRequest(tableId='mytable')

        results = list_pager.YieldFromList(client.column,
                                           request,
                                           limit=5,
                                           batch_size=None)

        self._AssertInstanceSequence(results, 5)

    def testYieldFromListEmpty(self):
        # An empty first page ends iteration immediately.
        self.mocked_client.column.List.Expect(
            messages.FusiontablesColumnListRequest(
                maxResults=6,
                pageToken=None,
                tableId='mytable',
            ),
            messages.ColumnList())

        client = fusiontables.FusiontablesV1(get_credentials=False)
        request = messages.FusiontablesColumnListRequest(tableId='mytable')

        results = list_pager.YieldFromList(client.column, request, limit=6)

        self._AssertInstanceSequence(results, 0)

    def testYieldFromListWithPredicate(self):
        # Only items whose name contains 'c' should be yielded.
        self.mocked_client.column.List.Expect(
            messages.FusiontablesColumnListRequest(
                maxResults=100,
                pageToken=None,
                tableId='mytable',
            ),
            messages.ColumnList(
                items=[
                    messages.Column(name='c0'),
                    messages.Column(name='bad0'),
                    messages.Column(name='c1'),
                    messages.Column(name='bad1'),
                ],
                nextPageToken='x',
            ))
        self.mocked_client.column.List.Expect(
            messages.FusiontablesColumnListRequest(
                maxResults=100,
                pageToken='x',
                tableId='mytable',
            ),
            messages.ColumnList(
                items=[
                    messages.Column(name='c2'),
                ],
            ))

        client = fusiontables.FusiontablesV1(get_credentials=False)
        request = messages.FusiontablesColumnListRequest(tableId='mytable')

        results = list_pager.YieldFromList(
            client.column, request, predicate=lambda x: 'c' in x.name)

        self._AssertInstanceSequence(results, 3)

    def testYieldFromListWithCustomGetFieldFunction(self):
        self.mocked_client.column.List.Expect(
            messages.FusiontablesColumnListRequest(
                maxResults=100,
                pageToken=None,
                tableId='mytable',
            ),
            messages.ColumnList(
                items=[
                    messages.Column(name='c0')
                ]
            ))
        # Record that the custom field getter is actually invoked.
        custom_getter_called = []

        def Custom_Getter(message, attribute):
            custom_getter_called.append(True)
            return getattr(message, attribute)

        client = fusiontables.FusiontablesV1(get_credentials=False)
        request = messages.FusiontablesColumnListRequest(tableId='mytable')

        results = list_pager.YieldFromList(
            client.column, request, get_field_func=Custom_Getter)

        self._AssertInstanceSequence(results, 1)
        self.assertEqual(1, len(custom_getter_called))
class ListPagerAttributeTest(unittest.TestCase):
    """YieldFromList with non-default paging attribute names (IAM API)."""

    def setUp(self):
        self.mocked_client = mock.Client(iam_client.IamV1)
        self.mocked_client.Mock()
        self.addCleanup(self.mocked_client.Unmock)

    def testYieldFromListWithAttributes(self):
        # Here the paging knobs are pageSize/pageToken rather than the
        # fusiontables-style maxResults.
        self.mocked_client.iamPolicies.GetPolicyDetails.Expect(
            iam_messages.GetPolicyDetailsRequest(
                pageSize=100,
                pageToken=None,
                fullResourcePath='myresource',
            ),
            iam_messages.GetPolicyDetailsResponse(
                policies=[
                    iam_messages.PolicyDetail(fullResourcePath='c0'),
                    iam_messages.PolicyDetail(fullResourcePath='c1'),
                ],
                nextPageToken='x',
            ))
        self.mocked_client.iamPolicies.GetPolicyDetails.Expect(
            iam_messages.GetPolicyDetailsRequest(
                pageSize=100,
                pageToken='x',
                fullResourcePath='myresource',
            ),
            iam_messages.GetPolicyDetailsResponse(
                policies=[
                    iam_messages.PolicyDetail(fullResourcePath='c2'),
                ],
            ))

        client = iam_client.IamV1(get_credentials=False)
        request = iam_messages.GetPolicyDetailsRequest(
            fullResourcePath='myresource')

        results = list_pager.YieldFromList(
            client.iamPolicies, request,
            batch_size_attribute='pageSize',
            method='GetPolicyDetails', field='policies')

        # Expect three items, c0..c2, across the two pages.
        i = 0
        for i, instance in enumerate(results):
            self.assertEqual('c{0}'.format(i), instance.fullResourcePath)
        self.assertEqual(2, i)

    def testYieldFromListWithNoBatchSizeAttribute(self):
        # batch_size_attribute=None: the request carries no page-size field.
        self.mocked_client.iamPolicies.GetPolicyDetails.Expect(
            iam_messages.GetPolicyDetailsRequest(
                pageToken=None,
                fullResourcePath='myresource',
            ),
            iam_messages.GetPolicyDetailsResponse(
                policies=[
                    iam_messages.PolicyDetail(fullResourcePath='c0'),
                    iam_messages.PolicyDetail(fullResourcePath='c1'),
                ],
            ))

        client = iam_client.IamV1(get_credentials=False)
        request = iam_messages.GetPolicyDetailsRequest(
            fullResourcePath='myresource')

        results = list_pager.YieldFromList(
            client.iamPolicies, request,
            batch_size_attribute=None,
            method='GetPolicyDetails', field='policies')

        i = 0
        for i, instance in enumerate(results):
            self.assertEqual('c{0}'.format(i), instance.fullResourcePath)
        self.assertEqual(1, i)
| python | Apache-2.0 | 1b14f58b3c895f542c724028dd0fb8ae0d816510 | 2026-01-05T07:12:01.390580Z | false |
google/apitools | https://github.com/google/apitools/blob/1b14f58b3c895f542c724028dd0fb8ae0d816510/apitools/base/py/http_wrapper_test.py | apitools/base/py/http_wrapper_test.py | #
# Copyright 2015 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for http_wrapper."""
import socket
import unittest
import httplib2
from six.moves import http_client
from mock import patch
from apitools.base.py import exceptions
from apitools.base.py import http_wrapper
# pylint: disable=ungrouped-imports
try:
from oauth2client.client import HttpAccessTokenRefreshError
from oauth2client.client import AccessTokenRefreshError
_TOKEN_REFRESH_STATUS_AVAILABLE = True
except ImportError:
from oauth2client.client import AccessTokenRefreshError
_TOKEN_REFRESH_STATUS_AVAILABLE = False
class _MockHttpRequest(object):
    """Bare request stand-in; the retry handlers under test only read url."""

    url = None
class _MockHttpResponse(object):
def __init__(self, status_code):
self.response = {'status': status_code}
class RaisesExceptionOnLen(object):
    """Object with a ``length`` method whose ``len()`` always blows up.

    Used to prove that Request consults ``length`` rather than ``len()``.
    """

    def length(self):
        return 1

    def __len__(self):
        # Any len() call on this object is a bug in the code under test.
        raise Exception('len() called unnecessarily')
class HttpWrapperTest(unittest.TestCase):
    def testRequestBodyUsesLengthProperty(self):
        # Request must consult the body's `length` attribute instead of
        # len(body); RaisesExceptionOnLen raises if len() is ever used.
        http_wrapper.Request(body=RaisesExceptionOnLen())
    def testRequestBodyWithLen(self):
        # A plain sized body (no `length` attribute) must also be accepted.
        http_wrapper.Request(body='burrito')
    @unittest.skipIf(not _TOKEN_REFRESH_STATUS_AVAILABLE,
                     'oauth2client<1.5 lacks HttpAccessTokenRefreshError.')
    def testExceptionHandlerHttpAccessTokenError(self):
        # A token-refresh error carrying a 503 status should be swallowed
        # by the handler (i.e. treated as retryable, no exception raised).
        exception_arg = HttpAccessTokenRefreshError(status=503)
        retry_args = http_wrapper.ExceptionRetryArgs(
            http={'connections': {}}, http_request=_MockHttpRequest(),
            exc=exception_arg, num_retries=0, max_retry_wait=0,
            total_wait_sec=0)

        # Disable time.sleep for this handler as it is called with
        # a minimum value of 1 second.
        with patch('time.sleep', return_value=None):
            http_wrapper.HandleExceptionsAndRebuildHttpConnections(
                retry_args)
@unittest.skipIf(not _TOKEN_REFRESH_STATUS_AVAILABLE,
'oauth2client<1.5 lacks HttpAccessTokenRefreshError.')
def testExceptionHandlerHttpAccessTokenErrorRaises(self):
exception_arg = HttpAccessTokenRefreshError(status=200)
retry_args = http_wrapper.ExceptionRetryArgs(
http={'connections': {}}, http_request=_MockHttpRequest(),
exc=exception_arg, num_retries=0, max_retry_wait=0,
total_wait_sec=0)
# Disable time.sleep for this handler as it is called with
# a minimum value of 1 second.
with self.assertRaises(HttpAccessTokenRefreshError):
with patch('time.sleep', return_value=None):
http_wrapper.HandleExceptionsAndRebuildHttpConnections(
retry_args)
def testExceptionHandlerAccessTokenErrorRaises(self):
exception_arg = AccessTokenRefreshError()
retry_args = http_wrapper.ExceptionRetryArgs(
http={'connections': {}}, http_request=_MockHttpRequest(),
exc=exception_arg, num_retries=0, max_retry_wait=0,
total_wait_sec=0)
# Disable time.sleep for this handler as it is called with
# a minimum value of 1 second.
with self.assertRaises(AccessTokenRefreshError):
with patch('time.sleep', return_value=None):
http_wrapper.HandleExceptionsAndRebuildHttpConnections(
retry_args)
def testDefaultExceptionHandler(self):
"""Ensures exception handles swallows (retries)"""
mock_http_content = 'content'.encode('utf8')
for exception_arg in (
http_client.BadStatusLine('line'),
http_client.IncompleteRead('partial'),
http_client.ResponseNotReady(),
socket.error(),
socket.gaierror(),
httplib2.ServerNotFoundError(),
ValueError(),
exceptions.RequestError(),
exceptions.BadStatusCodeError(
{'status': 503}, mock_http_content, 'url'),
exceptions.RetryAfterError(
{'status': 429}, mock_http_content, 'url', 0)):
retry_args = http_wrapper.ExceptionRetryArgs(
http={'connections': {}}, http_request=_MockHttpRequest(),
exc=exception_arg, num_retries=0, max_retry_wait=0,
total_wait_sec=0)
# Disable time.sleep for this handler as it is called with
# a minimum value of 1 second.
with patch('time.sleep', return_value=None):
http_wrapper.HandleExceptionsAndRebuildHttpConnections(
retry_args)
| python | Apache-2.0 | 1b14f58b3c895f542c724028dd0fb8ae0d816510 | 2026-01-05T07:12:01.390580Z | false |
google/apitools | https://github.com/google/apitools/blob/1b14f58b3c895f542c724028dd0fb8ae0d816510/apitools/base/py/batch_test.py | apitools/base/py/batch_test.py | #
# Copyright 2015 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for apitools.base.py.batch."""
import textwrap
import unittest
import mock
from six.moves import http_client
from six.moves import range # pylint:disable=redefined-builtin
from six.moves.urllib import parse
from apitools.base.py import batch
from apitools.base.py import exceptions
from apitools.base.py import http_wrapper
class FakeCredentials(object):
    """Credential double that merely counts refresh() invocations."""

    def __init__(self):
        # Incremented once per refresh() call; tests assert on this.
        self.num_refreshes = 0

    def refresh(self, _):
        """Record one refresh attempt; the http argument is ignored."""
        self.num_refreshes = self.num_refreshes + 1
class FakeHttp(object):
    """Http double exposing a ``request`` attribute like httplib2.Http."""

    class FakeRequest(object):
        """Request double; carries ``credentials`` only when one is given."""

        def __init__(self, credentials=None):
            # Leave the attribute absent when no credentials were supplied,
            # matching the original's conditional assignment.
            if credentials is None:
                return
            self.credentials = credentials

    def __init__(self, credentials=None):
        self.request = self.FakeRequest(credentials=credentials)
class FakeService(object):
    """A service for testing."""

    def GetMethodConfig(self, _):
        """Return an empty method config regardless of the method name."""
        return dict()

    def GetUploadConfig(self, _):
        """Return an empty upload config regardless of the method name."""
        return dict()

    # pylint: disable=unused-argument
    def PrepareHttpRequest(
            self, method_config, request, global_params, upload_config):
        """Echo back the request the test stashed in global_params."""
        return global_params['desired_request']
    # pylint: enable=unused-argument

    def ProcessHttpResponse(self, _, http_response):
        """Return the response untouched."""
        return http_response
class BatchTest(unittest.TestCase):
    """Tests for batch.BatchApiRequest and batch.BatchHttpRequest."""

    def assertUrlEqual(self, expected_url, provided_url):
        # Compare URLs with order-insensitive query strings: parse_qs
        # yields a dict, so differently ordered parameters still match.
        def parse_components(url):
            parsed = parse.urlsplit(url)
            query = parse.parse_qs(parsed.query)
            return parsed._replace(query=''), query

        expected_parse, expected_query = parse_components(expected_url)
        provided_parse, provided_query = parse_components(provided_url)
        self.assertEqual(expected_parse, provided_parse)
        self.assertEqual(expected_query, provided_query)

    def __ConfigureMock(self, mock_request, expected_request, response):
        # A list response means "return one element per MakeRequest call";
        # copy it so pop(0) below does not mutate the caller's list.
        if isinstance(response, list):
            response = list(response)

        def CheckRequest(_, request, **unused_kwds):
            self.assertUrlEqual(expected_request.url, request.url)
            self.assertEqual(expected_request.http_method, request.http_method)
            if isinstance(response, list):
                return response.pop(0)
            return response

        mock_request.side_effect = CheckRequest

    def testRequestServiceUnavailable(self):
        mock_service = FakeService()
        desired_url = 'https://www.example.com'
        batch_api_request = batch.BatchApiRequest(batch_url=desired_url,
                                                  retryable_codes=[])
        # The request to be added. The actual request sent will be somewhat
        # larger, as this is added to a batch.
        desired_request = http_wrapper.Request(desired_url, 'POST', {
            'content-type': 'multipart/mixed; boundary="None"',
            'content-length': 80,
        }, 'x' * 80)
        with mock.patch.object(http_wrapper, 'MakeRequest',
                               autospec=True) as mock_request:
            self.__ConfigureMock(
                mock_request,
                http_wrapper.Request(desired_url, 'POST', {
                    'content-type': 'multipart/mixed; boundary="None"',
                    'content-length': 419,
                }, 'x' * 419),
                http_wrapper.Response({
                    'status': '200',
                    'content-type': 'multipart/mixed; boundary="boundary"',
                }, textwrap.dedent("""\
                    --boundary
                    content-type: text/plain
                    content-id: <id+0>
                    HTTP/1.1 503 SERVICE UNAVAILABLE
                    nope
                    --boundary--"""), None))
            batch_api_request.Add(
                mock_service, 'unused', None,
                global_params={'desired_request': desired_request})
            api_request_responses = batch_api_request.Execute(
                FakeHttp(), sleep_between_polls=0)
            self.assertEqual(1, len(api_request_responses))
            # Make sure we didn't retry non-retryable code 503.
            self.assertEqual(1, mock_request.call_count)
            self.assertTrue(api_request_responses[0].is_error)
            self.assertIsNone(api_request_responses[0].response)
            self.assertIsInstance(api_request_responses[0].exception,
                                  exceptions.HttpError)

    def testSingleRequestInBatch(self):
        desired_url = 'https://www.example.com'
        callback_was_called = []

        def _Callback(response, exception):
            self.assertEqual({'status': '200'}, response.info)
            self.assertEqual('content', response.content)
            self.assertEqual(desired_url, response.request_url)
            self.assertIsNone(exception)
            callback_was_called.append(1)

        mock_service = FakeService()
        batch_api_request = batch.BatchApiRequest(batch_url=desired_url)
        # The request to be added. The actual request sent will be somewhat
        # larger, as this is added to a batch.
        desired_request = http_wrapper.Request(desired_url, 'POST', {
            'content-type': 'multipart/mixed; boundary="None"',
            'content-length': 80,
        }, 'x' * 80)
        with mock.patch.object(http_wrapper, 'MakeRequest',
                               autospec=True) as mock_request:
            self.__ConfigureMock(
                mock_request,
                http_wrapper.Request(desired_url, 'POST', {
                    'content-type': 'multipart/mixed; boundary="None"',
                    'content-length': 419,
                }, 'x' * 419),
                http_wrapper.Response({
                    'status': '200',
                    'content-type': 'multipart/mixed; boundary="boundary"',
                }, textwrap.dedent("""\
                    --boundary
                    content-type: text/plain
                    content-id: <id+0>
                    HTTP/1.1 200 OK
                    content
                    --boundary--"""), None))
            batch_api_request.Add(mock_service, 'unused', None, {
                'desired_request': desired_request,
            })
            api_request_responses = batch_api_request.Execute(
                FakeHttp(), batch_request_callback=_Callback)
            self.assertEqual(1, len(api_request_responses))
            self.assertEqual(1, mock_request.call_count)
            self.assertFalse(api_request_responses[0].is_error)
            response = api_request_responses[0].response
            self.assertEqual({'status': '200'}, response.info)
            self.assertEqual('content', response.content)
            self.assertEqual(desired_url, response.request_url)
            self.assertEqual(1, len(callback_was_called))

    def _MakeResponse(self, number_of_parts):
        # Build a canned multipart batch response with the given number of
        # 200-OK parts, one per content-id.
        return http_wrapper.Response(
            info={
                'status': '200',
                'content-type': 'multipart/mixed; boundary="boundary"',
            },
            content='--boundary\n' + '--boundary\n'.join(
                textwrap.dedent("""\
                    content-type: text/plain
                    content-id: <id+{0}>
                    HTTP/1.1 200 OK
                    response {0} content
                """)
                .format(i) for i in range(number_of_parts)) + '--boundary--',
            request_url=None,
        )

    def _MakeSampleRequest(self, url, name):
        # Pad the body to exactly 80 bytes so batch sizes are predictable.
        return http_wrapper.Request(url, 'POST', {
            'content-type': 'multipart/mixed; boundary="None"',
            'content-length': 80,
        }, '{0} {1}'.format(name, 'x' * (79 - len(name))))

    def testMultipleRequestInBatchWithMax(self):
        mock_service = FakeService()
        desired_url = 'https://www.example.com'
        batch_api_request = batch.BatchApiRequest(batch_url=desired_url)
        number_of_requests = 10
        max_batch_size = 3
        for i in range(number_of_requests):
            batch_api_request.Add(
                mock_service, 'unused', None,
                {'desired_request': self._MakeSampleRequest(
                    desired_url, 'Sample-{0}'.format(i))})
        responses = []
        # One canned response per batch; the final batch may be short.
        for i in range(0, number_of_requests, max_batch_size):
            responses.append(
                self._MakeResponse(
                    min(number_of_requests - i, max_batch_size)))
        with mock.patch.object(http_wrapper, 'MakeRequest',
                               autospec=True) as mock_request:
            self.__ConfigureMock(
                mock_request,
                expected_request=http_wrapper.Request(desired_url, 'POST', {
                    'content-type': 'multipart/mixed; boundary="None"',
                    'content-length': 1142,
                }, 'x' * 1142),
                response=responses)
            api_request_responses = batch_api_request.Execute(
                FakeHttp(), max_batch_size=max_batch_size)
            self.assertEqual(number_of_requests, len(api_request_responses))
            # Ceiling division: one MakeRequest call per batch.
            self.assertEqual(
                -(-number_of_requests // max_batch_size),
                mock_request.call_count)

    def testRefreshOnAuthFailure(self):
        mock_service = FakeService()
        desired_url = 'https://www.example.com'
        batch_api_request = batch.BatchApiRequest(batch_url=desired_url)
        # The request to be added. The actual request sent will be somewhat
        # larger, as this is added to a batch.
        desired_request = http_wrapper.Request(desired_url, 'POST', {
            'content-type': 'multipart/mixed; boundary="None"',
            'content-length': 80,
        }, 'x' * 80)
        with mock.patch.object(http_wrapper, 'MakeRequest',
                               autospec=True) as mock_request:
            # First call yields a 401 part; the second (after a credential
            # refresh) yields a 200 part.
            self.__ConfigureMock(
                mock_request,
                http_wrapper.Request(desired_url, 'POST', {
                    'content-type': 'multipart/mixed; boundary="None"',
                    'content-length': 419,
                }, 'x' * 419), [
                    http_wrapper.Response({
                        'status': '200',
                        'content-type': 'multipart/mixed; boundary="boundary"',
                    }, textwrap.dedent("""\
                        --boundary
                        content-type: text/plain
                        content-id: <id+0>
                        HTTP/1.1 401 UNAUTHORIZED
                        Invalid grant
                        --boundary--"""), None),
                    http_wrapper.Response({
                        'status': '200',
                        'content-type': 'multipart/mixed; boundary="boundary"',
                    }, textwrap.dedent("""\
                        --boundary
                        content-type: text/plain
                        content-id: <id+0>
                        HTTP/1.1 200 OK
                        content
                        --boundary--"""), None)
                ])
            batch_api_request.Add(mock_service, 'unused', None, {
                'desired_request': desired_request,
            })
            credentials = FakeCredentials()
            api_request_responses = batch_api_request.Execute(
                FakeHttp(credentials=credentials), sleep_between_polls=0)
            self.assertEqual(1, len(api_request_responses))
            # The 401 triggers exactly one refresh and one re-issue.
            self.assertEqual(2, mock_request.call_count)
            self.assertEqual(1, credentials.num_refreshes)
            self.assertFalse(api_request_responses[0].is_error)
            response = api_request_responses[0].response
            self.assertEqual({'status': '200'}, response.info)
            self.assertEqual('content', response.content)
            self.assertEqual(desired_url, response.request_url)

    def testNoAttempts(self):
        # With max_retries=0 nothing is sent: neither a response nor an
        # exception is recorded for the queued request.
        desired_url = 'https://www.example.com'
        batch_api_request = batch.BatchApiRequest(batch_url=desired_url)
        batch_api_request.Add(FakeService(), 'unused', None, {
            'desired_request': http_wrapper.Request(desired_url, 'POST', {
                'content-type': 'multipart/mixed; boundary="None"',
                'content-length': 80,
            }, 'x' * 80),
        })
        api_request_responses = batch_api_request.Execute(None, max_retries=0)
        self.assertEqual(1, len(api_request_responses))
        self.assertIsNone(api_request_responses[0].response)
        self.assertIsNone(api_request_responses[0].exception)

    def _DoTestConvertIdToHeader(self, test_id, expected_result):
        # expected_result contains a %s placeholder for the random base id.
        batch_request = batch.BatchHttpRequest('https://www.example.com')
        self.assertEqual(
            expected_result % batch_request._BatchHttpRequest__base_id,
            batch_request._ConvertIdToHeader(test_id))

    def testConvertIdSimple(self):
        self._DoTestConvertIdToHeader('blah', '<%s+blah>')

    def testConvertIdThatNeedsEscaping(self):
        self._DoTestConvertIdToHeader(' space1', '<%s+%%20space1>')

    def _DoTestConvertHeaderToId(self, header, expected_id):
        batch_request = batch.BatchHttpRequest('https://www.example.com')
        self.assertEqual(expected_id,
                         batch_request._ConvertHeaderToId(header))

    def testConvertHeaderToIdSimple(self):
        self._DoTestConvertHeaderToId('<hello+blah>', 'blah')

    def testConvertHeaderToIdWithLotsOfPlus(self):
        # Everything up to and including the last '+' is the base id.
        self._DoTestConvertHeaderToId('<a+++++plus>', 'plus')

    def _DoTestConvertInvalidHeaderToId(self, invalid_header):
        batch_request = batch.BatchHttpRequest('https://www.example.com')
        self.assertRaises(exceptions.BatchError,
                          batch_request._ConvertHeaderToId, invalid_header)

    def testHeaderWithoutAngleBrackets(self):
        self._DoTestConvertInvalidHeaderToId('1+1')

    def testHeaderWithoutPlus(self):
        self._DoTestConvertInvalidHeaderToId('<HEADER>')

    def testSerializeRequest(self):
        request = http_wrapper.Request(body='Hello World', headers={
            'content-type': 'protocol/version',
        })
        expected_serialized_request = '\n'.join([
            'GET HTTP/1.1',
            'Content-Type: protocol/version',
            'MIME-Version: 1.0',
            'content-length: 11',
            'Host: ',
            '',
            'Hello World',
        ])
        batch_request = batch.BatchHttpRequest('https://www.example.com')
        self.assertEqual(expected_serialized_request,
                         batch_request._SerializeRequest(request))

    def testSerializeRequestPreservesHeaders(self):
        # Now confirm that if an additional, arbitrary header is added
        # that it is successfully serialized to the request. Merely
        # check that it is included, because the order of the headers
        # in the request is arbitrary.
        request = http_wrapper.Request(body='Hello World', headers={
            'content-type': 'protocol/version',
            'key': 'value',
        })
        batch_request = batch.BatchHttpRequest('https://www.example.com')
        self.assertTrue(
            'key: value\n' in batch_request._SerializeRequest(request))

    def testSerializeRequestNoBody(self):
        request = http_wrapper.Request(body=None, headers={
            'content-type': 'protocol/version',
        })
        expected_serialized_request = '\n'.join([
            'GET HTTP/1.1',
            'Content-Type: protocol/version',
            'MIME-Version: 1.0',
            'Host: ',
            '',
            '',
        ])
        batch_request = batch.BatchHttpRequest('https://www.example.com')
        self.assertEqual(expected_serialized_request,
                         batch_request._SerializeRequest(request))

    def testSerializeRequestWithPathAndQueryParams(self):
        request = http_wrapper.Request(
            url='my/path?query=param',
            body='Hello World',
            headers={'content-type': 'protocol/version'})
        expected_serialized_request = '\n'.join([
            'GET my/path?query=param HTTP/1.1',
            'Content-Type: protocol/version',
            'MIME-Version: 1.0',
            'content-length: 11',
            'Host: ',
            '',
            'Hello World',
        ])
        batch_request = batch.BatchHttpRequest('https://www.example.com')
        self.assertEqual(expected_serialized_request,
                         batch_request._SerializeRequest(request))

    def testDeserializeRequest(self):
        serialized_payload = '\n'.join([
            'GET HTTP/1.1',
            'Content-Type: protocol/version',
            'MIME-Version: 1.0',
            'content-length: 11',
            'key: value',
            'Host: ',
            '',
            'Hello World',
        ])
        example_url = 'https://www.example.com'
        expected_response = http_wrapper.Response({
            'content-length': str(len('Hello World')),
            'Content-Type': 'protocol/version',
            'key': 'value',
            'MIME-Version': '1.0',
            'status': '',
            'Host': ''
        }, 'Hello World', example_url)
        batch_request = batch.BatchHttpRequest(example_url)
        self.assertEqual(
            expected_response,
            batch_request._DeserializeResponse(serialized_payload))

    def testNewId(self):
        # Ids are sequential decimal strings starting at '0'.
        batch_request = batch.BatchHttpRequest('https://www.example.com')
        for i in range(100):
            self.assertEqual(str(i), batch_request._NewId())

    def testAdd(self):
        batch_request = batch.BatchHttpRequest('https://www.example.com')
        for x in range(100):
            batch_request.Add(http_wrapper.Request(body=str(x)))
        # Each handler entry is keyed by its assigned id, which (for these
        # sequentially added requests) matches the request body.
        for key in batch_request._BatchHttpRequest__request_response_handlers:
            value = batch_request._BatchHttpRequest__request_response_handlers[
                key]
            self.assertEqual(key, value.request.body)
            self.assertFalse(value.request.url)
            self.assertEqual('GET', value.request.http_method)
            self.assertIsNone(value.response)
            self.assertIsNone(value.handler)

    def testInternalExecuteWithFailedRequest(self):
        # A non-2xx batch-level response surfaces as HttpError.
        with mock.patch.object(http_wrapper, 'MakeRequest',
                               autospec=True) as mock_request:
            self.__ConfigureMock(
                mock_request,
                http_wrapper.Request('https://www.example.com', 'POST', {
                    'content-type': 'multipart/mixed; boundary="None"',
                    'content-length': 80,
                }, 'x' * 80),
                http_wrapper.Response({'status': '300'}, None, None))
            batch_request = batch.BatchHttpRequest('https://www.example.com')
            self.assertRaises(
                exceptions.HttpError, batch_request._Execute, None)

    def testInternalExecuteWithNonMultipartResponse(self):
        # A 200 whose content-type is not multipart is a protocol error.
        with mock.patch.object(http_wrapper, 'MakeRequest',
                               autospec=True) as mock_request:
            self.__ConfigureMock(
                mock_request,
                http_wrapper.Request('https://www.example.com', 'POST', {
                    'content-type': 'multipart/mixed; boundary="None"',
                    'content-length': 80,
                }, 'x' * 80),
                http_wrapper.Response({
                    'status': '200',
                    'content-type': 'blah/blah'
                }, '', None))
            batch_request = batch.BatchHttpRequest('https://www.example.com')
            self.assertRaises(
                exceptions.BatchError, batch_request._Execute, None)

    def testInternalExecute(self):
        with mock.patch.object(http_wrapper, 'MakeRequest',
                               autospec=True) as mock_request:
            # Parts arrive out of order; matching is by content-id.
            self.__ConfigureMock(
                mock_request,
                http_wrapper.Request('https://www.example.com', 'POST', {
                    'content-type': 'multipart/mixed; boundary="None"',
                    'content-length': 583,
                }, 'x' * 583),
                http_wrapper.Response({
                    'status': '200',
                    'content-type': 'multipart/mixed; boundary="boundary"',
                }, textwrap.dedent("""\
                    --boundary
                    content-type: text/plain
                    content-id: <id+2>
                    HTTP/1.1 200 OK
                    Second response
                    --boundary
                    content-type: text/plain
                    content-id: <id+1>
                    HTTP/1.1 401 UNAUTHORIZED
                    First response
                    --boundary--"""), None))
            test_requests = {
                '1': batch.RequestResponseAndHandler(
                    http_wrapper.Request(body='first'), None, None),
                '2': batch.RequestResponseAndHandler(
                    http_wrapper.Request(body='second'), None, None),
            }
            batch_request = batch.BatchHttpRequest('https://www.example.com')
            batch_request._BatchHttpRequest__request_response_handlers = (
                test_requests)
            batch_request._Execute(FakeHttp())
            test_responses = (
                batch_request._BatchHttpRequest__request_response_handlers)
            self.assertEqual(http_client.UNAUTHORIZED,
                             test_responses['1'].response.status_code)
            self.assertEqual(http_client.OK,
                             test_responses['2'].response.status_code)
            self.assertIn(
                'First response', test_responses['1'].response.content)
            self.assertIn(
                'Second response', test_responses['2'].response.content)

    def testInternalExecuteWithEncodedResponse(self):
        # Byte payloads are decoded using the configured response_encoding.
        with mock.patch.object(http_wrapper, 'MakeRequest',
                               autospec=True) as mock_request:
            self.__ConfigureMock(
                mock_request,
                http_wrapper.Request('https://www.example.com', 'POST', {
                    'content-type': 'multipart/mixed; boundary="None"',
                    'content-length': 274,
                }, 'x' * 274),
                http_wrapper.Response({
                    'status': '200',
                    'content-type': 'multipart/mixed; boundary="boundary"',
                }, textwrap.dedent("""\
                    --boundary
                    content-type: text/plain
                    content-id: <id+1>
                    HTTP/1.1 200 OK
                    response
                    --boundary--""").encode('utf-8'), None))
            test_request = {
                '1': batch.RequestResponseAndHandler(
                    http_wrapper.Request(body='first'), None, None),
            }
            batch_request = batch.BatchHttpRequest('https://www.example.com',
                                                   response_encoding='utf-8')
            batch_request._BatchHttpRequest__request_response_handlers = (
                test_request)
            batch_request._Execute(FakeHttp())
            test_responses = (
                batch_request._BatchHttpRequest__request_response_handlers)
            self.assertEqual(http_client.OK,
                             test_responses['1'].response.status_code)
            self.assertIn(
                'response', test_responses['1'].response.content)

    def testPublicExecute(self):

        def LocalCallback(response, exception):
            self.assertEqual({'status': '418'}, response.info)
            self.assertEqual('Teapot', response.content)
            self.assertIsNone(response.request_url)
            self.assertIsInstance(exception, exceptions.HttpError)

        global_callback = mock.Mock()
        batch_request = batch.BatchHttpRequest(
            'https://www.example.com', global_callback)
        with mock.patch.object(batch.BatchHttpRequest, '_Execute',
                               autospec=True) as mock_execute:
            mock_execute.return_value = None
            test_requests = {
                '0': batch.RequestResponseAndHandler(
                    None,
                    http_wrapper.Response({'status': '200'}, 'Hello!', None),
                    None),
                '1': batch.RequestResponseAndHandler(
                    None,
                    http_wrapper.Response({'status': '418'}, 'Teapot', None),
                    LocalCallback),
            }
            batch_request._BatchHttpRequest__request_response_handlers = (
                test_requests)
            batch_request.Execute(None)
            # Global callback was called once per handler.
            self.assertEqual(len(test_requests), global_callback.call_count)
| python | Apache-2.0 | 1b14f58b3c895f542c724028dd0fb8ae0d816510 | 2026-01-05T07:12:01.390580Z | false |
google/apitools | https://github.com/google/apitools/blob/1b14f58b3c895f542c724028dd0fb8ae0d816510/apitools/base/py/encoding.py | apitools/base/py/encoding.py | #!/usr/bin/env python
#
# Copyright 2015 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Common code for converting proto to other formats, such as JSON."""
# pylint:disable=wildcard-import
from apitools.base.py.encoding_helper import *
import apitools.base.py.extra_types # pylint:disable=unused-import
# pylint:disable=undefined-all-variable
# Names re-exported from encoding_helper via the wildcard import above;
# listed explicitly so ``from ... encoding import *`` stays a stable API.
__all__ = [
    'CopyProtoMessage',
    'JsonToMessage',
    'MessageToJson',
    'DictToMessage',
    'MessageToDict',
    'PyValueToMessage',
    'MessageToPyValue',
    'MessageToRepr',
    'GetCustomJsonFieldMapping',
    'AddCustomJsonFieldMapping',
    'GetCustomJsonEnumMapping',
    'AddCustomJsonEnumMapping',
]
| python | Apache-2.0 | 1b14f58b3c895f542c724028dd0fb8ae0d816510 | 2026-01-05T07:12:01.390580Z | false |
google/apitools | https://github.com/google/apitools/blob/1b14f58b3c895f542c724028dd0fb8ae0d816510/apitools/base/py/__init__.py | apitools/base/py/__init__.py | #!/usr/bin/env python
#
# Copyright 2015 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Top-level imports for apitools base files."""
# pylint:disable=wildcard-import
# pylint:disable=redefined-builtin
from apitools.base.py.base_api import *
from apitools.base.py.batch import *
from apitools.base.py.credentials_lib import *
from apitools.base.py.encoding import *
from apitools.base.py.exceptions import *
from apitools.base.py.extra_types import *
from apitools.base.py.http_wrapper import *
from apitools.base.py.list_pager import *
from apitools.base.py.transfer import *
from apitools.base.py.util import *
try:
    # Optional internal-only extensions; absent from the public
    # distribution, so the ImportError is deliberately ignored.
    # pylint:disable=no-name-in-module
    from apitools.base.py.internal import *
except ImportError:
    pass
| python | Apache-2.0 | 1b14f58b3c895f542c724028dd0fb8ae0d816510 | 2026-01-05T07:12:01.390580Z | false |
google/apitools | https://github.com/google/apitools/blob/1b14f58b3c895f542c724028dd0fb8ae0d816510/apitools/base/py/transfer.py | apitools/base/py/transfer.py | #!/usr/bin/env python
#
# Copyright 2015 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Upload and download support for apitools."""
from __future__ import print_function
import email.generator as email_generator
import email.mime.multipart as mime_multipart
import email.mime.nonmultipart as mime_nonmultipart
import io
import json
import mimetypes
import os
import threading
import six
from six.moves import http_client
from apitools.base.py import buffered_stream
from apitools.base.py import compression
from apitools.base.py import exceptions
from apitools.base.py import http_wrapper
from apitools.base.py import stream_slice
from apitools.base.py import util
# Public names exported by ``from apitools.base.py.transfer import *``.
__all__ = [
    'Download',
    'Upload',
    'RESUMABLE_UPLOAD',
    'SIMPLE_UPLOAD',
    'DownloadProgressPrinter',
    'DownloadCompletePrinter',
    'UploadProgressPrinter',
    'UploadCompletePrinter',
]
# 5 MiB; uploads at or above this size default to the resumable protocol.
_RESUMABLE_UPLOAD_THRESHOLD = 5 << 20
SIMPLE_UPLOAD = 'simple'
RESUMABLE_UPLOAD = 'resumable'
def DownloadProgressPrinter(response, unused_download):
    """Print download progress based on response."""
    if 'content-range' not in response.info:
        print('Received %d bytes' % response.length)
    else:
        print('Received %s' % response.info['content-range'])
def DownloadCompletePrinter(unused_response, unused_download):
    """Print information about a completed download."""
    message = 'Download complete'
    print(message)
def UploadProgressPrinter(response, unused_upload):
    """Print upload progress based on response."""
    sent_range = response.info['range']
    print('Sent %s' % sent_range)
def UploadCompletePrinter(unused_response, unused_upload):
    """Print information about a completed upload."""
    message = 'Upload complete'
    print(message)
class _Transfer(object):
    """Generic bits common to Uploads and Downloads.

    Attributes:
      retry_func: Callable invoked on transient failures to rebuild the
          http connection and decide whether to retry.
      auto_transfer: Whether bytes are pumped automatically on
          initialization.
      chunksize: Default chunk size (bytes) used for transfers.
    """

    def __init__(self, stream, close_stream=False, chunksize=None,
                 auto_transfer=True, http=None, num_retries=5):
        self.__bytes_http = None
        self.__close_stream = close_stream
        self.__http = http
        self.__stream = stream
        self.__url = None
        # Placeholder; immediately overwritten through the validating
        # property setter below.
        self.__num_retries = 5
        # Let the @property do validation
        self.num_retries = num_retries
        self.retry_func = (
            http_wrapper.HandleExceptionsAndRebuildHttpConnections)
        self.auto_transfer = auto_transfer
        # Default chunk size is 1 MiB.
        self.chunksize = chunksize or 1048576

    def __repr__(self):
        return str(self)

    @property
    def close_stream(self):
        # True when this transfer owns the stream and must close it.
        return self.__close_stream

    @property
    def http(self):
        return self.__http

    @property
    def bytes_http(self):
        # Dedicated connection for byte transfers; falls back to the
        # primary http object when none has been configured.
        return self.__bytes_http or self.http

    @bytes_http.setter
    def bytes_http(self, value):
        self.__bytes_http = value

    @property
    def num_retries(self):
        return self.__num_retries

    @num_retries.setter
    def num_retries(self, value):
        # Reject non-integer and negative retry counts.
        util.Typecheck(value, six.integer_types)
        if value < 0:
            raise exceptions.InvalidDataError(
                'Cannot have negative value for num_retries')
        self.__num_retries = value

    @property
    def stream(self):
        return self.__stream

    @property
    def url(self):
        return self.__url

    def _Initialize(self, http, url):
        """Initialize this download by setting self.http and self.url.

        We want the user to be able to override self.http by having set
        the value in the constructor; in that case, we ignore the provided
        http.

        Args:
          http: An httplib2.Http instance or None.
          url: The url for this transfer.

        Returns:
          None. Initializes self.
        """
        self.EnsureUninitialized()
        if self.http is None:
            self.__http = http or http_wrapper.GetHttp()
        self.__url = url

    @property
    def initialized(self):
        # A transfer is usable once both its url and http are set.
        return self.url is not None and self.http is not None

    @property
    def _type_name(self):
        # Concrete subclass name, used in error messages.
        return type(self).__name__

    def EnsureInitialized(self):
        """Raise TransferInvalidError unless this transfer is initialized."""
        if not self.initialized:
            raise exceptions.TransferInvalidError(
                'Cannot use uninitialized %s' % self._type_name)

    def EnsureUninitialized(self):
        """Raise TransferInvalidError if this transfer is initialized."""
        if self.initialized:
            raise exceptions.TransferInvalidError(
                'Cannot re-initialize %s' % self._type_name)

    def __del__(self):
        # Close an owned stream when the transfer is garbage-collected.
        if self.__close_stream:
            self.__stream.close()

    def _ExecuteCallback(self, callback, response):
        # TODO(craigcitro): Push these into a queue.
        # Callbacks run on a daemonless background thread so the transfer
        # itself is not blocked.
        if callback is not None:
            threading.Thread(target=callback, args=(response, self)).start()
class Download(_Transfer):
"""Data for a single download.
Public attributes:
chunksize: default chunksize to use for transfers.
"""
_ACCEPTABLE_STATUSES = set((
http_client.OK,
http_client.NO_CONTENT,
http_client.PARTIAL_CONTENT,
http_client.REQUESTED_RANGE_NOT_SATISFIABLE,
))
_REQUIRED_SERIALIZATION_KEYS = set((
'auto_transfer', 'progress', 'total_size', 'url'))
def __init__(self, stream, progress_callback=None, finish_callback=None,
**kwds):
total_size = kwds.pop('total_size', None)
super(Download, self).__init__(stream, **kwds)
self.__initial_response = None
self.__progress = 0
self.__total_size = total_size
self.__encoding = None
self.progress_callback = progress_callback
self.finish_callback = finish_callback
@property
def progress(self):
return self.__progress
@property
def encoding(self):
return self.__encoding
@classmethod
def FromFile(cls, filename, overwrite=False, auto_transfer=True, **kwds):
"""Create a new download object from a filename."""
path = os.path.expanduser(filename)
if os.path.exists(path) and not overwrite:
raise exceptions.InvalidUserInputError(
'File %s exists and overwrite not specified' % path)
return cls(open(path, 'wb'), close_stream=True,
auto_transfer=auto_transfer, **kwds)
@classmethod
def FromStream(cls, stream, auto_transfer=True, total_size=None, **kwds):
"""Create a new Download object from a stream."""
return cls(stream, auto_transfer=auto_transfer, total_size=total_size,
**kwds)
@classmethod
def FromData(cls, stream, json_data, http=None, auto_transfer=None,
client=None, **kwds):
"""Create a new Download object from a stream and serialized data."""
info = json.loads(json_data)
missing_keys = cls._REQUIRED_SERIALIZATION_KEYS - set(info.keys())
if missing_keys:
raise exceptions.InvalidDataError(
'Invalid serialization data, missing keys: %s' % (
', '.join(missing_keys)))
download = cls.FromStream(stream, **kwds)
if auto_transfer is not None:
download.auto_transfer = auto_transfer
else:
download.auto_transfer = info['auto_transfer']
if client is not None:
url = client.FinalizeTransferUrl(info['url'])
else:
url = info['url']
setattr(download, '_Download__progress', info['progress'])
setattr(download, '_Download__total_size', info['total_size'])
download._Initialize( # pylint: disable=protected-access
http, url)
return download
@property
def serialization_data(self):
self.EnsureInitialized()
return {
'auto_transfer': self.auto_transfer,
'progress': self.progress,
'total_size': self.total_size,
'url': self.url,
}
@property
def total_size(self):
return self.__total_size
def __str__(self):
if not self.initialized:
return 'Download (uninitialized)'
return 'Download with %d/%s bytes transferred from url %s' % (
self.progress, self.total_size, self.url)
def ConfigureRequest(self, http_request, url_builder):
url_builder.query_params['alt'] = 'media'
# TODO(craigcitro): We need to send range requests because by
# default httplib2 stores entire reponses in memory. Override
# httplib2's download method (as gsutil does) so that this is not
# necessary.
http_request.headers['Range'] = 'bytes=0-%d' % (self.chunksize - 1,)
def __SetTotal(self, info):
"""Sets the total size based off info if possible otherwise 0."""
if 'content-range' in info:
_, _, total = info['content-range'].rpartition('/')
if total != '*':
self.__total_size = int(total)
# Note "total_size is None" means we don't know it; if no size
# info was returned on our initial range request, that means we
# have a 0-byte file. (That last statement has been verified
# empirically, but is not clearly documented anywhere.)
if self.total_size is None:
self.__total_size = 0
    def InitializeDownload(self, http_request, http=None, client=None):
        """Initialize this download by making a request.
        Args:
            http_request: The HttpRequest to use to initialize this download.
            http: The httplib2.Http instance for this request.
            client: If provided, let this client process the final URL before
                sending any additional requests. If client is provided and
                http is not, client.http will be used instead.
        """
        self.EnsureUninitialized()
        if http is None and client is None:
            raise exceptions.UserError('Must provide client or http.')
        http = http or client.http
        if client is not None:
            http_request.url = client.FinalizeTransferUrl(http_request.url)
        url = http_request.url
        if self.auto_transfer:
            # For auto-transfer we issue the first ranged request now so the
            # total size (and the first chunk of bytes) come back immediately.
            end_byte = self.__ComputeEndByte(0)
            self.__SetRangeHeader(http_request, 0, end_byte)
            response = http_wrapper.MakeRequest(
                self.bytes_http or http, http_request)
            if response.status_code not in self._ACCEPTABLE_STATUSES:
                raise exceptions.HttpError.FromResponse(response)
            # Stash the response; StreamMedia consumes it on its first
            # iteration instead of re-fetching the same range.
            self.__initial_response = response
            self.__SetTotal(response.info)
            # Prefer the redirected location, if any, for subsequent chunks.
            url = response.info.get('content-location', response.request_url)
        if client is not None:
            url = client.FinalizeTransferUrl(url)
        self._Initialize(http, url)
        # Unless the user has requested otherwise, we want to just
        # go ahead and pump the bytes now.
        if self.auto_transfer:
            self.StreamInChunks()
def __NormalizeStartEnd(self, start, end=None):
"""Normalizes start and end values based on total size."""
if end is not None:
if start < 0:
raise exceptions.TransferInvalidError(
'Cannot have end index with negative start index ' +
'[start=%d, end=%d]' % (start, end))
elif start >= self.total_size:
raise exceptions.TransferInvalidError(
'Cannot have start index greater than total size ' +
'[start=%d, total_size=%d]' % (start, self.total_size))
end = min(end, self.total_size - 1)
if end < start:
raise exceptions.TransferInvalidError(
'Range requested with end[%s] < start[%s]' % (end, start))
return start, end
else:
if start < 0:
start = max(0, start + self.total_size)
return start, self.total_size - 1
def __SetRangeHeader(self, request, start, end=None):
if start < 0:
request.headers['range'] = 'bytes=%d' % start
elif end is None or end < start:
request.headers['range'] = 'bytes=%d-' % start
else:
request.headers['range'] = 'bytes=%d-%d' % (start, end)
def __ComputeEndByte(self, start, end=None, use_chunks=True):
"""Compute the last byte to fetch for this request.
This is all based on the HTTP spec for Range and
Content-Range.
Note that this is potentially confusing in several ways:
* the value for the last byte is 0-based, eg "fetch 10 bytes
from the beginning" would return 9 here.
* if we have no information about size, and don't want to
use the chunksize, we'll return None.
See the tests for more examples.
Args:
start: byte to start at.
end: (int or None, default: None) Suggested last byte.
use_chunks: (bool, default: True) If False, ignore self.chunksize.
Returns:
Last byte to use in a Range header, or None.
"""
end_byte = end
if start < 0 and not self.total_size:
return end_byte
if use_chunks:
alternate = start + self.chunksize - 1
if end_byte is not None:
end_byte = min(end_byte, alternate)
else:
end_byte = alternate
if self.total_size:
alternate = self.total_size - 1
if end_byte is not None:
end_byte = min(end_byte, alternate)
else:
end_byte = alternate
return end_byte
def __GetChunk(self, start, end, additional_headers=None):
"""Retrieve a chunk, and return the full response."""
self.EnsureInitialized()
request = http_wrapper.Request(url=self.url)
self.__SetRangeHeader(request, start, end=end)
if additional_headers is not None:
request.headers.update(additional_headers)
return http_wrapper.MakeRequest(
self.bytes_http, request, retry_func=self.retry_func,
retries=self.num_retries)
    def __ProcessResponse(self, response):
        """Process response (by updating self and writing to self.stream)."""
        if response.status_code not in self._ACCEPTABLE_STATUSES:
            # We distinguish errors that mean we made a mistake in setting
            # up the transfer versus something we should attempt again.
            if response.status_code in (http_client.FORBIDDEN,
                                        http_client.NOT_FOUND):
                raise exceptions.HttpError.FromResponse(response)
            else:
                raise exceptions.TransferRetryError(response.content)
        if response.status_code in (http_client.OK,
                                    http_client.PARTIAL_CONTENT):
            # Prefer writing bytes; fall back to text for streams that
            # reject binary data.
            try:
                self.stream.write(six.ensure_binary(response.content))
            except TypeError:
                self.stream.write(six.ensure_text(response.content))
            self.__progress += response.length
            if response.info and 'content-encoding' in response.info:
                # TODO(craigcitro): Handle the case where this changes over a
                # download.
                self.__encoding = response.info['content-encoding']
        elif response.status_code == http_client.NO_CONTENT:
            # It's important to write something to the stream for the case
            # of a 0-byte download to a file, as otherwise python won't
            # create the file.
            # NOTE(review): this writes a text '' — for a binary-mode stream
            # this may raise TypeError; confirm against callers.
            self.stream.write('')
        return response
    def GetRange(self, start, end=None, additional_headers=None,
                 use_chunks=True):
        """Retrieve a given byte range from this download, inclusive.
        Range must be of one of these three forms:
        * 0 <= start, end = None: Fetch from start to the end of the file.
        * 0 <= start <= end: Fetch the bytes from start to end.
        * start < 0, end = None: Fetch the last -start bytes of the file.
        (These variations correspond to those described in the HTTP 1.1
        protocol for range headers in RFC 2616, sec. 14.35.1.)
        Args:
            start: (int) Where to start fetching bytes. (See above.)
            end: (int, optional) Where to stop fetching bytes. (See above.)
            additional_headers: (bool, optional) Any additional headers to
                pass with the request.
            use_chunks: (bool, default: True) If False, ignore self.chunksize
                and fetch this range in a single request.
        Returns:
            None. Streams bytes into self.stream.
        """
        self.EnsureInitialized()
        # Until total_size is known we cannot clamp the range; the first
        # response supplies it, after which (start, end) are re-normalized.
        progress_end_normalized = False
        if self.total_size is not None:
            progress, end_byte = self.__NormalizeStartEnd(start, end)
            progress_end_normalized = True
        else:
            progress = start
            end_byte = end
        while (not progress_end_normalized or end_byte is None or
               progress <= end_byte):
            end_byte = self.__ComputeEndByte(progress, end=end_byte,
                                             use_chunks=use_chunks)
            response = self.__GetChunk(progress, end_byte,
                                       additional_headers=additional_headers)
            if not progress_end_normalized:
                # First response: learn the total size, then clamp the
                # originally requested (start, end) against it.
                self.__SetTotal(response.info)
                progress, end_byte = self.__NormalizeStartEnd(start, end)
                progress_end_normalized = True
            response = self.__ProcessResponse(response)
            progress += response.length
            if response.length == 0:
                if response.status_code == http_client.OK:
                    # There can legitimately be no Content-Length header sent
                    # in some cases (e.g., when there's a Transfer-Encoding
                    # header) and if this was a 200 response (as opposed to
                    # 206 Partial Content) we know we're done now without
                    # looping further on received length.
                    return
                raise exceptions.TransferRetryError(
                    'Zero bytes unexpectedly returned in download response')
def StreamInChunks(self, callback=None, finish_callback=None,
additional_headers=None):
"""Stream the entire download in chunks."""
self.StreamMedia(callback=callback, finish_callback=finish_callback,
additional_headers=additional_headers,
use_chunks=True)
    def StreamMedia(self, callback=None, finish_callback=None,
                    additional_headers=None, use_chunks=True):
        """Stream the entire download.
        Args:
            callback: (default: None) Callback to call as each chunk is
                completed.
            finish_callback: (default: None) Callback to call when the
                download is complete.
            additional_headers: (default: None) Additional headers to
                include in fetching bytes.
            use_chunks: (bool, default: True) If False, ignore self.chunksize
                and stream this download in a single request.
        Returns:
            None. Streams bytes into self.stream.
        """
        callback = callback or self.progress_callback
        finish_callback = finish_callback or self.finish_callback
        self.EnsureInitialized()
        while True:
            if self.__initial_response is not None:
                # Reuse the response stashed by InitializeDownload rather
                # than re-fetching the first range; consume it exactly once.
                response = self.__initial_response
                self.__initial_response = None
            else:
                end_byte = self.__ComputeEndByte(self.progress,
                                                 use_chunks=use_chunks)
                response = self.__GetChunk(
                    self.progress, end_byte,
                    additional_headers=additional_headers)
            if self.total_size is None:
                self.__SetTotal(response.info)
            response = self.__ProcessResponse(response)
            self._ExecuteCallback(callback, response)
            # A 200 (as opposed to 206 Partial Content) response, or having
            # reached total_size, means the download is complete.
            if (response.status_code == http_client.OK or
                    self.progress >= self.total_size):
                break
        self._ExecuteCallback(finish_callback, response)
if six.PY3:
    class MultipartBytesGenerator(email_generator.BytesGenerator):
        """Generates a bytes Message object tree for multipart messages
        This is a BytesGenerator that has been modified to not attempt line
        termination character modification in the bytes payload. Known to
        work with the compat32 policy only. It may work on others, but not
        tested. The outfp object must accept bytes in its write method.
        """
        def _handle_text(self, msg):
            # If the string has surrogates the original source was bytes, so
            # just write it back out.
            if msg._payload is None:
                return
            self.write(msg._payload)
        def _encode(self, s):
            # surrogateescape round-trips undecodable bytes that were
            # smuggled into the str payload back to their original bytes.
            return s.encode('ascii', 'surrogateescape')
        # Default body handler
        _writeBody = _handle_text
class Upload(_Transfer):
"""Data for a single Upload.
Fields:
stream: The stream to upload.
mime_type: MIME type of the upload.
total_size: (optional) Total upload size for the stream.
close_stream: (default: False) Whether or not we should close the
stream when finished with the upload.
auto_transfer: (default: True) If True, stream all bytes as soon as
the upload is created.
"""
_REQUIRED_SERIALIZATION_KEYS = set((
'auto_transfer', 'mime_type', 'total_size', 'url'))
def __init__(self, stream, mime_type, total_size=None, http=None,
close_stream=False, chunksize=None, auto_transfer=True,
progress_callback=None, finish_callback=None,
gzip_encoded=False, **kwds):
super(Upload, self).__init__(
stream, close_stream=close_stream, chunksize=chunksize,
auto_transfer=auto_transfer, http=http, **kwds)
self.__complete = False
self.__final_response = None
self.__mime_type = mime_type
self.__progress = 0
self.__server_chunk_granularity = None
self.__strategy = None
self.__total_size = None
self.__gzip_encoded = gzip_encoded
self.progress_callback = progress_callback
self.finish_callback = finish_callback
self.total_size = total_size
    @property
    def progress(self):
        # Number of bytes of the stream uploaded so far.
        return self.__progress
@classmethod
def FromFile(cls, filename, mime_type=None, auto_transfer=True,
gzip_encoded=False, **kwds):
"""Create a new Upload object from a filename."""
path = os.path.expanduser(filename)
if not os.path.exists(path):
raise exceptions.NotFoundError('Could not find file %s' % path)
if not mime_type:
mime_type, _ = mimetypes.guess_type(path)
if mime_type is None:
raise exceptions.InvalidUserInputError(
'Could not guess mime type for %s' % path)
size = os.stat(path).st_size
return cls(open(path, 'rb'), mime_type, total_size=size,
close_stream=True, auto_transfer=auto_transfer,
gzip_encoded=gzip_encoded, **kwds)
@classmethod
def FromStream(cls, stream, mime_type, total_size=None, auto_transfer=True,
gzip_encoded=False, **kwds):
"""Create a new Upload object from a stream."""
if mime_type is None:
raise exceptions.InvalidUserInputError(
'No mime_type specified for stream')
return cls(stream, mime_type, total_size=total_size,
close_stream=False, auto_transfer=auto_transfer,
gzip_encoded=gzip_encoded, **kwds)
    @classmethod
    def FromData(cls, stream, json_data, http, auto_transfer=None,
                 gzip_encoded=False, client=None, **kwds):
        """Create a new Upload of stream from serialized json_data and http."""
        info = json.loads(json_data)
        missing_keys = cls._REQUIRED_SERIALIZATION_KEYS - set(info.keys())
        if missing_keys:
            raise exceptions.InvalidDataError(
                'Invalid serialization data, missing keys: %s' % (
                    ', '.join(missing_keys)))
        # total_size comes from the serialized state; callers may not
        # override it.
        if 'total_size' in kwds:
            raise exceptions.InvalidUserInputError(
                'Cannot override total_size on serialized Upload')
        upload = cls.FromStream(stream, info['mime_type'],
                                total_size=info.get('total_size'),
                                gzip_encoded=gzip_encoded, **kwds)
        # Resuming requires seeking past the already-uploaded prefix.
        if isinstance(stream, io.IOBase) and not stream.seekable():
            raise exceptions.InvalidUserInputError(
                'Cannot restart resumable upload on non-seekable stream')
        if auto_transfer is not None:
            upload.auto_transfer = auto_transfer
        else:
            upload.auto_transfer = info['auto_transfer']
        if client is not None:
            url = client.FinalizeTransferUrl(info['url'])
        else:
            url = info['url']
        # Only resumable uploads can be serialized, so restore as resumable.
        upload.strategy = RESUMABLE_UPLOAD
        upload._Initialize(  # pylint: disable=protected-access
            http, url)
        # Ask the server how many bytes it already has, then resume.
        upload.RefreshResumableUploadState()
        upload.EnsureInitialized()
        if upload.auto_transfer:
            upload.StreamInChunks()
        return upload
@property
def serialization_data(self):
self.EnsureInitialized()
if self.strategy != RESUMABLE_UPLOAD:
raise exceptions.InvalidDataError(
'Serialization only supported for resumable uploads')
return {
'auto_transfer': self.auto_transfer,
'mime_type': self.mime_type,
'total_size': self.total_size,
'url': self.url,
}
    @property
    def complete(self):
        # True once the upload has finished.
        return self.__complete
    @property
    def mime_type(self):
        # MIME type of the uploaded content, fixed at construction time.
        return self.__mime_type
def __str__(self):
if not self.initialized:
return 'Upload (uninitialized)'
return 'Upload with %d/%s bytes transferred for url %s' % (
self.progress, self.total_size or '???', self.url)
    @property
    def strategy(self):
        # Upload strategy: SIMPLE_UPLOAD, RESUMABLE_UPLOAD, or None if not
        # yet decided.
        return self.__strategy
@strategy.setter
def strategy(self, value):
if value not in (SIMPLE_UPLOAD, RESUMABLE_UPLOAD):
raise exceptions.UserError((
'Invalid value "%s" for upload strategy, must be one of '
'"simple" or "resumable".') % value)
self.__strategy = value
    @property
    def total_size(self):
        # Total number of bytes to upload, or None if unknown.
        return self.__total_size
    @total_size.setter
    def total_size(self, value):
        # Size may only change before the upload is initialized.
        self.EnsureUninitialized()
        self.__total_size = value
def __SetDefaultUploadStrategy(self, upload_config, http_request):
"""Determine and set the default upload strategy for this upload.
We generally prefer simple or multipart, unless we're forced to
use resumable. This happens when any of (1) the upload is too
large, (2) the simple endpoint doesn't support multipart requests
and we have metadata, or (3) there is no simple upload endpoint.
Args:
upload_config: Configuration for the upload endpoint.
http_request: The associated http request.
Returns:
None.
"""
if upload_config.resumable_path is None:
self.strategy = SIMPLE_UPLOAD
if self.strategy is not None:
return
strategy = SIMPLE_UPLOAD
if (self.total_size is not None and
self.total_size > _RESUMABLE_UPLOAD_THRESHOLD):
strategy = RESUMABLE_UPLOAD
if http_request.body and not upload_config.simple_multipart:
strategy = RESUMABLE_UPLOAD
if not upload_config.simple_path:
strategy = RESUMABLE_UPLOAD
self.strategy = strategy
    def ConfigureRequest(self, upload_config, http_request, url_builder):
        """Configure the request and url for this upload.

        Args:
            upload_config: Configuration for the upload endpoint (paths,
                size limits, accepted MIME ranges).
            http_request: The associated http request; its body/headers are
                rewritten in place according to the chosen strategy.
            url_builder: Mutable builder whose relative_path and
                query_params are set here.
        """
        # Validate total_size vs. max_size
        if (self.total_size and upload_config.max_size and
                self.total_size > upload_config.max_size):
            raise exceptions.InvalidUserInputError(
                'Upload too big: %s larger than max size %s' % (
                    self.total_size, upload_config.max_size))
        # Validate mime type
        if not util.AcceptableMimeType(upload_config.accept, self.mime_type):
            raise exceptions.InvalidUserInputError(
                'MIME type %s does not match any accepted MIME ranges %s' % (
                    self.mime_type, upload_config.accept))
        self.__SetDefaultUploadStrategy(upload_config, http_request)
        if self.strategy == SIMPLE_UPLOAD:
            url_builder.relative_path = upload_config.simple_path
            # A request body means metadata accompanies the media, which
            # requires a multipart/related upload.
            if http_request.body:
                url_builder.query_params['uploadType'] = 'multipart'
                self.__ConfigureMultipartRequest(http_request)
            else:
                url_builder.query_params['uploadType'] = 'media'
                self.__ConfigureMediaRequest(http_request)
            # Once the entire body is written, compress the body if configured
            # to. Both multipart and media request uploads will read the
            # entire stream into memory, which means full compression is also
            # safe to perform. Because the strategy is set to SIMPLE_UPLOAD,
            # StreamInChunks throws an exception, meaning double compression
            # cannot happen.
            if self.__gzip_encoded:
                http_request.headers['Content-Encoding'] = 'gzip'
                # Turn the body into a stream so that we can compress it, then
                # read the compressed bytes. In the event of a retry (e.g. if
                # our access token has expired), we need to be able to re-read
                # the body, which we can't do with a stream. So, we consume the
                # bytes from the stream now and store them in a re-readable
                # bytes container.
                http_request.body = (
                    compression.CompressStream(
                        six.BytesIO(http_request.body))[0].read())
        else:
            url_builder.relative_path = upload_config.resumable_path
            url_builder.query_params['uploadType'] = 'resumable'
            self.__ConfigureResumableRequest(http_request)
def __ConfigureMediaRequest(self, http_request):
"""Configure http_request as a simple request for this upload."""
http_request.headers['content-type'] = self.mime_type
http_request.body = self.stream.read()
http_request.loggable_body = '<media body>'
def __ConfigureMultipartRequest(self, http_request):
"""Configure http_request as a multipart request for this upload."""
# This is a multipart/related upload.
msg_root = mime_multipart.MIMEMultipart('related')
# msg_root should not write out its own headers
setattr(msg_root, '_write_headers', lambda self: None)
# attach the body as one part
msg = mime_nonmultipart.MIMENonMultipart(
*http_request.headers['content-type'].split('/'))
msg.set_payload(http_request.body)
msg_root.attach(msg)
# attach the media as the second part
msg = mime_nonmultipart.MIMENonMultipart(*self.mime_type.split('/'))
msg['Content-Transfer-Encoding'] = 'binary'
msg.set_payload(self.stream.read())
msg_root.attach(msg)
# NOTE: We encode the body, but can't use
# `email.message.Message.as_string` because it prepends
# `> ` to `From ` lines.
fp = six.BytesIO()
if six.PY3:
generator_class = MultipartBytesGenerator
else:
generator_class = email_generator.Generator
g = generator_class(fp, mangle_from_=False)
g.flatten(msg_root, unixfrom=False)
| python | Apache-2.0 | 1b14f58b3c895f542c724028dd0fb8ae0d816510 | 2026-01-05T07:12:01.390580Z | true |
google/apitools | https://github.com/google/apitools/blob/1b14f58b3c895f542c724028dd0fb8ae0d816510/apitools/base/py/buffered_stream_test.py | apitools/base/py/buffered_stream_test.py | #
# Copyright 2015 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for buffered_stream."""
import string
import unittest
import six
from apitools.base.py import buffered_stream
from apitools.base.py import exceptions
class BufferedStreamTest(unittest.TestCase):
    """Unit tests for buffered_stream.BufferedStream."""

    def setUp(self):
        self.stream = six.StringIO(string.ascii_letters)
        self.value = self.stream.getvalue()
        self.stream.seek(0)

    def testEmptyBuffer(self):
        buffered = buffered_stream.BufferedStream(self.stream, 0, 0)
        self.assertEqual('', buffered.read(0))
        self.assertEqual(0, buffered.stream_end_position)

    def testOffsetStream(self):
        # A non-zero start offset shifts stream_end_position accordingly.
        buffered = buffered_stream.BufferedStream(self.stream, 50, 100)
        self.assertEqual(len(self.value), len(buffered))
        self.assertEqual(self.value, buffered.read(len(self.value)))
        self.assertEqual(50 + len(self.value), buffered.stream_end_position)

    def testUnexhaustedStream(self):
        # A buffer smaller than the stream leaves the stream unexhausted.
        buffered = buffered_stream.BufferedStream(self.stream, 0, 50)
        self.assertEqual(50, buffered.stream_end_position)
        self.assertEqual(False, buffered.stream_exhausted)
        self.assertEqual(self.value[0:50], buffered.read(50))
        self.assertEqual(False, buffered.stream_exhausted)
        self.assertEqual('', buffered.read(0))
        self.assertEqual('', buffered.read(100))

    def testExhaustedStream(self):
        # A buffer at least as large as the stream exhausts it immediately.
        buffered = buffered_stream.BufferedStream(self.stream, 0, 100)
        self.assertEqual(len(self.value), buffered.stream_end_position)
        self.assertEqual(True, buffered.stream_exhausted)
        self.assertEqual(self.value, buffered.read(100))
        self.assertEqual('', buffered.read(0))
        self.assertEqual('', buffered.read(100))

    def testArbitraryLengthRead(self):
        # Unbounded reads are not supported.
        buffered = buffered_stream.BufferedStream(self.stream, 0, 20)
        with self.assertRaises(exceptions.NotYetImplementedError):
            buffered.read()
        with self.assertRaises(exceptions.NotYetImplementedError):
            buffered.read(size=-1)
| python | Apache-2.0 | 1b14f58b3c895f542c724028dd0fb8ae0d816510 | 2026-01-05T07:12:01.390580Z | false |
google/apitools | https://github.com/google/apitools/blob/1b14f58b3c895f542c724028dd0fb8ae0d816510/apitools/base/py/batch.py | apitools/base/py/batch.py | #!/usr/bin/env python
#
# Copyright 2015 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Library for handling batch HTTP requests for apitools."""
import collections
import email.generator as generator
import email.mime.multipart as mime_multipart
import email.mime.nonmultipart as mime_nonmultipart
import email.parser as email_parser
import itertools
import time
import uuid
import six
from six.moves import http_client
from six.moves import urllib_parse
from six.moves import range # pylint: disable=redefined-builtin
from apitools.base.py import exceptions
from apitools.base.py import http_wrapper
__all__ = [
'BatchApiRequest',
]
class RequestResponseAndHandler(collections.namedtuple(
        'RequestResponseAndHandler', ['request', 'response', 'handler'])):
    """Bundle of one HTTP request, its response, and a response handler.

    Attributes:
        request: An http_wrapper.Request object representing the HTTP
            request.
        response: The http_wrapper.Response object returned from the server.
        handler: A callback function accepting two arguments, response
            and exception. Response is an http_wrapper.Response object, and
            exception is an apiclient.errors.HttpError object if an error
            occurred, or otherwise None.
    """
class BatchApiRequest(object):
    """Batches multiple api requests into a single request."""
    class ApiCall(object):
        """Holds request and response information for each request.
        ApiCalls are ultimately exposed to the client once the HTTP
        batch request has been completed.
        Attributes:
            http_request: A client-supplied http_wrapper.Request to be
                submitted to the server.
            response: A http_wrapper.Response object given by the server as a
                response to the user request, or None if an error occurred.
            exception: An apiclient.errors.HttpError object if an error
                occurred, or None.
        """
        def __init__(self, request, retryable_codes, service, method_config):
            """Initialize an individual API request.
            Args:
                request: An http_wrapper.Request object.
                retryable_codes: A list of integer HTTP codes that can
                    be retried.
                service: A service inheriting from base_api.BaseApiService.
                method_config: Method config for the desired API request.
            """
            # 401 UNAUTHORIZED is always treated as retryable, since
            # Execute() refreshes credentials and retries on auth failure.
            self.__retryable_codes = list(
                set(retryable_codes + [http_client.UNAUTHORIZED]))
            self.__http_response = None
            self.__service = service
            self.__method_config = method_config
            self.http_request = request
            # TODO(user): Add some validation to these fields.
            self.__response = None
            self.__exception = None
        @property
        def is_error(self):
            # True once a handler has recorded an exception for this call.
            return self.exception is not None
        @property
        def response(self):
            return self.__response
        @property
        def exception(self):
            return self.__exception
        @property
        def authorization_failed(self):
            # True when the most recent HTTP response was a 401.
            return (self.__http_response and (
                self.__http_response.status_code == http_client.UNAUTHORIZED))
        @property
        def terminal_state(self):
            # A call with no response yet is not terminal; otherwise it is
            # terminal exactly when the status code is not retryable.
            if self.__http_response is None:
                return False
            response_code = self.__http_response.status_code
            return response_code not in self.__retryable_codes
        def HandleResponse(self, http_response, exception):
            """Handles incoming http response to the request in http_request.
            This is intended to be used as a callback function for
            BatchHttpRequest.Add.
            Args:
                http_response: Deserialized http_wrapper.Response object.
                exception: apiclient.errors.HttpError object if an error
                    occurred.
            """
            self.__http_response = http_response
            self.__exception = exception
            # Only deserialize the payload once the call has finished
            # cleanly; retryable responses are left for a later attempt.
            if self.terminal_state and not self.__exception:
                self.__response = self.__service.ProcessHttpResponse(
                    self.__method_config, self.__http_response)
    def __init__(self, batch_url=None, retryable_codes=None,
                 response_encoding=None):
        """Initialize a batch API request object.
        Args:
            batch_url: Base URL for batch API calls.
            retryable_codes: A list of integer HTTP codes that can be retried.
            response_encoding: The encoding type of response content.
        """
        self.api_requests = []
        self.retryable_codes = retryable_codes or []
        self.batch_url = batch_url or 'https://www.googleapis.com/batch'
        self.response_encoding = response_encoding
    def Add(self, service, method, request, global_params=None):
        """Add a request to the batch.
        Args:
            service: A class inheriting base_api.BaseApiService.
            method: A string indicated desired method from the service. See
                the example in the class docstring.
            request: An input message appropriate for the specified
                service.method.
            global_params: Optional additional parameters to pass into
                method.PrepareHttpRequest.
        Returns:
            None
        """
        # Retrieve the configs for the desired method and service.
        method_config = service.GetMethodConfig(method)
        upload_config = service.GetUploadConfig(method)
        # Prepare the HTTP Request.
        http_request = service.PrepareHttpRequest(
            method_config, request, global_params=global_params,
            upload_config=upload_config)
        # Create the request and add it to our master list.
        api_request = self.ApiCall(
            http_request, self.retryable_codes, service, method_config)
        self.api_requests.append(api_request)
    def Execute(self, http, sleep_between_polls=5, max_retries=5,
                max_batch_size=None, batch_request_callback=None):
        """Execute all of the requests in the batch.
        Args:
            http: httplib2.Http object for use in the request.
            sleep_between_polls: Integer number of seconds to sleep between
                polls.
            max_retries: Max retries. Any requests that have not succeeded by
                this number of retries simply report the last response or
                exception, whatever it happened to be.
            max_batch_size: int, if specified requests will be split in batches
                of given size.
            batch_request_callback: function of (http_response, exception) passed
                to BatchHttpRequest which will be run on any given results.
        Returns:
            List of ApiCalls.
        """
        requests = [request for request in self.api_requests
                    if not request.terminal_state]
        batch_size = max_batch_size or len(requests)
        for attempt in range(max_retries):
            if attempt:
                time.sleep(sleep_between_polls)
            # Send the outstanding requests in slices of batch_size; each
            # slice becomes one multipart batch HTTP request.
            for i in range(0, len(requests), batch_size):
                # Create a batch_http_request object and populate it with
                # incomplete requests.
                batch_http_request = BatchHttpRequest(
                    batch_url=self.batch_url,
                    callback=batch_request_callback,
                    response_encoding=self.response_encoding
                )
                for request in itertools.islice(requests,
                                                i, i + batch_size):
                    batch_http_request.Add(
                        request.http_request, request.HandleResponse)
                batch_http_request.Execute(http)
                # If any call in this slice hit a 401, refresh credentials
                # once before the next retry pass.
                if hasattr(http.request, 'credentials'):
                    if any(request.authorization_failed
                           for request in itertools.islice(requests,
                                                           i, i + batch_size)):
                        http.request.credentials.refresh(http)
            # Collect retryable requests.
            requests = [request for request in self.api_requests if not
                        request.terminal_state]
            if not requests:
                break
        return self.api_requests
class BatchHttpRequest(object):
"""Batches multiple http_wrapper.Request objects into a single request."""
def __init__(self, batch_url, callback=None, response_encoding=None):
"""Constructor for a BatchHttpRequest.
Args:
batch_url: URL to send batch requests to.
callback: A callback to be called for each response, of the
form callback(response, exception). The first parameter is
the deserialized Response object. The second is an
apiclient.errors.HttpError exception object if an HTTP error
occurred while processing the request, or None if no error
occurred.
response_encoding: The encoding type of response content.
"""
# Endpoint to which these requests are sent.
self.__batch_url = batch_url
# Global callback to be called for each individual response in the
# batch.
self.__callback = callback
# Response content will be decoded if this is provided.
self.__response_encoding = response_encoding
# List of requests, responses and handlers.
self.__request_response_handlers = {}
# The last auto generated id.
self.__last_auto_id = itertools.count()
# Unique ID on which to base the Content-ID headers.
self.__base_id = uuid.uuid4()
def _ConvertIdToHeader(self, request_id):
"""Convert an id to a Content-ID header value.
Args:
request_id: String identifier for a individual request.
Returns:
A Content-ID header with the id_ encoded into it. A UUID is
prepended to the value because Content-ID headers are
supposed to be universally unique.
"""
return '<%s+%s>' % (self.__base_id, urllib_parse.quote(request_id))
@staticmethod
def _ConvertHeaderToId(header):
"""Convert a Content-ID header value to an id.
Presumes the Content-ID header conforms to the format that
_ConvertIdToHeader() returns.
Args:
header: A string indicating the Content-ID header value.
Returns:
The extracted id value.
Raises:
BatchError if the header is not in the expected format.
"""
if not (header.startswith('<') or header.endswith('>')):
raise exceptions.BatchError(
'Invalid value for Content-ID: %s' % header)
if '+' not in header:
raise exceptions.BatchError(
'Invalid value for Content-ID: %s' % header)
_, request_id = header[1:-1].rsplit('+', 1)
return urllib_parse.unquote(request_id)
    def _SerializeRequest(self, request):
        """Convert a http_wrapper.Request object into a string.
        Args:
            request: A http_wrapper.Request to serialize.
        Returns:
            The request as a string in application/http format.
        """
        # Construct status line
        parsed = urllib_parse.urlsplit(request.url)
        # The request line carries only path + query; scheme and netloc are
        # dropped here and the host is conveyed via the Host header below.
        request_line = urllib_parse.urlunsplit(
            ('', '', parsed.path, parsed.query, ''))
        if not isinstance(request_line, six.text_type):
            request_line = request_line.decode('utf-8')
        status_line = u' '.join((
            request.http_method,
            request_line,
            u'HTTP/1.1\n'
        ))
        major, minor = request.headers.get(
            'content-type', 'application/json').split('/')
        msg = mime_nonmultipart.MIMENonMultipart(major, minor)
        # MIMENonMultipart adds its own Content-Type header.
        # Keep all of the other headers in `request.headers`.
        for key, value in request.headers.items():
            if key == 'content-type':
                continue
            msg[key] = value
        msg['Host'] = parsed.netloc
        msg.set_unixfrom(None)
        if request.body is not None:
            msg.set_payload(request.body)
        # Serialize the mime message.
        str_io = six.StringIO()
        # maxheaderlen=0 means don't line wrap headers.
        gen = generator.Generator(str_io, maxheaderlen=0)
        gen.flatten(msg, unixfrom=False)
        body = str_io.getvalue()
        return status_line + body
def _DeserializeResponse(self, payload):
"""Convert string into Response and content.
Args:
payload: Header and body string to be deserialized.
Returns:
A Response object
"""
# Strip off the status line.
status_line, payload = payload.split('\n', 1)
_, status, _ = status_line.split(' ', 2)
# Parse the rest of the response.
parser = email_parser.Parser()
msg = parser.parsestr(payload)
# Get the headers.
info = dict(msg)
info['status'] = status
# Create Response from the parsed headers.
content = msg.get_payload()
return http_wrapper.Response(info, content, self.__batch_url)
def _NewId(self):
"""Create a new id.
Auto incrementing number that avoids conflicts with ids already used.
Returns:
A new unique id string.
"""
return str(next(self.__last_auto_id))
def Add(self, request, callback=None):
    """Add a new request.

    Args:
      request: A http_wrapper.Request to add to the batch.
      callback: A callback to be called for this response, of the
          form callback(response, exception). The first parameter is the
          deserialized response object. The second is an
          apiclient.errors.HttpError exception object if an HTTP error
          occurred while processing the request, or None if no errors
          occurred.

    Returns:
      None
    """
    # The response slot stays None until _Execute fills it in.
    entry = RequestResponseAndHandler(request, None, callback)
    key = self._NewId()
    self.__request_response_handlers[key] = entry
def _Execute(self, http):
    """Serialize batch request, send to server, process response.

    Each queued request is serialized into an application/http MIME part,
    the parts are POSTed as one multipart/mixed request, and the
    multipart response is split back into per-request Response objects
    stored on self.__request_response_handlers.

    Args:
      http: A httplib2.Http object to be used to make the request with.

    Raises:
      httplib2.HttpLib2Error if a transport error has occurred.
      apiclient.errors.BatchError if the response is the wrong format.
    """
    message = mime_multipart.MIMEMultipart('mixed')
    # Message should not write out its own headers.
    setattr(message, '_write_headers', lambda self: None)
    # Add all the individual requests.
    for key in self.__request_response_handlers:
        msg = mime_nonmultipart.MIMENonMultipart('application', 'http')
        msg['Content-Transfer-Encoding'] = 'binary'
        # The Content-ID ties each response part back to its request.
        msg['Content-ID'] = self._ConvertIdToHeader(key)
        body = self._SerializeRequest(
            self.__request_response_handlers[key].request)
        msg.set_payload(body)
        message.attach(msg)
    request = http_wrapper.Request(self.__batch_url, 'POST')
    request.body = message.as_string()
    request.headers['content-type'] = (
        'multipart/mixed; boundary="%s"') % message.get_boundary()
    response = http_wrapper.MakeRequest(http, request)
    # A non-2xx batch-level status means the whole batch failed.
    if response.status_code >= 300:
        raise exceptions.HttpError.FromResponse(response)
    # Prepend with a content-type header so Parser can handle it.
    header = 'content-type: %s\r\n\r\n' % response.info['content-type']
    content = response.content
    if isinstance(content, bytes) and self.__response_encoding:
        content = response.content.decode(self.__response_encoding)
    parser = email_parser.Parser()
    mime_response = parser.parsestr(header + content)
    if not mime_response.is_multipart():
        raise exceptions.BatchError(
            'Response not in multipart/mixed format.')
    for part in mime_response.get_payload():
        request_id = self._ConvertHeaderToId(part['Content-ID'])
        response = self._DeserializeResponse(part.get_payload())
        # Disable protected access because namedtuple._replace(...)
        # is not actually meant to be protected.
        # pylint: disable=protected-access
        self.__request_response_handlers[request_id] = (
            self.__request_response_handlers[request_id]._replace(
                response=response))
def Execute(self, http):
    """Execute all the requests as a single batched HTTP request.

    Args:
      http: A httplib2.Http object to be used with the request.

    Returns:
      None

    Raises:
      BatchError if the response is the wrong format.
    """
    self._Execute(http)

    # Dispatch each response to its per-request callback (if any) and to
    # the batch-wide callback (if any).
    for entry in self.__request_response_handlers.values():
        response = entry.response
        per_request_callback = entry.handler

        exception = None
        if response.status_code >= 300:
            exception = exceptions.HttpError.FromResponse(response)

        if per_request_callback is not None:
            per_request_callback(response, exception)
        if self.__callback is not None:
            self.__callback(response, exception)
| python | Apache-2.0 | 1b14f58b3c895f542c724028dd0fb8ae0d816510 | 2026-01-05T07:12:01.390580Z | false |
google/apitools | https://github.com/google/apitools/blob/1b14f58b3c895f542c724028dd0fb8ae0d816510/apitools/base/py/compression_test.py | apitools/base/py/compression_test.py | #!/usr/bin/env python
#
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for compression."""
import unittest
from apitools.base.py import compression
from apitools.base.py import gzip
import six
class CompressionTest(unittest.TestCase):

    def setUp(self):
        # Sample highly compressible data (~50MB).
        self.sample_data = b'abc' * 16777216
        # Seekable stream holding the payload, positioned at its start.
        self.stream = six.BytesIO()
        self.stream.write(self.sample_data)
        self.length = self.stream.tell()
        self.stream.seek(0)

    def testCompressionExhausted(self):
        """Compressing the full stream consumes and shrinks all of it."""
        output, read, exhausted = compression.CompressStream(
            self.stream, self.length, 9)
        # Repetitive input must compress smaller than the original.
        self.assertLess(output.length, self.length)
        # Every input byte was consumed...
        self.assertEqual(read, self.length)
        # ...and the stream was reported as exhausted.
        self.assertTrue(exhausted)

    def testCompressionUnbounded(self):
        """A length of None compresses until the stream is exhausted."""
        output, read, exhausted = compression.CompressStream(
            self.stream, None, 9)
        # Repetitive input must compress smaller than the original.
        self.assertLess(output.length, self.length)
        # The whole stream was consumed and flagged exhausted.
        self.assertEqual(read, self.length)
        self.assertTrue(exhausted)

    def testCompressionPartial(self):
        """The length parameter lower-bounds how much gets compressed."""
        target_length = 40
        output, _, exhausted = compression.CompressStream(
            self.stream, target_length, 9)
        # At least the requested number of compressed bytes is produced.
        self.assertLessEqual(target_length, output.length)
        # The large input cannot have been fully consumed.
        self.assertFalse(exhausted)

    def testCompressionIntegrity(self):
        """Compressed output must round-trip back to the original data."""
        output, read, exhausted = compression.CompressStream(
            self.stream, self.length, 9)
        # Decompressing must reproduce the sample payload exactly.
        with gzip.GzipFile(fileobj=output) as decompressor:
            self.assertEqual(decompressor.read(), self.sample_data)
        # The whole stream was consumed and flagged exhausted.
        self.assertEqual(read, self.length)
        self.assertTrue(exhausted)
class StreamingBufferTest(unittest.TestCase):

    def setUp(self):
        self.stream = compression.StreamingBuffer()

    def testSimpleStream(self):
        """Writing then reading consumes the buffered bytes."""
        # A freshly created buffer is empty.
        self.assertEqual(self.stream.length, 0)
        # Writes are reflected in the reported length.
        self.stream.write(b'Sample data')
        self.assertEqual(self.stream.length, 11)
        # A full read returns the data and purges it from the buffer.
        self.assertEqual(self.stream.read(11), b'Sample data')
        self.assertEqual(self.stream.length, 0)

    def testPartialReads(self):
        """Chunked reads preserve the consumption mechanics."""
        self.stream.write(b'Sample data')
        # The first chunk is returned and removed from the buffer.
        self.assertEqual(self.stream.read(6), b'Sample')
        self.assertEqual(self.stream.length, 5)
        # The remainder can then be read out.
        self.assertEqual(self.stream.read(5), b' data')
        self.assertEqual(self.stream.length, 0)

    def testTooShort(self):
        """Over-reading returns what is available without raising."""
        self.stream.write(b'Sample')
        # Requesting more than is buffered yields only what exists.
        self.assertEqual(self.stream.read(100), b'Sample')
        self.assertEqual(self.stream.length, 0)
| python | Apache-2.0 | 1b14f58b3c895f542c724028dd0fb8ae0d816510 | 2026-01-05T07:12:01.390580Z | false |
google/apitools | https://github.com/google/apitools/blob/1b14f58b3c895f542c724028dd0fb8ae0d816510/apitools/base/py/testing/mock_test.py | apitools/base/py/testing/mock_test.py | #
# Copyright 2015 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for apitools.base.py.testing.mock."""
import unittest
import httplib2
import six
from apitools.base.protorpclite import messages
from apitools.base.py import base_api
from apitools.base.py import exceptions
from apitools.base.py.testing import mock
from samples.fusiontables_sample.fusiontables_v1 import \
fusiontables_v1_client as fusiontables
from samples.fusiontables_sample.fusiontables_v1 import \
fusiontables_v1_messages as fusiontables_messages
def _GetApiServices(api_client_class):
    """Map service attribute names to BaseApiService subclasses on a client."""
    services = {}
    for name, candidate in six.iteritems(api_client_class.__dict__):
        if (isinstance(candidate, type) and
                issubclass(candidate, base_api.BaseApiService)):
            services[name] = candidate
    return services
class CustomException(Exception):
    """Marker exception used to verify mock context-manager passthrough."""
class MockTest(unittest.TestCase):
    """End-to-end tests of mock.Client against the Fusiontables sample API."""

    def testMockFusionBasic(self):
        """A registered expectation is matched once; surplus calls raise."""
        with mock.Client(fusiontables.FusiontablesV1) as client_class:
            client_class.column.List.Expect(
                request=1, response=2, enable_type_checking=False)

            client = fusiontables.FusiontablesV1(get_credentials=False)
            self.assertEqual(client.column.List(1), 2)

            with self.assertRaises(mock.UnexpectedRequestException):
                client.column.List(3)

    def testMockFusionException(self):
        """An expectation configured with an exception raises it on call."""
        with mock.Client(fusiontables.FusiontablesV1) as client_class:
            client_class.column.List.Expect(
                request=1,
                exception=exceptions.HttpError({'status': 404}, '', ''),
                enable_type_checking=False)

            client = fusiontables.FusiontablesV1(get_credentials=False)
            with self.assertRaises(exceptions.HttpError):
                client.column.List(1)

    def testMockFusionTypeChecking(self):
        """Correctly-typed request/response messages pass type checking."""
        with mock.Client(fusiontables.FusiontablesV1) as client_class:
            messages = client_class.MESSAGES_MODULE

            client_class.column.List.Expect(
                messages.FusiontablesColumnListRequest(tableId='foo'),
                messages.ColumnList(items=[], totalItems=0))

            client = fusiontables.FusiontablesV1(get_credentials=False)
            self.assertEqual(
                client.column.List(
                    messages.FusiontablesColumnListRequest(tableId='foo')),
                messages.ColumnList(items=[], totalItems=0))

    def testMockFusionTypeCheckingErrors(self):
        """Mistyped messages are rejected unless checking is disabled."""
        with mock.Client(fusiontables.FusiontablesV1) as client_class:
            messages = client_class.MESSAGES_MODULE

            # Wrong request type.
            with self.assertRaises(exceptions.ConfigurationValueError):
                client_class.column.List.Expect(
                    messages.FusiontablesColumnInsertRequest(),
                    messages.ColumnList(items=[], totalItems=0))

            # Wrong response type.
            with self.assertRaises(exceptions.ConfigurationValueError):
                client_class.column.List.Expect(
                    messages.FusiontablesColumnListRequest(tableId='foo'),
                    messages.Column())

            # No error if checking is disabled.
            client_class.column.List.Expect(
                messages.FusiontablesColumnInsertRequest(),
                messages.Column(),
                enable_type_checking=False)
            client_class.column.List(
                messages.FusiontablesColumnInsertRequest())

    def testMockIfAnotherException(self):
        """An exception raised inside the mock context propagates out."""
        with self.assertRaises(CustomException):
            with mock.Client(fusiontables.FusiontablesV1) as client_class:
                client_class.column.List.Expect(
                    request=1, response=2, enable_type_checking=False)
                raise CustomException('Something when wrong')

    def testMockFusionOrder(self):
        """Expectations are consumed in the order they were registered."""
        with mock.Client(fusiontables.FusiontablesV1) as client_class:
            client_class.column.List.Expect(
                request=1, response=2, enable_type_checking=False)
            client_class.column.List.Expect(
                request=2, response=1, enable_type_checking=False)

            client = fusiontables.FusiontablesV1(get_credentials=False)
            self.assertEqual(client.column.List(1), 2)
            self.assertEqual(client.column.List(2), 1)

    def testMockFusionWrongOrder(self):
        """Calls that arrive out of expectation order raise."""
        with mock.Client(fusiontables.FusiontablesV1) as client_class:
            client_class.column.List.Expect(
                request=1, response=2, enable_type_checking=False)
            client_class.column.List.Expect(
                request=2, response=1, enable_type_checking=False)

            client = fusiontables.FusiontablesV1(get_credentials=False)
            with self.assertRaises(mock.UnexpectedRequestException):
                self.assertEqual(client.column.List(2), 1)
            with self.assertRaises(mock.UnexpectedRequestException):
                self.assertEqual(client.column.List(1), 2)

    def testMockFusionTooMany(self):
        """Calls beyond the registered expectations raise."""
        with mock.Client(fusiontables.FusiontablesV1) as client_class:
            client_class.column.List.Expect(
                request=1, response=2, enable_type_checking=False)

            client = fusiontables.FusiontablesV1(get_credentials=False)
            self.assertEqual(client.column.List(1), 2)
            with self.assertRaises(mock.UnexpectedRequestException):
                self.assertEqual(client.column.List(2), 1)

    def testMockFusionTooFew(self):
        """Leaving expectations unconsumed raises at context exit."""
        with self.assertRaises(mock.ExpectedRequestsException):
            with mock.Client(fusiontables.FusiontablesV1) as client_class:
                client_class.column.List.Expect(
                    request=1, response=2, enable_type_checking=False)
                client_class.column.List.Expect(
                    request=2, response=1, enable_type_checking=False)

                client = fusiontables.FusiontablesV1(get_credentials=False)
                self.assertEqual(client.column.List(1), 2)

    def testFusionUnmock(self):
        """Leaving the mock context restores the original service classes."""
        with mock.Client(fusiontables.FusiontablesV1):
            client = fusiontables.FusiontablesV1(get_credentials=False)
            mocked_service_type = type(client.column)
        client = fusiontables.FusiontablesV1(get_credentials=False)
        self.assertNotEqual(type(client.column), mocked_service_type)

    def testRequestMacher(self):
        """Expectation requests may be arbitrary objects implementing __eq__."""
        class Matcher(object):
            # Delegates equality to the wrapped predicate.

            def __init__(self, eq):
                self._eq = eq

            def __eq__(self, other):
                return self._eq(other)

        with mock.Client(fusiontables.FusiontablesV1) as client_class:
            def IsEven(x):
                return x % 2 == 0

            def IsOdd(x):
                return not IsEven(x)

            client_class.column.List.Expect(
                request=Matcher(IsEven), response=1,
                enable_type_checking=False)
            client_class.column.List.Expect(
                request=Matcher(IsOdd), response=2, enable_type_checking=False)
            client_class.column.List.Expect(
                request=Matcher(IsEven), response=3,
                enable_type_checking=False)
            client_class.column.List.Expect(
                request=Matcher(IsOdd), response=4, enable_type_checking=False)

            client = fusiontables.FusiontablesV1(get_credentials=False)
            self.assertEqual(client.column.List(2), 1)
            self.assertEqual(client.column.List(1), 2)
            self.assertEqual(client.column.List(20), 3)
            self.assertEqual(client.column.List(23), 4)

    def testClientUnmock(self):
        """Mock()/Unmock() toggle the instance's class and attributes."""
        mock_client = mock.Client(fusiontables.FusiontablesV1)
        self.assertFalse(isinstance(mock_client, fusiontables.FusiontablesV1))
        attributes = set(mock_client.__dict__.keys())
        mock_client = mock_client.Mock()
        self.assertTrue(isinstance(mock_client, fusiontables.FusiontablesV1))
        self.assertTrue(set(mock_client.__dict__.keys()) - attributes)
        mock_client.Unmock()
        self.assertFalse(isinstance(mock_client, fusiontables.FusiontablesV1))
        self.assertEqual(attributes, set(mock_client.__dict__.keys()))

    def testMockHasMessagesModule(self):
        """The mocked client still exposes the API's MESSAGES_MODULE."""
        with mock.Client(fusiontables.FusiontablesV1) as mock_client:
            self.assertEqual(fusiontables_messages,
                             mock_client.MESSAGES_MODULE)

    def testMockHasUrlProperty(self):
        """The mocked client exposes .url only while mocked."""
        with mock.Client(fusiontables.FusiontablesV1) as mock_client:
            self.assertEqual(fusiontables.FusiontablesV1.BASE_URL,
                             mock_client.url)
        self.assertFalse(hasattr(mock_client, 'url'))

    def testMockHasOverrideUrlProperty(self):
        """A supplied real client's url is reflected by the mock."""
        real_client = fusiontables.FusiontablesV1(url='http://localhost:8080',
                                                  get_credentials=False)
        with mock.Client(fusiontables.FusiontablesV1,
                         real_client) as mock_client:
            self.assertEqual('http://localhost:8080/', mock_client.url)

    def testMockHasHttpProperty(self):
        """The mocked client exposes .http only while mocked."""
        with mock.Client(fusiontables.FusiontablesV1) as mock_client:
            self.assertIsInstance(mock_client.http, httplib2.Http)
        self.assertFalse(hasattr(mock_client, 'http'))

    def testMockHasOverrideHttpProperty(self):
        """A supplied real client's http object is reflected by the mock."""
        real_client = fusiontables.FusiontablesV1(url='http://localhost:8080',
                                                  http='SomeHttpObject',
                                                  get_credentials=False)
        with mock.Client(fusiontables.FusiontablesV1,
                         real_client) as mock_client:
            self.assertEqual('SomeHttpObject', mock_client.http)

    def testMockPreservesServiceMethods(self):
        """Mocking keeps every service method and its method_config intact."""
        services = _GetApiServices(fusiontables.FusiontablesV1)
        with mock.Client(fusiontables.FusiontablesV1):
            mocked_services = _GetApiServices(fusiontables.FusiontablesV1)
            self.assertEqual(services.keys(), mocked_services.keys())
            for name, service in six.iteritems(services):
                mocked_service = mocked_services[name]
                methods = service.GetMethodsList()
                for method in methods:
                    mocked_method = getattr(mocked_service, method)
                    mocked_method_config = mocked_method.method_config()
                    method_config = getattr(service, method).method_config()
                    self.assertEqual(method_config, mocked_method_config)
class _NestedMessage(messages.Message):
    """Leaf test message with a single string field."""
    nested = messages.StringField(1)
class _NestedListMessage(messages.Message):
    """Test message holding a repeated field of _NestedMessage."""
    nested_list = messages.MessageField(_NestedMessage, 1, repeated=True)
class _NestedNestedMessage(messages.Message):
    """Test message holding a singular _NestedMessage field."""
    nested = messages.MessageField(_NestedMessage, 1)
class UtilTest(unittest.TestCase):
    """Unit tests for the mock._MessagesEqual comparison helper."""

    def testMessagesEqual(self):
        """Nested singular messages compare by their field values."""
        self.assertFalse(mock._MessagesEqual(
            _NestedNestedMessage(
                nested=_NestedMessage(
                    nested='foo')),
            _NestedNestedMessage(
                nested=_NestedMessage(
                    nested='bar'))))

        self.assertTrue(mock._MessagesEqual(
            _NestedNestedMessage(
                nested=_NestedMessage(
                    nested='foo')),
            _NestedNestedMessage(
                nested=_NestedMessage(
                    nested='foo'))))

    def testListedMessagesEqual(self):
        """Repeated message fields compare element-wise, including length."""
        self.assertTrue(mock._MessagesEqual(
            _NestedListMessage(
                nested_list=[_NestedMessage(nested='foo')]),
            _NestedListMessage(
                nested_list=[_NestedMessage(nested='foo')])))

        self.assertTrue(mock._MessagesEqual(
            _NestedListMessage(
                nested_list=[_NestedMessage(nested='foo'),
                             _NestedMessage(nested='foo2')]),
            _NestedListMessage(
                nested_list=[_NestedMessage(nested='foo'),
                             _NestedMessage(nested='foo2')])))

        self.assertFalse(mock._MessagesEqual(
            _NestedListMessage(
                nested_list=[_NestedMessage(nested='foo')]),
            _NestedListMessage(
                nested_list=[_NestedMessage(nested='bar')])))

        # Lists of different lengths are never equal.
        self.assertFalse(mock._MessagesEqual(
            _NestedListMessage(
                nested_list=[_NestedMessage(nested='foo')]),
            _NestedListMessage(
                nested_list=[_NestedMessage(nested='foo'),
                             _NestedMessage(nested='foo')])))
| python | Apache-2.0 | 1b14f58b3c895f542c724028dd0fb8ae0d816510 | 2026-01-05T07:12:01.390580Z | false |
google/apitools | https://github.com/google/apitools/blob/1b14f58b3c895f542c724028dd0fb8ae0d816510/apitools/base/py/testing/mock.py | apitools/base/py/testing/mock.py | #
# Copyright 2015 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The mock module allows easy mocking of apitools clients.
This module allows you to mock out the constructor of a particular apitools
client, for a specific API and version. Then, when the client is created, it
will be run against an expected session that you define. This way code that is
not aware of the testing framework can construct new clients as normal, as long
as it's all done within the context of a mock.
"""
import difflib
import sys
import six
from apitools.base.protorpclite import messages
from apitools.base.py import base_api
from apitools.base.py import encoding
from apitools.base.py import exceptions
class Error(Exception):
    """Base class for all exceptions raised by this module."""
def _MessagesEqual(msg1, msg2):
    """Compare two protorpc messages for equality.

    Using python's == operator does not work in all cases, specifically when
    there is a list involved.

    Args:
      msg1: protorpc.messages.Message or [protorpc.messages.Message] or number
          or string, One of the messages to compare.
      msg2: protorpc.messages.Message or [protorpc.messages.Message] or number
          or string, One of the messages to compare.

    Returns:
      If the messages are isomorphic.
    """
    # Lists compare element-wise, recursing into each pair.
    if isinstance(msg1, list) and isinstance(msg2, list):
        return (len(msg1) == len(msg2) and
                all(_MessagesEqual(a, b) for a, b in zip(msg1, msg2)))

    # Anything that is not a pair of messages falls back to plain equality.
    if not (isinstance(msg1, messages.Message) and
            isinstance(msg2, messages.Message)):
        return msg1 == msg2

    # Two messages are isomorphic when every field compares equal.
    return all(
        _MessagesEqual(getattr(msg1, field.name), getattr(msg2, field.name))
        for field in msg1.all_fields())
class UnexpectedRequestException(Error):
    """Raised when a mocked client receives a request it did not expect.

    The message shows the expected call next to the received one. When the
    method keys match (same method, different payload), a unified diff of
    the two request reprs is included to make the mismatch easy to spot.
    """

    def __init__(self, received_call, expected_call):
        expected_key, expected_request = expected_call
        received_key, received_request = received_call

        expected_repr = encoding.MessageToRepr(
            expected_request, multiline=True)
        received_repr = encoding.MessageToRepr(
            received_request, multiline=True)

        if expected_key != received_key:
            # Different methods entirely; a payload diff would only add
            # noise, so just show both calls side by side.
            msg = '\n'.join((
                'expected: {expected_key}({expected_request})',
                'received: {received_key}({received_request})',
                '',
            )).format(
                expected_key=expected_key,
                expected_request=expected_repr,
                received_key=received_key,
                received_request=received_repr)
        else:
            # Same method, different payload: compute the diff only on
            # this branch, where it is actually used.
            expected_lines = expected_repr.splitlines()
            received_lines = received_repr.splitlines()
            diff = '\n'.join(
                difflib.unified_diff(expected_lines, received_lines))
            msg = '\n'.join((
                'for request to {key},',
                'expected: {expected_request}',
                'received: {received_request}',
                'diff: {diff}',
                '',
            )).format(
                key=expected_key,
                expected_request=expected_repr,
                received_request=received_repr,
                diff=diff)
        super(UnexpectedRequestException, self).__init__(msg)
class ExpectedRequestsException(Error):
    """Raised when registered expectations remain unconsumed at unmock."""

    def __init__(self, expected_calls):
        # List every expectation that was never matched by a request.
        lines = ['expected:']
        for key, request in expected_calls:
            lines.append('{key}({request})'.format(
                key=key,
                request=encoding.MessageToRepr(request, multiline=True)))
        msg = '\n'.join(lines) + '\n'
        super(ExpectedRequestsException, self).__init__(msg)
class _ExpectedRequestResponse(object):
    """Encapsulation of an expected request and corresponding response."""

    def __init__(self, key, request, response=None, exception=None):
        """Initializer.

        Args:
          key: str, Identifier of the mocked method
              ('api.collection.method').
          request: The request that must be received.
          response: The canned response to return, or None.
          exception: An exceptions.Error instance to raise instead, or
              None.

        Raises:
          exceptions.ConfigurationValueError: If both response and
              exception are given, or either has an invalid type.
        """
        self.__key = key
        self.__request = request

        # Compare against None explicitly so that falsy-but-present values
        # (e.g. an empty message or 0) are still validated rather than
        # silently ignored.
        if response is not None and exception is not None:
            raise exceptions.ConfigurationValueError(
                'Should specify at most one of response and exception')
        if response is not None and isinstance(response, exceptions.Error):
            raise exceptions.ConfigurationValueError(
                'Responses should not be an instance of Error')
        if exception is not None and not isinstance(exception,
                                                    exceptions.Error):
            raise exceptions.ConfigurationValueError(
                'Exceptions must be instances of Error')

        self.__response = response
        self.__exception = exception

    @property
    def key(self):
        """The mocked method this expectation applies to."""
        return self.__key

    @property
    def request(self):
        """The request this expectation is waiting for."""
        return self.__request

    def ValidateAndRespond(self, key, request):
        """Validate that key and request match expectations, and respond if so.

        Args:
          key: str, Actual key to compare against expectations.
          request: protorpc.messages.Message or [protorpc.messages.Message]
            or number or string, Actual request to compare against
            expectations.

        Raises:
          UnexpectedRequestException: If key or request don't match
              expectations.
          apitools_base.Error: If a non-None exception is specified to
              be thrown.

        Returns:
          The response that was specified to be returned.
        """
        if key != self.__key or not (self.__request == request or
                                     _MessagesEqual(request, self.__request)):
            raise UnexpectedRequestException((key, request),
                                             (self.__key, self.__request))

        if self.__exception:
            # Can only throw apitools_base.Error.
            raise self.__exception  # pylint: disable=raising-bad-type

        return self.__response
class _MockedMethod(object):

    """A mocked API service method."""

    def __init__(self, key, mocked_client, real_method):
        """Initializer.

        Args:
          key: str, Identifier of this method ('api.collection.method').
          mocked_client: The mock Client that owns the expectation queue.
          real_method: The real service method, used for passthrough when
              an expectation's response is None.
        """
        self.__name__ = real_method.__name__
        self.__key = key
        self.__mocked_client = mocked_client
        self.__real_method = real_method
        self.method_config = real_method.method_config
        config = self.method_config()
        # Resolve the request/response message classes declared for this
        # method so expectations can be type-checked in Expect().
        self.__request_type = getattr(self.__mocked_client.MESSAGES_MODULE,
                                      config.request_type_name)
        self.__response_type = getattr(self.__mocked_client.MESSAGES_MODULE,
                                       config.response_type_name)

    def _TypeCheck(self, msg, is_request):
        """Ensure the given message is of the expected type of this method.

        Args:
          msg: The message instance to check.
          is_request: True to validate against the expected request type,
              False to validate against the expected response type.

        Raises:
          exceptions.ConfigurationValueError: If the type of the message was
              not correct.
        """
        if is_request:
            mode = 'request'
            real_type = self.__request_type
        else:
            mode = 'response'
            real_type = self.__response_type

        if not isinstance(msg, real_type):
            raise exceptions.ConfigurationValueError(
                'Expected {} is not of the correct type for method [{}].\n'
                ' Required: [{}]\n'
                ' Given: [{}]'.format(
                    mode, self.__key, real_type, type(msg)))

    def Expect(self, request, response=None, exception=None,
               enable_type_checking=True, **unused_kwargs):
        """Add an expectation on the mocked method.

        Exactly one of response and exception should be specified.

        Args:
          request: The request that should be expected
          response: The response that should be returned or None if
              exception is provided.
          exception: An exception that should be thrown, or None.
          enable_type_checking: When true, the message type of the request
              and response (if provided) will be checked against the types
              required by this method.
        """
        # TODO(jasmuth): the unused_kwargs provides a placeholder for
        # future things that can be passed to Expect(), like special
        # params to the method call.

        # Ensure that the registered request and response mocks actually
        # match what this method accepts and returns.
        if enable_type_checking:
            self._TypeCheck(request, is_request=True)
            if response:
                self._TypeCheck(response, is_request=False)

        # pylint: disable=protected-access
        # Class in same module.
        self.__mocked_client._request_responses.append(
            _ExpectedRequestResponse(self.__key,
                                     request,
                                     response=response,
                                     exception=exception))
        # pylint: enable=protected-access

    def __call__(self, request, **unused_kwargs):
        # TODO(jasmuth): allow the testing code to expect certain
        # values in these currently unused_kwargs, especially the
        # upload parameter used by media-heavy services like bigquery
        # or bigstore.

        # Pop the next expectation off the shared FIFO queue; a call with
        # no queued expectation is by definition unexpected.
        # pylint: disable=protected-access
        # Class in same module.
        if self.__mocked_client._request_responses:
            request_response = self.__mocked_client._request_responses.pop(0)
        else:
            raise UnexpectedRequestException(
                (self.__key, request), (None, None))
        # pylint: enable=protected-access

        response = request_response.ValidateAndRespond(self.__key, request)

        # A None response means "pass through to the real method".
        if response is None and self.__real_method:
            response = self.__real_method(request)
            # NOTE(review): this print looks like leftover debug output on
            # the passthrough path -- confirm it is intentional.
            print(encoding.MessageToRepr(
                response, multiline=True, shortstrings=True))
            return response

        return response
def _MakeMockedService(api_name, collection_name,
                       mock_client, service, real_service):
    """Build a BaseApiService subclass whose methods are _MockedMethods."""

    class MockedService(base_api.BaseApiService):
        pass

    for method_name in service.GetMethodsList():
        # When a real service is available, keep its method around for
        # passthrough; otherwise the mocked method has no fallback.
        passthrough = None
        if real_service:
            passthrough = getattr(real_service, method_name)
        mocked = _MockedMethod(
            '.'.join((api_name, collection_name, method_name)),
            mock_client, passthrough)
        setattr(MockedService, method_name, mocked)

    return MockedService
class Client(object):

    """Mock an apitools client."""

    def __init__(self, client_class, real_client=None):
        """Mock an apitools API, given its class.

        Args:
          client_class: The class for the API. eg, if you
                from apis.sqladmin import v1beta3
              then you can pass v1beta3.SqladminV1beta3 to this class
              and anything within its context will use your mocked
              version.
          real_client: apitools Client, The client to make requests
              against when the expected response is None.
        """
        if not real_client:
            real_client = client_class(get_credentials=False)

        # Remember the pristine class so Unmock() can restore it.
        self.__orig_class = self.__class__
        self.__client_class = client_class
        self.__real_service_classes = {}
        self.__real_client = real_client

        # Shared FIFO of _ExpectedRequestResponse, consumed by the
        # mocked methods in registration order.
        self._request_responses = []
        self.__real_include_fields = None

    def __enter__(self):
        return self.Mock()

    def Mock(self):
        """Stub out the client class with mocked services."""
        client = self.__real_client or self.__client_class(
            get_credentials=False)

        # Re-class this instance so it passes isinstance checks against
        # the real client class while mocked.
        class Patched(self.__class__, self.__client_class):
            pass
        self.__class__ = Patched

        for name in dir(self.__client_class):
            service_class = getattr(self.__client_class, name)
            if not isinstance(service_class, type):
                continue
            if not issubclass(service_class, base_api.BaseApiService):
                continue
            # Save the real service class so Unmock() can put it back.
            self.__real_service_classes[name] = service_class
            # pylint: disable=protected-access
            collection_name = service_class._NAME
            # pylint: enable=protected-access
            api_name = '%s_%s' % (self.__client_class._PACKAGE,
                                  self.__client_class._URL_VERSION)
            mocked_service_class = _MakeMockedService(
                api_name, collection_name, self,
                service_class,
                service_class(client) if self.__real_client else None)

            # Replace the service both on the client class and as an
            # attribute of this mock instance.
            setattr(self.__client_class, name, mocked_service_class)
            setattr(self, collection_name, mocked_service_class(self))

        # Route IncludeFields through the mock while active.
        self.__real_include_fields = self.__client_class.IncludeFields
        self.__client_class.IncludeFields = self.IncludeFields

        # pylint: disable=attribute-defined-outside-init
        self._url = client._url
        self._http = client._http

        return self

    def __exit__(self, exc_type, value, traceback):
        is_active_exception = value is not None
        # Suppress the unmet-expectations check when an exception is
        # already propagating, then re-raise the original exception.
        self.Unmock(suppress=is_active_exception)
        if is_active_exception:
            six.reraise(exc_type, value, traceback)
        return True

    def Unmock(self, suppress=False):
        """Restore the real client class and service classes.

        Args:
          suppress: bool, When True, do not raise for unmet expectations
              (used when another exception is already propagating).

        Raises:
          ExpectedRequestsException: If registered expectations were never
              consumed (and suppress is False and no exception is active).
        """
        self.__class__ = self.__orig_class

        for name, service_class in self.__real_service_classes.items():
            setattr(self.__client_class, name, service_class)
            delattr(self, service_class._NAME)
        self.__real_service_classes = {}

        del self._url
        del self._http

        self.__client_class.IncludeFields = self.__real_include_fields
        self.__real_include_fields = None

        requests = [(rq_rs.key, rq_rs.request)
                    for rq_rs in self._request_responses]
        self._request_responses = []

        if requests and not suppress and sys.exc_info()[1] is None:
            raise ExpectedRequestsException(requests)

    def IncludeFields(self, include_fields):
        # Delegate to the real client's IncludeFields while mocked.
        if self.__real_client:
            return self.__real_include_fields(self.__real_client,
                                              include_fields)
| python | Apache-2.0 | 1b14f58b3c895f542c724028dd0fb8ae0d816510 | 2026-01-05T07:12:01.390580Z | false |
google/apitools | https://github.com/google/apitools/blob/1b14f58b3c895f542c724028dd0fb8ae0d816510/apitools/base/py/testing/__init__.py | apitools/base/py/testing/__init__.py | #
# Copyright 2015 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Package marker file."""
| python | Apache-2.0 | 1b14f58b3c895f542c724028dd0fb8ae0d816510 | 2026-01-05T07:12:01.390580Z | false |
google/apitools | https://github.com/google/apitools/blob/1b14f58b3c895f542c724028dd0fb8ae0d816510/apitools/base/protorpclite/protojson.py | apitools/base/protorpclite/protojson.py | #!/usr/bin/env python
#
# Copyright 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""JSON support for message types.
Public classes:
MessageJSONEncoder: JSON encoder for message objects.
Public functions:
encode_message: Encodes a message in to a JSON string.
decode_message: Merge from a JSON string in to a message.
"""
import base64
import binascii
import logging
import six
from apitools.base.protorpclite import message_types
from apitools.base.protorpclite import messages
from apitools.base.protorpclite import util
__all__ = [
'ALTERNATIVE_CONTENT_TYPES',
'CONTENT_TYPE',
'MessageJSONEncoder',
'encode_message',
'decode_message',
'ProtoJson',
]
def _load_json_module():
"""Try to load a valid json module.
There are more than one json modules that might be installed. They are
mostly compatible with one another but some versions may be different.
This function attempts to load various json modules in a preferred order.
It does a basic check to guess if a loaded version of json is compatible.
Returns:
Compatible json module.
Raises:
ImportError if there are no json modules or the loaded json module is
not compatible with ProtoRPC.
"""
first_import_error = None
for module_name in ['json',
'simplejson']:
try:
module = __import__(module_name, {}, {}, 'json')
if not hasattr(module, 'JSONEncoder'):
message = (
'json library "%s" is not compatible with ProtoRPC' %
module_name)
logging.warning(message)
raise ImportError(message)
else:
return module
except ImportError as err:
if not first_import_error:
first_import_error = err
logging.error('Must use valid json library (json or simplejson)')
raise first_import_error # pylint:disable=raising-bad-type
# Bind the module-level json name once at import time; the rest of the
# module (including MessageJSONEncoder's base class) uses this resolved
# library.
json = _load_json_module()

# TODO: Rename this to MessageJsonEncoder.
class MessageJSONEncoder(json.JSONEncoder):
    """JSON encoder that can serialize Message objects.

    Extension of JSONEncoder so json.dumps can turn Message instances
    (plus Enum values and bytes) into plain JSON structures.
    """

    def __init__(self, protojson_protocol=None, **kwargs):
        """Constructor.

        Args:
          protojson_protocol: ProtoJson instance used to encode individual
            field values; defaults to the shared default instance.
        """
        super(MessageJSONEncoder, self).__init__(**kwargs)
        self.__protocol = protojson_protocol or ProtoJson.get_default()

    def default(self, value):
        """Return a JSON-serializable representation of value.

        Args:
          value: Value to convert. Anything that is not an Enum, bytes
            or Message is delegated to the superclass default.
        """
        if isinstance(value, messages.Enum):
            return str(value)

        if six.PY3 and isinstance(value, bytes):
            return value.decode('utf8')

        if not isinstance(value, messages.Message):
            return super(MessageJSONEncoder, self).default(value)

        encoded = {}
        for field in value.all_fields():
            assigned = value.get_assigned_value(field.name)
            # Unset and empty-repeated values are omitted from the output.
            if assigned in (None, [], ()):
                continue
            encoded[field.name] = self.__protocol.encode_field(
                field, assigned)

        # Handle unrecognized fields, so they're included when a message is
        # decoded then encoded.  Their values are passed through as-is; they
        # were already processed before we get to here.
        for unknown_key in value.all_unrecognized_fields():
            unrecognized_field, _ = value.get_unrecognized_field_info(
                unknown_key)
            encoded[unknown_key] = unrecognized_field
        return encoded
class ProtoJson(object):
    """ProtoRPC JSON implementation class.

    Implementation of JSON based protocol used for serializing and
    deserializing message objects. Instances of remote.ProtocolConfig
    constructor or used with remote.Protocols.add_protocol. See the
    remote.py module for more details.
    """

    CONTENT_TYPE = 'application/json'
    ALTERNATIVE_CONTENT_TYPES = [
        'application/x-javascript',
        'text/javascript',
        'text/x-javascript',
        'text/x-json',
        'text/json',
    ]

    def encode_field(self, field, value):
        """Encode a python field value to a JSON value.

        Args:
          field: A ProtoRPC field instance.
          value: A python value supported by field.

        Returns:
          A JSON serializable value appropriate for field.
        """
        if isinstance(field, messages.BytesField):
            # Raw bytes are not representable in JSON; transmit base64.
            if field.repeated:
                value = [base64.b64encode(item) for item in value]
            else:
                value = base64.b64encode(value)
        elif isinstance(field, message_types.DateTimeField):
            # DateTimeField stores its data as a RFC 3339 compliant string.
            if field.repeated:
                value = [i.isoformat() for i in value]
            else:
                value = value.isoformat()
        return value

    def encode_message(self, message):
        """Encode Message instance to JSON string.

        Args:
          message: Message instance to encode in to JSON string.

        Returns:
          String encoding of Message instance in protocol JSON format.

        Raises:
          messages.ValidationError if message is not initialized.
        """
        message.check_initialized()
        return json.dumps(message, cls=MessageJSONEncoder,
                          protojson_protocol=self)

    def decode_message(self, message_type, encoded_message):
        """Merge JSON structure to Message instance.

        Args:
          message_type: Message to decode data to.
          encoded_message: JSON encoded version of message.

        Returns:
          Decoded instance of message_type.

        Raises:
          ValueError: If encoded_message is not valid JSON.
          messages.ValidationError if merged message is not initialized.
        """
        encoded_message = six.ensure_str(encoded_message)
        # An empty (or whitespace-only) payload decodes to an empty message.
        if not encoded_message.strip():
            return message_type()

        dictionary = json.loads(encoded_message)
        message = self.__decode_dictionary(message_type, dictionary)
        message.check_initialized()
        return message

    def __find_variant(self, value):
        """Find the messages.Variant type that describes this value.

        Args:
          value: The value whose variant type is being determined.

        Returns:
          The messages.Variant value that best describes value's type,
          or None if it's a type we don't know how to handle.
        """
        # bool is checked before the integer types because in Python bool
        # is a subclass of int.
        if isinstance(value, bool):
            return messages.Variant.BOOL
        elif isinstance(value, six.integer_types):
            return messages.Variant.INT64
        elif isinstance(value, float):
            return messages.Variant.DOUBLE
        elif isinstance(value, six.string_types):
            return messages.Variant.STRING
        elif isinstance(value, (list, tuple)):
            # Find the most specific variant that covers all elements.
            variant_priority = [None,
                                messages.Variant.INT64,
                                messages.Variant.DOUBLE,
                                messages.Variant.STRING]
            chosen_priority = 0
            for v in value:
                variant = self.__find_variant(v)
                try:
                    priority = variant_priority.index(variant)
                except ValueError:
                    # BUG FIX: list.index raises ValueError (not
                    # IndexError) when the item is absent.  The previous
                    # ``except IndexError`` clause could never fire, so an
                    # element whose variant is outside the priority list
                    # (e.g. a bool or a nested list) crashed the decode
                    # instead of leaving the field unrecognized.
                    priority = -1
                if priority > chosen_priority:
                    chosen_priority = priority
            return variant_priority[chosen_priority]
        # Unrecognized type.
        return None

    def __decode_dictionary(self, message_type, dictionary):
        """Merge dictionary in to message.

        Args:
          message_type: Message type to instantiate and merge into.
          dictionary: Dictionary to extract information from. Dictionary
            is as parsed from JSON. Nested objects will also be
            dictionaries.

        Returns:
          Decoded instance of message_type.
        """
        message = message_type()
        for key, value in six.iteritems(dictionary):
            if value is None:
                # JSON null resets the field to its unset state.
                try:
                    message.reset(key)
                except AttributeError:
                    pass  # This is an unrecognized field, skip it.
                continue
            try:
                field = message.field_by_name(key)
            except KeyError:
                # Save unknown values.
                variant = self.__find_variant(value)
                if variant:
                    message.set_unrecognized_field(key, value, variant)
                continue

            is_enum_field = isinstance(field, messages.EnumField)
            is_unrecognized_field = False
            if field.repeated:
                # This should be unnecessary? Or in fact become an error.
                if not isinstance(value, list):
                    value = [value]
                valid_value = []
                for item in value:
                    try:
                        v = self.decode_field(field, item)
                        # Unknown enum values decode to None; drop them.
                        if is_enum_field and v is None:
                            continue
                    except messages.DecodeError:
                        if not is_enum_field:
                            raise
                        is_unrecognized_field = True
                        continue
                    valid_value.append(v)
                setattr(message, field.name, valid_value)
                if is_unrecognized_field:
                    # Keep the raw list too so unknown enum values survive
                    # a decode -> encode round trip.
                    variant = self.__find_variant(value)
                    if variant:
                        message.set_unrecognized_field(key, value, variant)
                continue

            # This is just for consistency with the old behavior.
            if value == []:
                continue

            try:
                setattr(message, field.name, self.decode_field(field, value))
            except messages.DecodeError:
                # Save unknown enum values.
                if not is_enum_field:
                    raise
                variant = self.__find_variant(value)
                if variant:
                    message.set_unrecognized_field(key, value, variant)
        return message

    def decode_field(self, field, value):
        """Decode a JSON value to a python value.

        Args:
          field: A ProtoRPC field instance.
          value: A serialized JSON value.

        Returns:
          A Python value compatible with field.

        Raises:
          messages.DecodeError if the value cannot be converted for field.
        """
        if isinstance(field, messages.EnumField):
            try:
                return field.type(value)
            except TypeError:
                raise messages.DecodeError(
                    'Invalid enum value "%s"' % (value or ''))
        elif isinstance(field, messages.BytesField):
            try:
                return base64.b64decode(value)
            except (binascii.Error, TypeError) as err:
                raise messages.DecodeError('Base64 decoding error: %s' % err)
        elif isinstance(field, message_types.DateTimeField):
            try:
                return util.decode_datetime(value, truncate_time=True)
            except ValueError as err:
                raise messages.DecodeError(err)
        elif (isinstance(field, messages.MessageField) and
              issubclass(field.type, messages.Message)):
            return self.__decode_dictionary(field.type, value)
        elif (isinstance(field, messages.FloatField) and
              isinstance(value, (six.integer_types, six.string_types))):
            # Narrowed from a bare except: float() on an int/str input can
            # only raise ValueError (bad string) or OverflowError (huge
            # int).  Fall through and return the raw value on failure.
            try:
                return float(value)
            except (ValueError, OverflowError):
                pass
        elif (isinstance(field, messages.IntegerField) and
              isinstance(value, six.string_types)):
            # Narrowed from a bare except: int() on a str raises ValueError.
            try:
                return int(value)
            except ValueError:
                pass
        return value

    @staticmethod
    def get_default():
        """Get the default instance of ProtoJson."""
        try:
            return ProtoJson.__default
        except AttributeError:
            # Created lazily on first access.
            ProtoJson.__default = ProtoJson()
        return ProtoJson.__default

    @staticmethod
    def set_default(protocol):
        """Set the default instance of ProtoJson.

        Args:
          protocol: A ProtoJson instance.

        Raises:
          TypeError: If protocol is not a ProtoJson instance.
        """
        if not isinstance(protocol, ProtoJson):
            raise TypeError('Expected protocol of type ProtoJson')
        ProtoJson.__default = protocol
# Module-level conveniences bound to the default ProtoJson instance, so
# callers can use protojson.encode_message(...) / decode_message(...)
# without constructing a protocol object.
CONTENT_TYPE = ProtoJson.CONTENT_TYPE
ALTERNATIVE_CONTENT_TYPES = ProtoJson.ALTERNATIVE_CONTENT_TYPES
encode_message = ProtoJson.get_default().encode_message
decode_message = ProtoJson.get_default().decode_message
| python | Apache-2.0 | 1b14f58b3c895f542c724028dd0fb8ae0d816510 | 2026-01-05T07:12:01.390580Z | false |
google/apitools | https://github.com/google/apitools/blob/1b14f58b3c895f542c724028dd0fb8ae0d816510/apitools/base/protorpclite/descriptor.py | apitools/base/protorpclite/descriptor.py | #!/usr/bin/env python
#
# Copyright 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Services descriptor definitions.
Contains message definitions and functions for converting
service classes into transmittable message format.
Describing an Enum instance, Enum class, Field class or Message class will
generate an appropriate descriptor object that describes that class.
This message can itself be used to transmit information to clients wishing
to know the description of an enum value, enum, field or message without
needing to download the source code. This format is also compatible with
other, non-Python languages.
The descriptors are modeled to be binary compatible with
https://github.com/google/protobuf
NOTE: The names of types and fields are not always the same between these
descriptors and the ones defined in descriptor.proto. This was done in order
to make source code files that use these descriptors easier to read. For
example, it is not necessary to prefix TYPE to all the values in
FieldDescriptor.Variant as is done in descriptor.proto
FieldDescriptorProto.Type.
Example:
class Pixel(messages.Message):
x = messages.IntegerField(1, required=True)
y = messages.IntegerField(2, required=True)
color = messages.BytesField(3)
# Describe Pixel class using message descriptor.
fields = []
field = FieldDescriptor()
field.name = 'x'
field.number = 1
field.label = FieldDescriptor.Label.REQUIRED
field.variant = FieldDescriptor.Variant.INT64
fields.append(field)
field = FieldDescriptor()
field.name = 'y'
field.number = 2
field.label = FieldDescriptor.Label.REQUIRED
field.variant = FieldDescriptor.Variant.INT64
fields.append(field)
field = FieldDescriptor()
field.name = 'color'
field.number = 3
field.label = FieldDescriptor.Label.OPTIONAL
field.variant = FieldDescriptor.Variant.BYTES
fields.append(field)
message = MessageDescriptor()
message.name = 'Pixel'
message.fields = fields
# Describing is the equivalent of building the above message.
message == describe_message(Pixel)
Public Classes:
EnumValueDescriptor: Describes Enum values.
EnumDescriptor: Describes Enum classes.
FieldDescriptor: Describes field instances.
FileDescriptor: Describes a single 'file' unit.
FileSet: Describes a collection of file descriptors.
MessageDescriptor: Describes Message classes.
Public Functions:
describe_enum_value: Describe an individual enum-value.
describe_enum: Describe an Enum class.
describe_field: Describe a Field definition.
describe_file: Describe a 'file' unit from a Python module or object.
describe_file_set: Describe a file set from a list of modules or objects.
describe_message: Describe a Message definition.
"""
import codecs
import types
import six
from apitools.base.protorpclite import messages
from apitools.base.protorpclite import util
__all__ = [
'EnumDescriptor',
'EnumValueDescriptor',
'FieldDescriptor',
'MessageDescriptor',
'FileDescriptor',
'FileSet',
'DescriptorLibrary',
'describe_enum',
'describe_enum_value',
'describe_field',
'describe_message',
'describe_file',
'describe_file_set',
'describe',
'import_descriptor_loader',
]
# NOTE: MessageField is missing because message fields cannot have
# a default value at this time.
# TODO(rafek): Support default message values.
#
# Map to functions that convert default values of fields of a given type
# to a string. The function must return a value that is compatible with
# FieldDescriptor.default_value and therefore a unicode string.
_DEFAULT_TO_STRING_MAP = {
    messages.IntegerField: six.text_type,
    messages.FloatField: six.text_type,
    messages.BooleanField: lambda value: value and u'true' or u'false',
    # NOTE(review): codecs.escape_encode/escape_decode are undocumented
    # CPython internals used for C-style string escaping -- confirm they
    # exist on all targeted interpreters.
    messages.BytesField: lambda value: codecs.escape_encode(value)[0],
    messages.StringField: lambda value: value,
    messages.EnumField: lambda value: six.text_type(value.number),
}

# Inverse of _DEFAULT_TO_STRING_MAP: parse a FieldDescriptor.default_value
# string back into the python default for each supported field type.
_DEFAULT_FROM_STRING_MAP = {
    messages.IntegerField: int,
    messages.FloatField: float,
    messages.BooleanField: lambda value: value == u'true',
    messages.BytesField: lambda value: codecs.escape_decode(value)[0],
    messages.StringField: lambda value: value,
    messages.EnumField: int,
}
class EnumValueDescriptor(messages.Message):
    """Enum value descriptor.

    Fields:
      name: Name of enumeration value.
      number: Number of enumeration value.
    """

    # TODO(rafek): Why are these listed as optional in descriptor.proto.
    # Harmonize?
    name = messages.StringField(1, required=True)
    number = messages.IntegerField(2,
                                   required=True,
                                   variant=messages.Variant.INT32)
class EnumDescriptor(messages.Message):
    """Enum class descriptor.

    Fields:
      name: Name of Enum without any qualification.
      values: Values defined by Enum class.
    """

    name = messages.StringField(1)
    values = messages.MessageField(EnumValueDescriptor, 2, repeated=True)
class FieldDescriptor(messages.Message):
    """Field definition descriptor.

    Enums:
      Variant: Wire format hint sub-types for field.
      Label: Values for optional, required and repeated fields.

    Fields:
      name: Name of field.
      number: Number of field.
      variant: Variant of field.
      type_name: Type name for message and enum fields.
      default_value: String representation of default value.
    """

    Variant = messages.Variant  # pylint:disable=invalid-name

    class Label(messages.Enum):
        """Field label."""
        OPTIONAL = 1
        REQUIRED = 2
        REPEATED = 3

    name = messages.StringField(1, required=True)
    # NOTE(review): field number 2 is unused here -- presumably kept free
    # for binary compatibility with descriptor.proto; confirm.
    number = messages.IntegerField(3,
                                   required=True,
                                   variant=messages.Variant.INT32)
    label = messages.EnumField(Label, 4, default=Label.OPTIONAL)
    variant = messages.EnumField(Variant, 5)
    type_name = messages.StringField(6)

    # For numeric types, contains the original text representation of
    # the value.
    # For booleans, "true" or "false".
    # For strings, contains the default text contents (not escaped in any
    # way).
    # For bytes, contains the C escaped value. All bytes < 128 that are
    # traditionally considered unprintable are also escaped.
    default_value = messages.StringField(7)
class MessageDescriptor(messages.Message):
    """Message definition descriptor.

    Fields:
      name: Name of Message without any qualification.
      fields: Fields defined for message.
      message_types: Nested Message classes defined on message.
      enum_types: Nested Enum classes defined on message.
    """

    name = messages.StringField(1)
    fields = messages.MessageField(FieldDescriptor, 2, repeated=True)
    # Referenced by fully qualified string name because the class is
    # still being defined here (self-referential message field).
    message_types = messages.MessageField(
        'apitools.base.protorpclite.descriptor.MessageDescriptor', 3,
        repeated=True)
    enum_types = messages.MessageField(EnumDescriptor, 4, repeated=True)
class FileDescriptor(messages.Message):
    """Description of file containing protobuf definitions.

    Fields:
      package: Fully qualified name of package that definitions belong to.
      message_types: Message definitions contained in file.
      enum_types: Enum definitions contained in file.
    """

    package = messages.StringField(2)

    # TODO(rafek): Add dependency field

    message_types = messages.MessageField(MessageDescriptor, 4, repeated=True)
    enum_types = messages.MessageField(EnumDescriptor, 5, repeated=True)
class FileSet(messages.Message):
    """A collection of FileDescriptors.

    Fields:
      files: Files in file-set.
    """

    files = messages.MessageField(FileDescriptor, 1, repeated=True)
def describe_enum_value(enum_value):
    """Build descriptor for Enum instance.

    Args:
      enum_value: Enum value to provide descriptor for.

    Returns:
      Initialized EnumValueDescriptor instance describing the Enum value.
    """
    descriptor = EnumValueDescriptor()
    descriptor.name = six.text_type(enum_value.name)
    descriptor.number = enum_value.number
    return descriptor
def describe_enum(enum_definition):
    """Build descriptor for Enum class.

    Args:
      enum_definition: Enum class to provide descriptor for.

    Returns:
      Initialized EnumDescriptor instance describing the Enum class.
    """
    descriptor = EnumDescriptor()
    # Strip any qualification; only the leaf class name is kept.
    descriptor.name = enum_definition.definition_name().split('.')[-1]
    described_values = [
        describe_enum_value(enum_definition.lookup_by_number(number))
        for number in sorted(enum_definition.numbers())]
    if described_values:
        descriptor.values = described_values
    return descriptor
def describe_field(field_definition):
    """Build descriptor for Field instance.

    Args:
      field_definition: Field instance to provide descriptor for.

    Returns:
      Initialized FieldDescriptor instance describing the Field instance.
    """
    descriptor = FieldDescriptor()
    descriptor.name = field_definition.name
    descriptor.number = field_definition.number
    descriptor.variant = field_definition.variant

    # Message and enum fields carry the qualified name of their type.
    if isinstance(field_definition, messages.EnumField):
        descriptor.type_name = field_definition.type.definition_name()

    if isinstance(field_definition, messages.MessageField):
        descriptor.type_name = (
            field_definition.message_type.definition_name())

    if field_definition.default is not None:
        to_string = _DEFAULT_TO_STRING_MAP[type(field_definition)]
        descriptor.default_value = to_string(field_definition.default)

    # Set label: repeated wins over required; everything else is optional.
    if field_definition.repeated:
        descriptor.label = FieldDescriptor.Label.REPEATED
    elif field_definition.required:
        descriptor.label = FieldDescriptor.Label.REQUIRED
    else:
        descriptor.label = FieldDescriptor.Label.OPTIONAL

    return descriptor
def describe_message(message_definition):
    """Build descriptor for Message class.

    Args:
      message_definition: Message class to provide descriptor for.

    Returns:
      Initialized MessageDescriptor instance describing the Message class.
    """
    descriptor = MessageDescriptor()
    descriptor.name = message_definition.definition_name().split('.')[-1]

    sorted_fields = sorted(message_definition.all_fields(),
                           key=lambda field: field.number)
    if sorted_fields:
        descriptor.fields = [describe_field(f) for f in sorted_fields]

    # Nested message classes, if the definition declares any.
    try:
        nested_messages = message_definition.__messages__
    except AttributeError:
        pass
    else:
        descriptor.message_types = [
            describe_message(getattr(message_definition, name))
            for name in nested_messages]

    # Nested enum classes, if the definition declares any.
    try:
        nested_enums = message_definition.__enums__
    except AttributeError:
        pass
    else:
        descriptor.enum_types = [
            describe_enum(getattr(message_definition, name))
            for name in nested_enums]

    return descriptor
def describe_file(module):
    """Build a file from a specified Python module.

    Args:
      module: Python module to describe.

    Returns:
      Initialized FileDescriptor instance describing the module.
    """
    descriptor = FileDescriptor()
    descriptor.package = util.get_package_for_module(module)
    if not descriptor.package:
        descriptor.package = None

    message_descriptors = []
    enum_descriptors = []

    # Need to iterate over all top level attributes of the module looking
    # for message and enum definitions. Each definition must be itself
    # described.
    for name in sorted(dir(module)):
        value = getattr(module, name)
        if not isinstance(value, type):
            continue
        if issubclass(value, messages.Message):
            message_descriptors.append(describe_message(value))
        elif issubclass(value, messages.Enum):
            enum_descriptors.append(describe_enum(value))

    if message_descriptors:
        descriptor.message_types = message_descriptors
    if enum_descriptors:
        descriptor.enum_types = enum_descriptors
    return descriptor
def describe_file_set(modules):
    """Build a file set from a specified Python modules.

    Args:
      modules: Iterable of Python module to describe.

    Returns:
      Initialized FileSet instance describing the modules.
    """
    descriptor = FileSet()
    described_files = [describe_file(module) for module in modules]
    if described_files:
        descriptor.files = described_files
    return descriptor
def describe(value):
    """Describe any value as a descriptor.

    Helper function for describing any object with an appropriate
    descriptor object.

    Args:
      value: Value to describe as a descriptor.

    Returns:
      Descriptor message class if object is describable as a descriptor,
      else None.
    """
    # Instances (module, field, enum value) are checked before classes.
    if isinstance(value, types.ModuleType):
        return describe_file(value)
    if isinstance(value, messages.Field):
        return describe_field(value)
    if isinstance(value, messages.Enum):
        return describe_enum_value(value)
    if isinstance(value, type):
        if issubclass(value, messages.Message):
            return describe_message(value)
        if issubclass(value, messages.Enum):
            return describe_enum(value)
    return None
@util.positional(1)
def import_descriptor_loader(definition_name, importer=__import__):
    """Find objects by importing modules as needed.

    A definition loader is a function that resolves a definition name to a
    descriptor.

    The import finder resolves definitions to their names by importing
    modules when necessary.

    Args:
      definition_name: Name of definition to find.
      importer: Import function used for importing new modules.

    Returns:
      Appropriate descriptor for any describable type located by name.

    Raises:
      DefinitionNotFoundError when a name does not refer to either a
        definition or a module.
    """
    # Attempt to import descriptor as a module.
    if definition_name.startswith('.'):
        # Only a single leading dot is stripped; a name that still starts
        # with '.' afterwards skips the module-import attempt below.
        definition_name = definition_name[1:]
    if not definition_name.startswith('.'):
        leaf = definition_name.split('.')[-1]
        if definition_name:
            try:
                module = importer(definition_name, '', '', [leaf])
            except ImportError:
                # Not importable as a module; fall through to definition
                # lookup below.
                pass
            else:
                return describe(module)

    try:
        # Attempt to use messages.find_definition to find item.
        return describe(messages.find_definition(definition_name,
                                                 importer=__import__))
    except messages.DefinitionNotFoundError as err:
        # There are things that find_definition will not find, but if
        # the parent is loaded, its children can be searched for a
        # match.
        split_name = definition_name.rsplit('.', 1)
        if len(split_name) > 1:
            parent, child = split_name
            try:
                # Recursively resolve the parent definition.
                parent_definition = import_descriptor_loader(
                    parent, importer=importer)
            except messages.DefinitionNotFoundError:
                # Fall through to original error.
                pass
            else:
                # Check the parent definition for a matching descriptor.
                if isinstance(parent_definition, EnumDescriptor):
                    search_list = parent_definition.values or []
                elif isinstance(parent_definition, MessageDescriptor):
                    search_list = parent_definition.fields or []
                else:
                    search_list = []
                for definition in search_list:
                    if definition.name == child:
                        return definition

        # Still didn't find. Reraise original exception.
        raise err
class DescriptorLibrary(object):
    """A descriptor library is an object that contains known definitions.

    A descriptor library contains a cache of descriptor objects mapped by
    definition name. It contains all types of descriptors except for
    file sets.

    When a definition name is requested that the library does not know
    about it can be provided with a descriptor loader which attempt to
    resolve the missing descriptor.
    """

    @util.positional(1)
    def __init__(self,
                 descriptors=None,
                 descriptor_loader=import_descriptor_loader):
        """Constructor.

        Args:
          descriptors: A dictionary or dictionary-like object that can be
            used to store and cache descriptors by definition name.
          descriptor_loader: A function used for resolving missing
            descriptors. The function takes a definition name as its
            parameter and returns an appropriate descriptor. It may raise
            DefinitionNotFoundError.
        """
        self.__descriptor_loader = descriptor_loader
        self.__descriptors = descriptors or {}

    def lookup_descriptor(self, definition_name):
        """Lookup descriptor by name.

        Get descriptor from library by name. If descriptor is not found
        will attempt to find via descriptor loader if provided.

        Args:
          definition_name: Definition name to find.

        Returns:
          Descriptor that describes definition name.

        Raises:
          DefinitionNotFoundError if no descriptor exists for definition
            name.
        """
        try:
            return self.__descriptors[definition_name]
        except KeyError:
            pass

        if self.__descriptor_loader:
            definition = self.__descriptor_loader(definition_name)
            # Cache the loaded definition for subsequent lookups.
            self.__descriptors[definition_name] = definition
            return definition
        else:
            raise messages.DefinitionNotFoundError(
                'Could not find definition for %s' % definition_name)

    def lookup_package(self, definition_name):
        """Determines the package name for any definition.

        Determine the package that any definition name belongs to. May
        check parent for package name and will resolve missing
        descriptors if provided descriptor loader.

        Args:
          definition_name: Definition name to find package for.

        Returns:
          The package of the enclosing FileDescriptor, or None when no
          FileDescriptor is found while walking up the dotted name.
        """
        # Strip trailing name components until a FileDescriptor is found;
        # its package attribute is the answer.
        while True:
            descriptor = self.lookup_descriptor(definition_name)
            if isinstance(descriptor, FileDescriptor):
                return descriptor.package
            else:
                index = definition_name.rfind('.')
                if index < 0:
                    return None
                definition_name = definition_name[:index]
| python | Apache-2.0 | 1b14f58b3c895f542c724028dd0fb8ae0d816510 | 2026-01-05T07:12:01.390580Z | false |
google/apitools | https://github.com/google/apitools/blob/1b14f58b3c895f542c724028dd0fb8ae0d816510/apitools/base/protorpclite/util_test.py | apitools/base/protorpclite/util_test.py | #!/usr/bin/env python
#
# Copyright 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Tests for apitools.base.protorpclite.util."""
import datetime
import sys
import types
import unittest
import six
from apitools.base.protorpclite import test_util
from apitools.base.protorpclite import util
class ModuleInterfaceTest(test_util.ModuleInterfaceTest,
                          test_util.TestCase):
    # Runs the shared module-interface checks from test_util against the
    # util module.
    MODULE = util
class UtilTest(test_util.TestCase):
    """Tests for the util.positional decorator."""

    def testDecoratedFunction_LengthZero(self):
        # positional(0): every argument must be passed by keyword.
        @util.positional(0)
        def fn(kwonly=1):
            return [kwonly]
        self.assertEqual([1], fn())
        self.assertEqual([2], fn(kwonly=2))
        self.assertRaisesWithRegexpMatch(TypeError,
                                         r'fn\(\) takes at most 0 positional '
                                         r'arguments \(1 given\)',
                                         fn, 1)

    def testDecoratedFunction_LengthOne(self):
        # positional(1): only the first argument may be positional.
        @util.positional(1)
        def fn(pos, kwonly=1):
            return [pos, kwonly]
        self.assertEqual([1, 1], fn(1))
        self.assertEqual([2, 2], fn(2, kwonly=2))
        self.assertRaisesWithRegexpMatch(TypeError,
                                         r'fn\(\) takes at most 1 positional '
                                         r'argument \(2 given\)',
                                         fn, 2, 3)

    def testDecoratedFunction_LengthTwoWithDefault(self):
        @util.positional(2)
        def fn(pos1, pos2=1, kwonly=1):
            return [pos1, pos2, kwonly]
        self.assertEqual([1, 1, 1], fn(1))
        self.assertEqual([2, 2, 1], fn(2, 2))
        self.assertEqual([2, 3, 4], fn(2, 3, kwonly=4))
        self.assertRaisesWithRegexpMatch(TypeError,
                                         r'fn\(\) takes at most 2 positional '
                                         r'arguments \(3 given\)',
                                         fn, 2, 3, 4)

    def testDecoratedMethod(self):
        # The positional limit counts self, so positional(2) permits
        # self plus one positional argument.
        class MyClass(object):
            @util.positional(2)
            def meth(self, pos1, kwonly=1):
                return [pos1, kwonly]
        self.assertEqual([1, 1], MyClass().meth(1))
        self.assertEqual([2, 2], MyClass().meth(2, kwonly=2))
        self.assertRaisesWithRegexpMatch(
            TypeError,
            r'meth\(\) takes at most 2 positional arguments \(3 given\)',
            MyClass().meth, 2, 3)

    def testDefaultDecoration(self):
        # Bare @util.positional infers the limit from the number of
        # parameters without default values.
        @util.positional
        def fn(a, b, c=None):
            return a, b, c
        self.assertEqual((1, 2, 3), fn(1, 2, c=3))
        self.assertEqual((3, 4, None), fn(3, b=4))
        self.assertRaisesWithRegexpMatch(TypeError,
                                         r'fn\(\) takes at most 2 positional '
                                         r'arguments \(3 given\)',
                                         fn, 2, 3, 4)

    def testDefaultDecorationNoKwdsFails(self):
        # Bare decoration is invalid when the function has no defaults.
        def fn(a):
            return a
        self.assertRaisesRegex(
            ValueError,
            ('Functions with no keyword arguments must specify '
             'max_positional_args'),
            util.positional, fn)

    def testDecoratedFunctionDocstring(self):
        # The decorator must preserve the wrapped function's docstring.
        @util.positional(0)
        def fn(kwonly=1):
            """fn docstring."""
            return [kwonly]
        self.assertEqual('fn docstring.', fn.__doc__)
class GetPackageForModuleTest(test_util.TestCase):
    """Tests for util.get_package_for_module's package-name resolution."""

    def setUp(self):
        # Snapshot sys.modules so registrations made by CreateModule can
        # be undone in tearDown.
        self.original_modules = dict(sys.modules)

    def tearDown(self):
        sys.modules.clear()
        sys.modules.update(self.original_modules)

    def CreateModule(self, name, file_name=None):
        # NOTE(review): file_name is computed but never attached to the
        # module; presumably it was meant to become module.__file__ --
        # confirm before relying on this parameter.
        if file_name is None:
            file_name = '%s.py' % name
        module = types.ModuleType(name)
        sys.modules[name] = module
        return module

    def assertPackageEquals(self, expected, actual):
        self.assertEqual(expected, actual)
        if actual is not None:
            # Package names must always be unicode strings.
            self.assertTrue(isinstance(actual, six.text_type))

    def testByString(self):
        module = self.CreateModule('service_module')
        module.package = 'my_package'
        self.assertPackageEquals('my_package',
                                 util.get_package_for_module('service_module'))

    def testModuleNameNotInSys(self):
        self.assertPackageEquals(None,
                                 util.get_package_for_module('service_module'))

    def testHasPackage(self):
        module = self.CreateModule('service_module')
        module.package = 'my_package'
        self.assertPackageEquals(
            'my_package', util.get_package_for_module(module))

    def testHasModuleName(self):
        module = self.CreateModule('service_module')
        self.assertPackageEquals('service_module',
                                 util.get_package_for_module(module))

    def testIsMain(self):
        # A __main__ module falls back to the basename of its file.
        module = self.CreateModule('__main__')
        module.__file__ = '/bing/blam/bloom/blarm/my_file.py'
        self.assertPackageEquals(
            'my_file', util.get_package_for_module(module))

    def testIsMainCompiled(self):
        module = self.CreateModule('__main__')
        module.__file__ = '/bing/blam/bloom/blarm/my_file.pyc'
        self.assertPackageEquals(
            'my_file', util.get_package_for_module(module))

    def testNoExtension(self):
        module = self.CreateModule('__main__')
        module.__file__ = '/bing/blam/bloom/blarm/my_file'
        self.assertPackageEquals(
            'my_file', util.get_package_for_module(module))

    def testNoPackageAtAll(self):
        module = self.CreateModule('__main__')
        self.assertPackageEquals(
            '__main__', util.get_package_for_module(module))
class DateTimeTests(test_util.TestCase):
    """Tests for util's RFC 3339 datetime decoding helpers."""

    def testDecodeDateTime(self):
        """Test that a RFC 3339 datetime string is decoded properly."""
        for datetime_string, datetime_vals in (
                ('2012-09-30T15:31:50.262',
                 (2012, 9, 30, 15, 31, 50, 262000)),
                ('2012-09-30T15:31:50', (2012, 9, 30, 15, 31, 50, 0))):
            decoded = util.decode_datetime(datetime_string)
            expected = datetime.datetime(*datetime_vals)
            self.assertEqual(expected, decoded)

    def testDecodeDateTimeWithTruncateTime(self):
        """Test that nanosec time is truncated with truncate_time flag."""
        decoded = util.decode_datetime('2012-09-30T15:31:50.262343123',
                                       truncate_time=True)
        expected = datetime.datetime(2012, 9, 30, 15, 31, 50, 262343)
        self.assertEqual(expected, decoded)

    def testDateTimeTimeZones(self):
        """Test that a datetime string with a timezone is decoded correctly."""
        # Covers +/- offsets, lowercase 't'/'z', and -00:00.
        tests = (
            ('2012-09-30T15:31:50.262-06:00',
             (2012, 9, 30, 15, 31, 50, 262000, util.TimeZoneOffset(-360))),
            ('2012-09-30T15:31:50.262+01:30',
             (2012, 9, 30, 15, 31, 50, 262000, util.TimeZoneOffset(90))),
            ('2012-09-30T15:31:50+00:05',
             (2012, 9, 30, 15, 31, 50, 0, util.TimeZoneOffset(5))),
            ('2012-09-30T15:31:50+00:00',
             (2012, 9, 30, 15, 31, 50, 0, util.TimeZoneOffset(0))),
            ('2012-09-30t15:31:50-00:00',
             (2012, 9, 30, 15, 31, 50, 0, util.TimeZoneOffset(0))),
            ('2012-09-30t15:31:50z',
             (2012, 9, 30, 15, 31, 50, 0, util.TimeZoneOffset(0))),
            ('2012-09-30T15:31:50-23:00',
             (2012, 9, 30, 15, 31, 50, 0, util.TimeZoneOffset(-1380))))
        for datetime_string, datetime_vals in tests:
            decoded = util.decode_datetime(datetime_string)
            expected = datetime.datetime(*datetime_vals)
            self.assertEqual(expected, decoded)

    def testDecodeDateTimeInvalid(self):
        """Test that decoding malformed datetime strings raises exceptions."""
        for datetime_string in ('invalid',
                                '2012-09-30T15:31:50.',
                                '-08:00 2012-09-30T15:31:50.262',
                                '2012-09-30T15:31',
                                '2012-09-30T15:31Z',
                                '2012-09-30T15:31:50ZZ',
                                '2012-09-30T15:31:50.262 blah blah -08:00',
                                '1000-99-99T25:99:99.999-99:99',
                                '2012-09-30T15:31:50.262343123'):
            self.assertRaises(
                ValueError, util.decode_datetime, datetime_string)

    def testTimeZoneOffsetDelta(self):
        """Test that delta works with TimeZoneOffset."""
        time_zone = util.TimeZoneOffset(datetime.timedelta(minutes=3))
        # utcoffset expects a naive datetime, so the epoch is built as an
        # aware UTC value and then stripped of its tzinfo.
        epoch = time_zone.utcoffset(datetime.datetime.fromtimestamp(0, tz=datetime.timezone.utc).replace(tzinfo=None))
        self.assertEqual(180, util.total_seconds(epoch))
def main():
    """Run this module's test suite via unittest's CLI entry point."""
    unittest.main()
# Allow running this test file directly as a script.
if __name__ == '__main__':
    main()
| python | Apache-2.0 | 1b14f58b3c895f542c724028dd0fb8ae0d816510 | 2026-01-05T07:12:01.390580Z | false |
google/apitools | https://github.com/google/apitools/blob/1b14f58b3c895f542c724028dd0fb8ae0d816510/apitools/base/protorpclite/util.py | apitools/base/protorpclite/util.py | #!/usr/bin/env python
#
# Copyright 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Common utility library."""
from __future__ import with_statement
import datetime
import functools
import inspect
import logging
import os
import re
import sys
import six
__all__ = [
'Error',
'decode_datetime',
'get_package_for_module',
'positional',
'TimeZoneOffset',
'total_seconds',
]
class Error(Exception):
    """Base class for exceptions raised by the protorpclite package."""
# Regular expression matching an RFC 3339 time zone suffix anchored at the
# end of a string: either a literal 'Z' (UTC) or a signed HH:MM offset.
# Compiled with IGNORECASE, so a lowercase 'z' is accepted as well.  Note
# that '$' follows immediately, so 'Z' cannot be combined with an offset
# (the old example 'Z12:00' could never match).
_TIME_ZONE_RE_STRING = r"""
  # Examples:
  #   Z
  #   +01:00
  #   -05:30
  ((?P<z>Z) | (?P<sign>[-+])
   (?P<hours>\d\d) :
   (?P<minutes>\d\d))$
"""
_TIME_ZONE_RE = re.compile(_TIME_ZONE_RE_STRING, re.IGNORECASE | re.VERBOSE)
def positional(max_positional_args):
    """A decorator that declares only the first N arguments may be positional.

    This decorator makes it easy to support Python 3 style keyword-only
    parameters.  For example, in Python 3 it is possible to write:

        def fn(pos1, *, kwonly1=None, kwonly2=None):
            ...

    All named parameters after * must be a keyword:

        fn(10, 'kw1', 'kw2')   # Raises exception.
        fn(10, kwonly1='kw1')  # Ok.

    Example:
        To define a function like above, do:

            @positional(1)
            def fn(pos1, kwonly1=None, kwonly2=None):
                ...

        If no default value is provided to a keyword argument, it
        becomes a required keyword argument:

            @positional(0)
            def fn(required_kw):
                ...

        This must be called with the keyword parameter:

            fn()                # Raises exception.
            fn(10)              # Raises exception.
            fn(required_kw=10)  # Ok.

        When defining instance or class methods always remember to account
        for 'self' and 'cls':

            class MyClass(object):

                @positional(2)
                def my_method(self, pos1, kwonly1=None):
                    ...

                @classmethod
                @positional(2)
                def my_method(cls, pos1, kwonly1=None):
                    ...

    One can omit the argument to 'positional' altogether, and then no
    arguments with default values may be passed positionally.  This
    would be equivalent to placing a '*' before the first argument
    with a default value in Python 3.  If there are no arguments with
    default values, and no argument is given to 'positional', an error
    is raised.

        @positional
        def fn(arg1, arg2, required_kw1=None, required_kw2=0):
            ...

        fn(1, 3, 5)               # Raises exception.
        fn(1, 3)                  # Ok.
        fn(1, 3, required_kw1=5)  # Ok.

    Args:
        max_positional_args: Maximum number of positional arguments.  All
            parameters after this index must be keyword only.  May also be
            the decorated function itself when used as a bare '@positional'
            decorator.

    Returns:
        A decorator that prevents using arguments after max_positional_args
        from being used as positional parameters.

    Raises:
        TypeError if a keyword-only argument is provided as a positional
            parameter.
        ValueError if no maximum number of arguments is provided and the
            function has no arguments with default values.
    """
    def positional_decorator(wrapped):
        """Creates a function wrapper to enforce number of arguments."""
        @functools.wraps(wrapped)
        def positional_wrapper(*args, **kwargs):
            if len(args) > max_positional_args:
                plural_s = ''
                if max_positional_args != 1:
                    plural_s = 's'
                raise TypeError('%s() takes at most %d positional argument%s '
                                '(%d given)' % (wrapped.__name__,
                                                max_positional_args,
                                                plural_s, len(args)))
            return wrapped(*args, **kwargs)
        return positional_wrapper
    if isinstance(max_positional_args, six.integer_types):
        return positional_decorator
    else:
        # Bare '@positional' usage: the argument is actually the decorated
        # function.  Infer the positional limit from the number of
        # parameters that have no default value.
        args, _, _, defaults, *_ = inspect.getfullargspec(max_positional_args)
        if defaults is None:
            raise ValueError(
                'Functions with no keyword arguments must specify '
                'max_positional_args')
        return positional(len(args) - len(defaults))(max_positional_args)
@positional(1)
def get_package_for_module(module):
    """Get package name for a module.

    Helper calculates the package name of a module.

    Args:
        module: Module to get name for.  If module is a string, try to find
            module in sys.modules.

    Returns:
        If module contains 'package' attribute, uses that as package name.
        Else, if module is not the '__main__' module, the module __name__.
        Else, the base name of the module file name.  Else None.
    """
    if isinstance(module, six.string_types):
        try:
            module = sys.modules[module]
        except KeyError:
            return None
    try:
        return six.text_type(module.package)
    except AttributeError:
        if module.__name__ == '__main__':
            try:
                file_name = module.__file__
            except AttributeError:
                pass
            else:
                # Use the file's base name without its extension.
                # os.path.splitext always returns a (root, ext) pair, so the
                # previous length-1 special case was unreachable dead code.
                base_name = os.path.basename(file_name)
                return six.text_type(os.path.splitext(base_name)[0])
        return six.text_type(module.__name__)
def total_seconds(offset):
    """Backport of offset.total_seconds() from python 2.7+."""
    # Accumulate the whole offset in integer microseconds first so the
    # single float division at the end is the only rounding step.
    total_microseconds = ((offset.days * 24 * 60 * 60 + offset.seconds)
                          * 10**6 + offset.microseconds)
    return total_microseconds / (10**6 * 1.0)
class TimeZoneOffset(datetime.tzinfo):
    """Time zone information as encoded/decoded for DateTimeFields."""

    def __init__(self, offset):
        """Initialize a time zone offset.

        Args:
            offset: Integer or timedelta time zone offset, in minutes from
                UTC.  This can be negative.
        """
        super(TimeZoneOffset, self).__init__()
        if isinstance(offset, datetime.timedelta):
            # timedelta.total_seconds() is stdlib since Python 2.7 and
            # computes exactly what the local total_seconds() helper does,
            # so this class no longer depends on the module-level helper.
            offset = offset.total_seconds() / 60
        self.__offset = offset

    def utcoffset(self, _):
        """Get a timedelta with the time zone's offset from UTC.

        Returns:
            The time zone offset from UTC, as a timedelta.
        """
        return datetime.timedelta(minutes=self.__offset)

    def dst(self, _):
        """Get the daylight savings time offset.

        The formats that ProtoRPC uses to encode/decode time zone
        information don't contain any information about daylight
        savings time.  So this always returns a timedelta of 0.

        Returns:
            A timedelta of 0.
        """
        return datetime.timedelta(0)
def decode_datetime(encoded_datetime, truncate_time=False):
    """Decode a DateTimeField parameter from a string to a python datetime.

    Args:
        encoded_datetime: A string in RFC 3339 format.
        truncate_time: If true, truncate time string with precision higher
            than microsecs.

    Returns:
        A datetime object with the date and time specified in
        encoded_datetime.

    Raises:
        ValueError: If the string is not in a recognized format.
    """
    # Check if the string includes a time zone offset.  Break out the
    # part that doesn't include time zone info.  Convert to uppercase
    # because all our comparisons should be case-insensitive.
    time_zone_match = _TIME_ZONE_RE.search(encoded_datetime)
    if time_zone_match:
        time_string = encoded_datetime[:time_zone_match.start(1)].upper()
    else:
        time_string = encoded_datetime.upper()
    # Pick the strptime format based on whether fractional seconds are
    # present; '%f' would reject a string with no decimal point.
    if '.' in time_string:
        format_string = '%Y-%m-%dT%H:%M:%S.%f'
    else:
        format_string = '%Y-%m-%dT%H:%M:%S'
    try:
        decoded_datetime = datetime.datetime.strptime(time_string,
                                                      format_string)
    except ValueError:
        # '%f' accepts at most 6 fractional-second digits.  When
        # truncate_time is set, retry once with the extra precision
        # dropped; otherwise let the original parse error propagate.
        if truncate_time and '.' in time_string:
            datetime_string, decimal_secs = time_string.split('.')
            if len(decimal_secs) > 6:
                # datetime can handle only microsecs precision.
                truncated_time_string = '{}.{}'.format(
                    datetime_string, decimal_secs[:6])
                decoded_datetime = datetime.datetime.strptime(
                    truncated_time_string,
                    format_string)
                logging.warning(
                    'Truncating the datetime string from %s to %s',
                    time_string, truncated_time_string)
            else:
                raise
        else:
            raise
    if not time_zone_match:
        return decoded_datetime
    # Time zone info was included in the parameter.  Add a tzinfo
    # object to the datetime.  Datetimes can't be changed after they're
    # created, so we'll need to create a new one.
    if time_zone_match.group('z'):
        # A literal 'Z' (or 'z') means UTC, i.e. a zero-minute offset.
        offset_minutes = 0
    else:
        sign = time_zone_match.group('sign')
        hours, minutes = [int(value) for value in
                          time_zone_match.group('hours', 'minutes')]
        offset_minutes = hours * 60 + minutes
        if sign == '-':
            offset_minutes *= -1
    return datetime.datetime(decoded_datetime.year,
                             decoded_datetime.month,
                             decoded_datetime.day,
                             decoded_datetime.hour,
                             decoded_datetime.minute,
                             decoded_datetime.second,
                             decoded_datetime.microsecond,
                             TimeZoneOffset(offset_minutes))
| python | Apache-2.0 | 1b14f58b3c895f542c724028dd0fb8ae0d816510 | 2026-01-05T07:12:01.390580Z | false |
google/apitools | https://github.com/google/apitools/blob/1b14f58b3c895f542c724028dd0fb8ae0d816510/apitools/base/protorpclite/test_util.py | apitools/base/protorpclite/test_util.py | #!/usr/bin/env python
#
# Copyright 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Test utilities for message testing.
Includes module interface test to ensure that public parts of module are
correctly declared in __all__.
Includes message types that correspond to those defined in
services_test.proto.
Includes additional test utilities to make sure encoding/decoding libraries
conform.
"""
import cgi
import datetime
import inspect
import os
import re
import socket
import types
import unittest
import six
from six.moves import range # pylint: disable=redefined-builtin
from apitools.base.protorpclite import message_types
from apitools.base.protorpclite import messages
from apitools.base.protorpclite import util
# Unicode of the word "Russian" in cyrillic.
RUSSIAN = u'\u0440\u0443\u0441\u0441\u043a\u0438\u0439'
# All characters binary value interspersed with nulls.
BINARY = b''.join(six.int2byte(value) + b'\0' for value in range(256))
class TestCase(unittest.TestCase):
    """Base test case providing protorpc-specific assertion helpers."""
    def assertRaisesWithRegexpMatch(self,
                                    exception,
                                    regexp,
                                    function,
                                    *params,
                                    **kwargs):
        """Check that exception is raised and text matches regular expression.

        Args:
            exception: Exception type that is expected.
            regexp: String regular expression that is expected in error
                message.
            function: Callable to test.
            params: Parameters to forward to function.
            kwargs: Keyword arguments to forward to function.
        """
        try:
            function(*params, **kwargs)
            self.fail('Expected exception %s was not raised' %
                      exception.__name__)
        except exception as err:
            # re.match anchors only at the start of the message, so the
            # regexp does not have to cover the whole error text.
            match = bool(re.match(regexp, str(err)))
            self.assertTrue(match, 'Expected match "%s", found "%s"' % (regexp,
                                                                        err))
    def assertHeaderSame(self, header1, header2):
        """Check that two HTTP headers are the same.

        Args:
            header1: Header value string 1.
            header2: header value string 2.
        """
        # NOTE(review): the cgi module is deprecated and removed in
        # Python 3.13 -- confirm a replacement (e.g. email.headerregistry)
        # before upgrading the supported Python versions.
        value1, params1 = cgi.parse_header(header1)
        value2, params2 = cgi.parse_header(header2)
        self.assertEqual(value1, value2)
        self.assertEqual(params1, params2)
    def assertIterEqual(self, iter1, iter2):
        """Check two iterators or iterables are equal independent of order.

        Similar to Python 2.7 assertItemsEqual. Named differently in order
        to avoid potential conflict.

        Args:
            iter1: An iterator or iterable.
            iter2: An iterator or iterable.
        """
        # Quadratic multiset comparison: fine for the small collections
        # used in tests, and requires neither hashable nor orderable items.
        list1 = list(iter1)
        list2 = list(iter2)
        unmatched1 = list()
        while list1:
            item1 = list1[0]
            del list1[0]
            for index in range(len(list2)):
                if item1 == list2[index]:
                    del list2[index]
                    break
            else:
                unmatched1.append(item1)
        error_message = []
        for item in unmatched1:
            error_message.append(
                ' Item from iter1 not found in iter2: %r' % item)
        for item in list2:
            error_message.append(
                ' Item from iter2 not found in iter1: %r' % item)
        if error_message:
            self.fail('Collections not equivalent:\n' +
                      '\n'.join(error_message))
class ModuleInterfaceTest(object):
    """Mixin that validates a module's public interface (__all__).

    Mix this class into a test_util.TestCase subclass (listing
    ModuleInterfaceTest first so its setUp runs) and set the class
    attribute MODULE to the module under test:

        class ModuleInterfaceTest(test_util.ModuleInterfaceTest,
                                  test_util.TestCase):
            MODULE = hello

    The generated tests then verify that:

      * every name listed in __all__ actually exists on the module;
      * every public (non-underscore) attribute that is not an imported
        module appears in __all__;
      * __all__ lists no protected (underscore-prefixed) names; and
      * __all__ exports no module objects.

    Imported modules are exempt from the __all__ requirement, as is the
    legacy 'with_statement' name.  This class deliberately does not
    inherit from TestCase so it is never collected and run on its own.
    """

    def setUp(self):
        """Fail early if the sub-class forgot to define MODULE."""
        if not hasattr(self, 'MODULE'):
            self.fail(
                "You must define 'MODULE' on ModuleInterfaceTest sub-class "
                "%s." % type(self).__name__)

    def testAllExist(self):
        """Every name in __all__ must exist on the module."""
        missing_attributes = [name for name in self.MODULE.__all__
                              if not hasattr(self.MODULE, name)]
        if missing_attributes:
            self.fail('%s of __all__ are not defined in module.' %
                      missing_attributes)

    def testAllExported(self):
        """Every public, non-module attribute must appear in __all__."""
        missing_attributes = []
        for name in dir(self.MODULE):
            if name.startswith('_'):
                continue
            if name in self.MODULE.__all__ or name == 'with_statement':
                continue
            if isinstance(getattr(self.MODULE, name), types.ModuleType):
                continue
            missing_attributes.append(name)
        if missing_attributes:
            self.fail('%s are not modules and not defined in __all__.' %
                      missing_attributes)

    def testNoExportedProtectedVariables(self):
        """__all__ must not list protected (underscore-prefixed) names."""
        protected_variables = [name for name in self.MODULE.__all__
                               if name.startswith('_')]
        if protected_variables:
            self.fail('%s are protected variables and may not be exported.' %
                      protected_variables)

    def testNoExportedModules(self):
        """__all__ must not export module objects."""
        sentinel = object()
        exported_modules = []
        for name in self.MODULE.__all__:
            value = getattr(self.MODULE, name, sentinel)
            # A missing attribute is a different failure mode, covered by
            # testAllExist.
            if value is not sentinel and isinstance(value, types.ModuleType):
                exported_modules.append(name)
        if exported_modules:
            self.fail('%s are modules and may not be exported.' %
                      exported_modules)
class NestedMessage(messages.Message):
    """Simple message that gets nested in another message."""
    # required=True makes encoding an empty instance fail validation.
    a_value = messages.StringField(1, required=True)
class HasNestedMessage(messages.Message):
    """Message that has another message nested in it."""
    # Singular and repeated forms of the same nested message type.
    nested = messages.MessageField(NestedMessage, 1)
    repeated_nested = messages.MessageField(NestedMessage, 2, repeated=True)
class HasDefault(messages.Message):
    """Message with a field carrying a default value."""
    a_value = messages.StringField(1, default=u'a default')
class OptionalMessage(messages.Message):
    """Contains one optional field of every supported variant."""
    class SimpleEnum(messages.Enum):
        """Simple enumeration type."""
        VAL1 = 1
        VAL2 = 2
    # NOTE(review): field number 9 is skipped (enum_value uses 10) --
    # appears intentional, confirm before renumbering.
    double_value = messages.FloatField(1, variant=messages.Variant.DOUBLE)
    float_value = messages.FloatField(2, variant=messages.Variant.FLOAT)
    int64_value = messages.IntegerField(3, variant=messages.Variant.INT64)
    uint64_value = messages.IntegerField(4, variant=messages.Variant.UINT64)
    int32_value = messages.IntegerField(5, variant=messages.Variant.INT32)
    bool_value = messages.BooleanField(6, variant=messages.Variant.BOOL)
    string_value = messages.StringField(7, variant=messages.Variant.STRING)
    bytes_value = messages.BytesField(8, variant=messages.Variant.BYTES)
    enum_value = messages.EnumField(SimpleEnum, 10)
class RepeatedMessage(messages.Message):
    """Contains all message types as repeated fields.

    Mirrors OptionalMessage field-for-field, including the skipped field
    number 9, but with repeated=True on every field.
    """
    class SimpleEnum(messages.Enum):
        """Simple enumeration type."""
        VAL1 = 1
        VAL2 = 2
    double_value = messages.FloatField(1,
                                       variant=messages.Variant.DOUBLE,
                                       repeated=True)
    float_value = messages.FloatField(2,
                                      variant=messages.Variant.FLOAT,
                                      repeated=True)
    int64_value = messages.IntegerField(3,
                                        variant=messages.Variant.INT64,
                                        repeated=True)
    uint64_value = messages.IntegerField(4,
                                         variant=messages.Variant.UINT64,
                                         repeated=True)
    int32_value = messages.IntegerField(5,
                                        variant=messages.Variant.INT32,
                                        repeated=True)
    bool_value = messages.BooleanField(6,
                                       variant=messages.Variant.BOOL,
                                       repeated=True)
    string_value = messages.StringField(7,
                                        variant=messages.Variant.STRING,
                                        repeated=True)
    bytes_value = messages.BytesField(8,
                                      variant=messages.Variant.BYTES,
                                      repeated=True)
    enum_value = messages.EnumField(SimpleEnum,
                                    10,
                                    repeated=True)
class HasOptionalNestedMessage(messages.Message):
    """Message nesting OptionalMessage in singular and repeated fields."""
    nested = messages.MessageField(OptionalMessage, 1)
    repeated_nested = messages.MessageField(OptionalMessage, 2, repeated=True)
# pylint:disable=anomalous-unicode-escape-in-string
class ProtoConformanceTestBase(object):
    """Protocol conformance test base class.

    Each supported protocol should implement two methods that support
    encoding and decoding of Message objects in that format:

        encode_message(message) - Serialize to encoding.
        decode_message(message_type, encoded_message) - Deserialize from
            encoding.

    Tests for the modules where these functions are implemented should
    extend this class in order to support basic behavioral expectations.
    This ensures that protocols correctly encode and decode message
    transparently to the caller.

    In order to support these tests, the base class should also extend
    the TestCase class and implement the following class attributes
    which define the encoded version of certain protocol buffers:

    encoded_partial:
        <OptionalMessage
          double_value: 1.23
          int64_value: -100000000000
          string_value: u"a string"
          enum_value: OptionalMessage.SimpleEnum.VAL2
          >

    encoded_full:
        <OptionalMessage
          double_value: 1.23
          float_value: -2.5
          int64_value: -100000000000
          uint64_value: 102020202020
          int32_value: 1020
          bool_value: true
          string_value: u"a string\u044f"
          bytes_value: b"a bytes\xff\xfe"
          enum_value: OptionalMessage.SimpleEnum.VAL2
          >

    encoded_repeated:
        <RepeatedMessage
          double_value: [1.23, 2.3]
          float_value: [-2.5, 0.5]
          int64_value: [-100000000000, 20]
          uint64_value: [102020202020, 10]
          int32_value: [1020, 718]
          bool_value: [true, false]
          string_value: [u"a string\u044f", u"another string"]
          bytes_value: [b"a bytes\xff\xfe", b"another bytes"]
          enum_value: [OptionalMessage.SimpleEnum.VAL2,
                       OptionalMessage.SimpleEnum.VAL1]
          >

    encoded_nested:
        <HasNestedMessage
          nested: <NestedMessage
            a_value: "a string"
            >
          >

    encoded_repeated_nested:
        <HasNestedMessage
          repeated_nested: [
              <NestedMessage a_value: "a string">,
              <NestedMessage a_value: "another string">
          ]
          >

    unexpected_tag_message:
        An encoded message that has an undefined tag or number in the
        stream.

    encoded_default_assigned:
        <HasDefault
          a_value: "a default"
          >

    encoded_nested_empty:
        <HasOptionalNestedMessage
          nested: <OptionalMessage>
          >

    encoded_repeated_nested_empty:
        <HasOptionalNestedMessage
          repeated_nested: [<OptionalMessage>, <OptionalMessage>]
          >

    encoded_string_types:
        <OptionalMessage
          string_value: 'Latin'
          >

    encoded_invalid_enum:
        <OptionalMessage
          enum_value: (invalid value for serialization type)
          >

    encoded_invalid_repeated_enum:
        <RepeatedMessage
          enum_value: (invalid value for serialization type)
          >
    """
    # Encoding of a message with no fields set; override per protocol.
    encoded_empty_message = ''
    def testEncodeInvalidMessage(self):
        """Encoding a message missing a required field must fail."""
        message = NestedMessage()
        self.assertRaises(messages.ValidationError,
                          self.PROTOLIB.encode_message, message)
    def CompareEncoded(self, expected_encoded, actual_encoded):
        """Compare two encoded protocol values.

        Can be overridden by sub-classes to special case comparison.
        For example, to eliminate white space from output that is not
        relevant to encoding.

        Args:
            expected_encoded: Expected string encoded value.
            actual_encoded: Actual string encoded value.
        """
        self.assertEqual(expected_encoded, actual_encoded)
    def EncodeDecode(self, encoded, expected_message):
        """Round-trip helper: decode, compare, then re-encode and compare."""
        message = self.PROTOLIB.decode_message(type(expected_message), encoded)
        self.assertEqual(expected_message, message)
        self.CompareEncoded(encoded, self.PROTOLIB.encode_message(message))
    def testEmptyMessage(self):
        """An empty message round-trips to the empty encoding."""
        self.EncodeDecode(self.encoded_empty_message, OptionalMessage())
    def testPartial(self):
        """Test message with a few values set."""
        message = OptionalMessage()
        message.double_value = 1.23
        message.int64_value = -100000000000
        message.int32_value = 1020
        message.string_value = u'a string'
        message.enum_value = OptionalMessage.SimpleEnum.VAL2
        self.EncodeDecode(self.encoded_partial, message)
    def testFull(self):
        """Test all types."""
        message = OptionalMessage()
        message.double_value = 1.23
        message.float_value = -2.5
        message.int64_value = -100000000000
        message.uint64_value = 102020202020
        message.int32_value = 1020
        message.bool_value = True
        message.string_value = u'a string\u044f'
        message.bytes_value = b'a bytes\xff\xfe'
        message.enum_value = OptionalMessage.SimpleEnum.VAL2
        self.EncodeDecode(self.encoded_full, message)
    def testRepeated(self):
        """Test repeated fields."""
        message = RepeatedMessage()
        message.double_value = [1.23, 2.3]
        message.float_value = [-2.5, 0.5]
        message.int64_value = [-100000000000, 20]
        message.uint64_value = [102020202020, 10]
        message.int32_value = [1020, 718]
        message.bool_value = [True, False]
        message.string_value = [u'a string\u044f', u'another string']
        message.bytes_value = [b'a bytes\xff\xfe', b'another bytes']
        message.enum_value = [RepeatedMessage.SimpleEnum.VAL2,
                              RepeatedMessage.SimpleEnum.VAL1]
        self.EncodeDecode(self.encoded_repeated, message)
    def testNested(self):
        """Test nested messages."""
        nested_message = NestedMessage()
        nested_message.a_value = u'a string'
        message = HasNestedMessage()
        message.nested = nested_message
        self.EncodeDecode(self.encoded_nested, message)
    def testRepeatedNested(self):
        """Test repeated nested messages."""
        nested_message1 = NestedMessage()
        nested_message1.a_value = u'a string'
        nested_message2 = NestedMessage()
        nested_message2.a_value = u'another string'
        message = HasNestedMessage()
        message.repeated_nested = [nested_message1, nested_message2]
        self.EncodeDecode(self.encoded_repeated_nested, message)
    def testStringTypes(self):
        """Test that encoding str on StringField works."""
        message = OptionalMessage()
        message.string_value = 'Latin'
        self.EncodeDecode(self.encoded_string_types, message)
    def testEncodeUninitialized(self):
        """Test that cannot encode uninitialized message."""
        required = NestedMessage()
        self.assertRaisesWithRegexpMatch(messages.ValidationError,
                                         "Message NestedMessage is missing "
                                         "required field a_value",
                                         self.PROTOLIB.encode_message,
                                         required)
    def testUnexpectedField(self):
        """Test decoding and encoding unexpected fields."""
        loaded_message = self.PROTOLIB.decode_message(
            OptionalMessage, self.unexpected_tag_message)
        # Message should be equal to an empty message, since unknown
        # values aren't included in equality.
        self.assertEqual(OptionalMessage(), loaded_message)
        # Verify that the encoded message matches the source, including the
        # unknown value.
        self.assertEqual(self.unexpected_tag_message,
                         self.PROTOLIB.encode_message(loaded_message))
    def testDoNotSendDefault(self):
        """Test that default is not sent when nothing is assigned."""
        self.EncodeDecode(self.encoded_empty_message, HasDefault())
    def testSendDefaultExplicitlyAssigned(self):
        """Test that default is sent when explicitly assigned."""
        message = HasDefault()
        message.a_value = HasDefault.a_value.default
        self.EncodeDecode(self.encoded_default_assigned, message)
    def testEncodingNestedEmptyMessage(self):
        """Test encoding a nested empty message."""
        message = HasOptionalNestedMessage()
        message.nested = OptionalMessage()
        self.EncodeDecode(self.encoded_nested_empty, message)
    def testEncodingRepeatedNestedEmptyMessage(self):
        """Test encoding repeated nested empty messages."""
        message = HasOptionalNestedMessage()
        message.repeated_nested = [OptionalMessage(), OptionalMessage()]
        self.EncodeDecode(self.encoded_repeated_nested_empty, message)
    def testContentType(self):
        """The protocol module must advertise a string CONTENT_TYPE."""
        self.assertTrue(isinstance(self.PROTOLIB.CONTENT_TYPE, str))
    def testDecodeInvalidEnumType(self):
        # Since protos need to be able to add new enums, a message should be
        # successfully decoded even if the enum value is invalid. Encoding the
        # decoded message should result in equivalence with the original
        # encoded message containing an invalid enum.
        decoded = self.PROTOLIB.decode_message(OptionalMessage,
                                               self.encoded_invalid_enum)
        message = OptionalMessage()
        self.assertEqual(message, decoded)
        encoded = self.PROTOLIB.encode_message(decoded)
        self.assertEqual(self.encoded_invalid_enum, encoded)
    def testDecodeInvalidRepeatedEnumType(self):
        # Since protos need to be able to add new enums, a message should be
        # successfully decoded even if the enum value is invalid. Encoding the
        # decoded message should result in equivalence with the original
        # encoded message containing an invalid enum.
        decoded = self.PROTOLIB.decode_message(RepeatedMessage,
                                               self.encoded_invalid_repeated_enum)
        message = RepeatedMessage()
        message.enum_value = [RepeatedMessage.SimpleEnum.VAL1]
        self.assertEqual(message, decoded)
        encoded = self.PROTOLIB.encode_message(decoded)
        self.assertEqual(self.encoded_invalid_repeated_enum, encoded)
    def testDateTimeNoTimeZone(self):
        """Test that DateTimeFields are encoded/decoded correctly."""
        class MyMessage(messages.Message):
            value = message_types.DateTimeField(1)
        value = datetime.datetime(2013, 1, 3, 11, 36, 30, 123000)
        message = MyMessage(value=value)
        decoded = self.PROTOLIB.decode_message(
            MyMessage, self.PROTOLIB.encode_message(message))
        self.assertEqual(decoded.value, value)
    def testDateTimeWithTimeZone(self):
        """Test DateTimeFields with time zones."""
        class MyMessage(messages.Message):
            value = message_types.DateTimeField(1)
        value = datetime.datetime(2013, 1, 3, 11, 36, 30, 123000,
                                  util.TimeZoneOffset(8 * 60))
        message = MyMessage(value=value)
        decoded = self.PROTOLIB.decode_message(
            MyMessage, self.PROTOLIB.encode_message(message))
        self.assertEqual(decoded.value, value)
def pick_unused_port():
    """Find an unused port to use in tests.

    Derived from Damon Kohlers example:
    http://code.activestate.com/recipes/531822-pick-unused-port
    """
    # Binding to port 0 lets the OS choose a free ephemeral port; the
    # socket context manager guarantees the probe socket is closed.
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as probe:
        probe.bind(('localhost', 0))
        return probe.getsockname()[1]
def get_module_name(module_attribute):
    """Get the module name.

    Args:
        module_attribute: An attribute of the module.

    Returns:
        The fully qualified module name or simple module name where
        'module_attribute' is defined if the module name is "__main__".
    """
    module_name = module_attribute.__module__
    if module_name != '__main__':
        return module_name
    # Attributes defined in a script run directly report '__main__';
    # fall back to the defining file's base name (without extension).
    source_file = inspect.getfile(module_attribute)
    return os.path.basename(source_file).split('.')[0]
| python | Apache-2.0 | 1b14f58b3c895f542c724028dd0fb8ae0d816510 | 2026-01-05T07:12:01.390580Z | false |
google/apitools | https://github.com/google/apitools/blob/1b14f58b3c895f542c724028dd0fb8ae0d816510/apitools/base/protorpclite/messages_test.py | apitools/base/protorpclite/messages_test.py | #!/usr/bin/env python
#
# Copyright 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Tests for apitools.base.protorpclite.messages."""
import pickle
import re
import sys
import types
import unittest
import six
from apitools.base.protorpclite import descriptor
from apitools.base.protorpclite import message_types
from apitools.base.protorpclite import messages
from apitools.base.protorpclite import test_util
# This package plays lots of games with modifying global variables inside
# test cases. Hence:
# pylint:disable=function-redefined
# pylint:disable=global-variable-not-assigned
# pylint:disable=global-variable-undefined
# pylint:disable=redefined-outer-name
# pylint:disable=undefined-variable
# pylint:disable=unused-variable
# pylint:disable=too-many-lines
# Alias 'long' so the same test code runs under Python 2 (where int and
# long are distinct types) and Python 3 (where int covers both).
try:
    long  # Python 2
except NameError:
    long = int  # Python 3
class ModuleInterfaceTest(test_util.ModuleInterfaceTest,
                          test_util.TestCase):
    # Module whose public interface (__all__) the mixin validates.
    MODULE = messages
class ValidationErrorTest(test_util.TestCase):
    """Tests for the string form of messages.ValidationError."""

    def testStr_NoFieldName(self):
        """str() of an error with no field name is just the message."""
        error = messages.ValidationError('Validation error')
        self.assertEqual('Validation error', str(error))

    def testStr_FieldName(self):
        """Assigning field_name does not change the str() of the error."""
        error = messages.ValidationError('Validation error')
        error.field_name = 'a_field'
        self.assertEqual('Validation error', str(error))
class EnumTest(test_util.TestCase):
    """Tests for the messages.Enum base class."""
    def setUp(self):
        """Set up tests."""
        # Redefine Color class in case so that changes to it (an
        # error) in one test does not affect other tests.
        global Color  # pylint:disable=global-variable-not-assigned
        # pylint:disable=unused-variable
        class Color(messages.Enum):
            RED = 20
            ORANGE = 2
            YELLOW = 40
            GREEN = 4
            BLUE = 50
            INDIGO = 5
            VIOLET = 80
    def testNames(self):
        """Test that names iterates over enum names."""
        self.assertEqual(
            set(['BLUE', 'GREEN', 'INDIGO', 'ORANGE', 'RED',
                 'VIOLET', 'YELLOW']),
            set(Color.names()))
    def testNumbers(self):
        """Tests that numbers() iterates over the enum numbers."""
        self.assertEqual(set([2, 4, 5, 20, 40, 50, 80]), set(Color.numbers()))
    def testIterate(self):
        """Test that __iter__ iterates over all enum values."""
        self.assertEqual(set(Color),
                         set([Color.RED,
                              Color.ORANGE,
                              Color.YELLOW,
                              Color.GREEN,
                              Color.BLUE,
                              Color.INDIGO,
                              Color.VIOLET]))
    def testNaturalOrder(self):
        """Test that natural order enumeration is in numeric order."""
        self.assertEqual([Color.ORANGE,
                          Color.GREEN,
                          Color.INDIGO,
                          Color.RED,
                          Color.YELLOW,
                          Color.BLUE,
                          Color.VIOLET],
                         sorted(Color))
    def testByName(self):
        """Test look-up by name."""
        self.assertEqual(Color.RED, Color.lookup_by_name('RED'))
        self.assertRaises(KeyError, Color.lookup_by_name, 20)
        self.assertRaises(KeyError, Color.lookup_by_name, Color.RED)
    def testByNumber(self):
        """Test look-up by number."""
        self.assertRaises(KeyError, Color.lookup_by_number, 'RED')
        self.assertEqual(Color.RED, Color.lookup_by_number(20))
        self.assertRaises(KeyError, Color.lookup_by_number, Color.RED)
    def testConstructor(self):
        """Test that constructor look-up by name or number."""
        self.assertEqual(Color.RED, Color('RED'))
        self.assertEqual(Color.RED, Color(u'RED'))
        self.assertEqual(Color.RED, Color(20))
        if six.PY2:
            self.assertEqual(Color.RED, Color(long(20)))
        self.assertEqual(Color.RED, Color(Color.RED))
        self.assertRaises(TypeError, Color, 'Not exists')
        self.assertRaises(TypeError, Color, 'Red')
        self.assertRaises(TypeError, Color, 100)
        self.assertRaises(TypeError, Color, 10.0)
    def testLen(self):
        """Test that len function works to count enums."""
        self.assertEqual(7, len(Color))
    def testNoSubclasses(self):
        """Test that it is not possible to sub-class enum classes."""
        def declare_subclass():
            class MoreColor(Color):
                pass
        self.assertRaises(messages.EnumDefinitionError,
                          declare_subclass)
    def testClassNotMutable(self):
        """Test that enum classes themselves are not mutable."""
        self.assertRaises(AttributeError,
                          setattr,
                          Color,
                          'something_new',
                          10)
    def testInstancesMutable(self):
        """Test that enum instances are not mutable."""
        self.assertRaises(TypeError,
                          setattr,
                          Color.RED,
                          'something_new',
                          10)
    def testDefEnum(self):
        """Test def_enum works by building enum class from dict."""
        WeekDay = messages.Enum.def_enum({'Monday': 1,
                                          'Tuesday': 2,
                                          'Wednesday': 3,
                                          'Thursday': 4,
                                          'Friday': 6,
                                          'Saturday': 7,
                                          'Sunday': 8},
                                         'WeekDay')
        self.assertEqual('Wednesday', WeekDay(3).name)
        self.assertEqual(6, WeekDay('Friday').number)
        self.assertEqual(WeekDay.Sunday, WeekDay('Sunday'))
    def testNonInt(self):
        """Test that non-integer values rejection by enum def."""
        self.assertRaises(messages.EnumDefinitionError,
                          messages.Enum.def_enum,
                          {'Bad': '1'},
                          'BadEnum')
    def testNegativeInt(self):
        """Test that negative numbers rejection by enum def."""
        self.assertRaises(messages.EnumDefinitionError,
                          messages.Enum.def_enum,
                          {'Bad': -1},
                          'BadEnum')
    def testLowerBound(self):
        """Test that zero is accepted by enum def."""
        class NotImportant(messages.Enum):
            """Testing for value zero"""
            VALUE = 0
        self.assertEqual(0, int(NotImportant.VALUE))
    def testTooLargeInt(self):
        """Test that numbers too large are rejected."""
        self.assertRaises(messages.EnumDefinitionError,
                          messages.Enum.def_enum,
                          {'Bad': (2 ** 29)},
                          'BadEnum')
    def testRepeatedInt(self):
        """Test duplicated numbers are forbidden."""
        self.assertRaises(messages.EnumDefinitionError,
                          messages.Enum.def_enum,
                          {'Ok': 1, 'Repeated': 1},
                          'BadEnum')
    def testStr(self):
        """Test converting to string."""
        self.assertEqual('RED', str(Color.RED))
        self.assertEqual('ORANGE', str(Color.ORANGE))
    def testInt(self):
        """Test converting to int."""
        self.assertEqual(20, int(Color.RED))
        self.assertEqual(2, int(Color.ORANGE))
    def testRepr(self):
        """Test enum representation."""
        self.assertEqual('Color(RED, 20)', repr(Color.RED))
        self.assertEqual('Color(YELLOW, 40)', repr(Color.YELLOW))
    def testDocstring(self):
        """Test that docstring is supported ok."""
        class NotImportant(messages.Enum):
            """I have a docstring."""
            VALUE1 = 1
        self.assertEqual('I have a docstring.', NotImportant.__doc__)
    def testDeleteEnumValue(self):
        """Test that enum values cannot be deleted."""
        self.assertRaises(TypeError, delattr, Color, 'RED')
    def testEnumName(self):
        """Test enum name."""
        module_name = test_util.get_module_name(EnumTest)
        self.assertEqual('%s.Color' % module_name, Color.definition_name())
        self.assertEqual(module_name, Color.outer_definition_name())
        self.assertEqual(module_name, Color.definition_package())
    def testDefinitionName_OverrideModule(self):
        """Test enum module is overridden by module package name."""
        global package
        try:
            package = 'my.package'
            self.assertEqual('my.package.Color', Color.definition_name())
            self.assertEqual('my.package', Color.outer_definition_name())
            self.assertEqual('my.package', Color.definition_package())
        finally:
            # Drop the module-level 'package' attribute again so other
            # definition-name tests see the plain module name.
            del package
    def testDefinitionName_NoModule(self):
        """Test what happens when there is no module for enum."""
        class Enum1(messages.Enum):
            pass
        original_modules = sys.modules
        sys.modules = dict(sys.modules)
        try:
            del sys.modules[__name__]
            self.assertEqual('Enum1', Enum1.definition_name())
            self.assertEqual(None, Enum1.outer_definition_name())
            self.assertEqual(None, Enum1.definition_package())
            self.assertEqual(six.text_type, type(Enum1.definition_name()))
        finally:
            # Restore the real module table so later tests are unaffected.
            sys.modules = original_modules
    def testDefinitionName_Nested(self):
        """Test nested Enum names."""
        class MyMessage(messages.Message):
            class NestedEnum(messages.Enum):
                pass
            class NestedMessage(messages.Message):
                class NestedEnum(messages.Enum):
                    pass
        module_name = test_util.get_module_name(EnumTest)
        self.assertEqual('%s.MyMessage.NestedEnum' % module_name,
                         MyMessage.NestedEnum.definition_name())
        self.assertEqual('%s.MyMessage' % module_name,
                         MyMessage.NestedEnum.outer_definition_name())
        self.assertEqual(module_name,
                         MyMessage.NestedEnum.definition_package())
        self.assertEqual(
            '%s.MyMessage.NestedMessage.NestedEnum' % module_name,
            MyMessage.NestedMessage.NestedEnum.definition_name())
        self.assertEqual(
            '%s.MyMessage.NestedMessage' % module_name,
            MyMessage.NestedMessage.NestedEnum.outer_definition_name())
        self.assertEqual(
            module_name,
            MyMessage.NestedMessage.NestedEnum.definition_package())
    def testMessageDefinition(self):
        """Test that enumeration knows its enclosing message definition."""
        class OuterEnum(messages.Enum):
            pass
        self.assertEqual(None, OuterEnum.message_definition())
        class OuterMessage(messages.Message):
            class InnerEnum(messages.Enum):
                pass
        self.assertEqual(
            OuterMessage, OuterMessage.InnerEnum.message_definition())
    def testComparison(self):
        """Test comparing various enums to different types."""
        class Enum1(messages.Enum):
            VAL1 = 1
            VAL2 = 2
        class Enum2(messages.Enum):
            VAL1 = 1
        self.assertEqual(Enum1.VAL1, Enum1.VAL1)
        self.assertNotEqual(Enum1.VAL1, Enum1.VAL2)
        self.assertNotEqual(Enum1.VAL1, Enum2.VAL1)
        self.assertNotEqual(Enum1.VAL1, 'VAL1')
        self.assertNotEqual(Enum1.VAL1, 1)
        self.assertNotEqual(Enum1.VAL1, 2)
        self.assertNotEqual(Enum1.VAL1, None)
        # NOTE(review): duplicate of the cross-enum check a few lines up.
        self.assertNotEqual(Enum1.VAL1, Enum2.VAL1)
        self.assertTrue(Enum1.VAL1 < Enum1.VAL2)
        self.assertTrue(Enum1.VAL2 > Enum1.VAL1)
        self.assertNotEqual(1, Enum2.VAL1)
    def testPickle(self):
        """Testing pickling and unpickling of Enum instances."""
        colors = list(Color)
        unpickled = pickle.loads(pickle.dumps(colors))
        self.assertEqual(colors, unpickled)
        # Unpickling shouldn't create new enum instances.
        for i, color in enumerate(colors):
            self.assertTrue(color is unpickled[i])
class FieldListTest(test_util.TestCase):
    """Tests for messages.FieldList, the validating list of field values."""
    def setUp(self):
        # A repeated field is required; FieldList rejects non-repeated ones.
        self.integer_field = messages.IntegerField(1, repeated=True)
    def testConstructor(self):
        self.assertEqual([1, 2, 3],
                         messages.FieldList(self.integer_field, [1, 2, 3]))
        self.assertEqual([1, 2, 3],
                         messages.FieldList(self.integer_field, (1, 2, 3)))
        self.assertEqual([], messages.FieldList(self.integer_field, []))
    def testNone(self):
        self.assertRaises(TypeError, messages.FieldList,
                          self.integer_field, None)
    def testDoNotAutoConvertString(self):
        # A str is iterable but must not be treated as a sequence of values.
        string_field = messages.StringField(1, repeated=True)
        self.assertRaises(messages.ValidationError,
                          messages.FieldList, string_field, 'abc')
    def testConstructorCopies(self):
        a_list = [1, 3, 6]
        field_list = messages.FieldList(self.integer_field, a_list)
        self.assertFalse(a_list is field_list)
        self.assertFalse(field_list is
                         messages.FieldList(self.integer_field, field_list))
    def testNonRepeatedField(self):
        self.assertRaisesWithRegexpMatch(
            messages.FieldDefinitionError,
            'FieldList may only accept repeated fields',
            messages.FieldList,
            messages.IntegerField(1),
            [])
    def testConstructor_InvalidValues(self):
        self.assertRaisesWithRegexpMatch(
            messages.ValidationError,
            re.escape("Expected type %r "
                      "for IntegerField, found 1 (type %r)"
                      % (six.integer_types, str)),
            messages.FieldList, self.integer_field, ["1", "2", "3"])
    def testConstructor_Scalars(self):
        self.assertRaisesWithRegexpMatch(
            messages.ValidationError,
            "IntegerField is repeated. Found: 3",
            messages.FieldList, self.integer_field, 3)
        # The iterator repr differs across Python versions; match both.
        self.assertRaisesWithRegexpMatch(
            messages.ValidationError,
            ("IntegerField is repeated. Found: "
             "<(list[_]?|sequence)iterator object"),
            messages.FieldList, self.integer_field, iter([1, 2, 3]))
    def testSetSlice(self):
        field_list = messages.FieldList(self.integer_field, [1, 2, 3, 4, 5])
        field_list[1:3] = [10, 20]
        self.assertEqual([1, 10, 20, 4, 5], field_list)
    def testSetSlice_InvalidValues(self):
        field_list = messages.FieldList(self.integer_field, [1, 2, 3, 4, 5])
        def setslice():
            field_list[1:3] = ['10', '20']
        msg_re = re.escape("Expected type %r "
                           "for IntegerField, found 10 (type %r)"
                           % (six.integer_types, str))
        self.assertRaisesWithRegexpMatch(
            messages.ValidationError,
            msg_re,
            setslice)
    def testSetItem(self):
        field_list = messages.FieldList(self.integer_field, [2])
        field_list[0] = 10
        self.assertEqual([10], field_list)
    def testSetItem_InvalidValues(self):
        field_list = messages.FieldList(self.integer_field, [2])
        def setitem():
            field_list[0] = '10'
        self.assertRaisesWithRegexpMatch(
            messages.ValidationError,
            re.escape("Expected type %r "
                      "for IntegerField, found 10 (type %r)"
                      % (six.integer_types, str)),
            setitem)
    def testAppend(self):
        field_list = messages.FieldList(self.integer_field, [2])
        field_list.append(10)
        self.assertEqual([2, 10], field_list)
    def testAppend_InvalidValues(self):
        field_list = messages.FieldList(self.integer_field, [2])
        field_list.name = 'a_field'
        def append():
            field_list.append('10')
        self.assertRaisesWithRegexpMatch(
            messages.ValidationError,
            re.escape("Expected type %r "
                      "for IntegerField, found 10 (type %r)"
                      % (six.integer_types, str)),
            append)
    def testExtend(self):
        field_list = messages.FieldList(self.integer_field, [2])
        field_list.extend([10])
        self.assertEqual([2, 10], field_list)
    def testExtend_InvalidValues(self):
        field_list = messages.FieldList(self.integer_field, [2])
        def extend():
            field_list.extend(['10'])
        self.assertRaisesWithRegexpMatch(
            messages.ValidationError,
            re.escape("Expected type %r "
                      "for IntegerField, found 10 (type %r)"
                      % (six.integer_types, str)),
            extend)
    def testInsert(self):
        field_list = messages.FieldList(self.integer_field, [2, 3])
        field_list.insert(1, 10)
        self.assertEqual([2, 10, 3], field_list)
    def testInsert_InvalidValues(self):
        field_list = messages.FieldList(self.integer_field, [2, 3])
        def insert():
            field_list.insert(1, '10')
        self.assertRaisesWithRegexpMatch(
            messages.ValidationError,
            re.escape("Expected type %r "
                      "for IntegerField, found 10 (type %r)"
                      % (six.integer_types, str)),
            insert)
    def testPickle(self):
        """Testing pickling and unpickling of FieldList instances."""
        field_list = messages.FieldList(self.integer_field, [1, 2, 3, 4, 5])
        unpickled = pickle.loads(pickle.dumps(field_list))
        self.assertEqual(field_list, unpickled)
        # The owning field definition must survive the round trip too.
        self.assertIsInstance(unpickled.field, messages.IntegerField)
        self.assertEqual(1, unpickled.field.number)
        self.assertTrue(unpickled.field.repeated)
class FieldTest(test_util.TestCase):
    def ActionOnAllFieldClasses(self, action):
        """Test all field classes except Message and Enum.
        Message and Enum require separate tests.
        Args:
            action: Callable that takes the field class as a parameter.
        """
        # The five scalar field types share the same constructor contract.
        classes = (messages.IntegerField,
                   messages.FloatField,
                   messages.BooleanField,
                   messages.BytesField,
                   messages.StringField)
        for field_class in classes:
            action(field_class)
    def testNumberAttribute(self):
        """Test setting the number attribute."""
        def action(field_class):
            # Check range.
            self.assertRaises(messages.InvalidNumberError,
                              field_class,
                              0)
            self.assertRaises(messages.InvalidNumberError,
                              field_class,
                              -1)
            self.assertRaises(messages.InvalidNumberError,
                              field_class,
                              messages.MAX_FIELD_NUMBER + 1)
            # Check reserved.
            self.assertRaises(messages.InvalidNumberError,
                              field_class,
                              messages.FIRST_RESERVED_FIELD_NUMBER)
            self.assertRaises(messages.InvalidNumberError,
                              field_class,
                              messages.LAST_RESERVED_FIELD_NUMBER)
            # Non-integer numbers are rejected too.
            self.assertRaises(messages.InvalidNumberError,
                              field_class,
                              '1')
            # This one should work.
            field_class(number=1)
        self.ActionOnAllFieldClasses(action)
    def testRequiredAndRepeated(self):
        """Test setting the required and repeated fields."""
        def action(field_class):
            field_class(1, required=True)
            field_class(1, repeated=True)
            # required and repeated are mutually exclusive.
            self.assertRaises(messages.FieldDefinitionError,
                              field_class,
                              1,
                              required=True,
                              repeated=True)
        self.ActionOnAllFieldClasses(action)
    def testInvalidVariant(self):
        """Test field with invalid variants."""
        def action(field_class):
            # ActionOnAllFieldClasses never passes DateTimeField, so this
            # guard always holds for the scalar classes exercised here.
            if field_class is not message_types.DateTimeField:
                self.assertRaises(messages.InvalidVariantError,
                                  field_class,
                                  1,
                                  variant=messages.Variant.ENUM)
        self.ActionOnAllFieldClasses(action)
def testDefaultVariant(self):
"""Test that default variant is used when not set."""
def action(field_class):
field = field_class(1)
self.assertEqual(field_class.DEFAULT_VARIANT, field.variant)
self.ActionOnAllFieldClasses(action)
    def testAlternateVariant(self):
        """Test that an explicitly chosen variant overrides the default."""
        field = messages.IntegerField(1, variant=messages.Variant.UINT32)
        self.assertEqual(messages.Variant.UINT32, field.variant)
    def testDefaultFields_Single(self):
        """Test default field is correct type (single)."""
        defaults = {
            messages.IntegerField: 10,
            messages.FloatField: 1.5,
            messages.BooleanField: False,
            messages.BytesField: b'abc',
            messages.StringField: u'abc',
        }
        def action(field_class):
            field_class(1, default=defaults[field_class])
        self.ActionOnAllFieldClasses(action)
        # Run defaults test again checking for str/unicode compatibility.
        defaults[messages.StringField] = 'abc'
        self.ActionOnAllFieldClasses(action)
    def testStringField_BadUnicodeInDefault(self):
        """Test binary values in string field."""
        # 'utf.?8' tolerates both 'utf8' and 'utf-8' codec spellings.
        self.assertRaisesWithRegexpMatch(
            messages.InvalidDefaultError,
            r"Invalid default value for StringField:.*: "
            r"Field encountered non-UTF-8 string .*: "
            r"'utf.?8' codec can't decode byte 0xc3 in position 0: "
            r"invalid continuation byte",
            messages.StringField, 1, default=b'\xc3\x28')
    def testDefaultFields_InvalidSingle(self):
        """Test default field is correct type (invalid single)."""
        def action(field_class):
            # An arbitrary object is not a legal default for any field type.
            self.assertRaises(messages.InvalidDefaultError,
                              field_class,
                              1,
                              default=object())
        self.ActionOnAllFieldClasses(action)
    def testDefaultFields_InvalidRepeated(self):
        """Test default field does not accept defaults."""
        # Repeated fields reject any default, even a well-typed list.
        self.assertRaisesWithRegexpMatch(
            messages.FieldDefinitionError,
            'Repeated fields may not have defaults',
            messages.StringField, 1, repeated=True, default=[1, 2, 3])
    def testDefaultFields_None(self):
        """Test none is always acceptable."""
        def action(field_class):
            # None is the "no default" marker in every field mode.
            field_class(1, default=None)
            field_class(1, required=True, default=None)
            field_class(1, repeated=True, default=None)
        self.ActionOnAllFieldClasses(action)
    def testDefaultFields_Enum(self):
        """Test the default for enum fields."""
        class Symbol(messages.Enum):
            ALPHA = 1
            BETA = 2
            GAMMA = 3
        # An enum instance is accepted directly as a default.
        field = messages.EnumField(Symbol, 1, default=Symbol.ALPHA)
        self.assertEqual(Symbol.ALPHA, field.default)
    def testDefaultFields_EnumStringDelayedResolution(self):
        """Test that enum fields resolve default strings."""
        # The enum type is given by dotted name, so the string default can
        # only be resolved once the type reference is looked up.
        field = messages.EnumField(
            'apitools.base.protorpclite.descriptor.FieldDescriptor.Label',
            1,
            default='OPTIONAL')
        self.assertEqual(
            descriptor.FieldDescriptor.Label.OPTIONAL, field.default)
    def testDefaultFields_EnumIntDelayedResolution(self):
        """Test that enum fields resolve default integers."""
        # Numeric defaults are resolved to enum values lazily as well.
        field = messages.EnumField(
            'apitools.base.protorpclite.descriptor.FieldDescriptor.Label',
            1,
            default=2)
        self.assertEqual(
            descriptor.FieldDescriptor.Label.REQUIRED, field.default)
    def testDefaultFields_EnumOkIfTypeKnown(self):
        """Test enum fields accept valid default values when type is known."""
        # With a concrete enum class the string default resolves immediately.
        field = messages.EnumField(descriptor.FieldDescriptor.Label,
                                   1,
                                   default='REPEATED')
        self.assertEqual(
            descriptor.FieldDescriptor.Label.REPEATED, field.default)
    def testDefaultFields_EnumForceCheckIfTypeKnown(self):
        """Test that enum fields validate default values if type is known."""
        # Unlike the delayed-resolution case, a known type fails at
        # construction time rather than at first access of .default.
        self.assertRaisesWithRegexpMatch(TypeError,
                                         'No such value for NOT_A_LABEL in '
                                         'Enum Label',
                                         messages.EnumField,
                                         descriptor.FieldDescriptor.Label,
                                         1,
                                         default='NOT_A_LABEL')
    def testDefaultFields_EnumInvalidDelayedResolution(self):
        """Test that enum fields raise errors upon delayed resolution error."""
        # Construction succeeds; the bad default only fails when accessed.
        field = messages.EnumField(
            'apitools.base.protorpclite.descriptor.FieldDescriptor.Label',
            1,
            default=200)
        self.assertRaisesWithRegexpMatch(TypeError,
                                         'No such value for 200 in Enum Label',
                                         getattr,
                                         field,
                                         'default')
def testValidate_Valid(self):
"""Test validation of valid values."""
values = {
messages.IntegerField: 10,
messages.FloatField: 1.5,
messages.BooleanField: False,
messages.BytesField: b'abc',
messages.StringField: u'abc',
}
def action(field_class):
# Optional.
field = field_class(1)
field.validate(values[field_class])
# Required.
field = field_class(1, required=True)
field.validate(values[field_class])
# Repeated.
field = field_class(1, repeated=True)
field.validate([])
field.validate(())
field.validate([values[field_class]])
field.validate((values[field_class],))
# Right value, but not repeated.
self.assertRaises(messages.ValidationError,
field.validate,
values[field_class])
self.assertRaises(messages.ValidationError,
field.validate,
values[field_class])
self.ActionOnAllFieldClasses(action)
    def testValidate_Invalid(self):
        """Test validation rejects values of the wrong type."""
        values = {
            messages.IntegerField: "10",
            messages.FloatField: "blah",
            messages.BooleanField: 0,
            messages.BytesField: 10.20,
            messages.StringField: 42,
        }
        def action(field_class):
            # Optional.
            field = field_class(1)
            self.assertRaises(messages.ValidationError,
                              field.validate,
                              values[field_class])
            # Required.
            field = field_class(1, required=True)
            self.assertRaises(messages.ValidationError,
                              field.validate,
                              values[field_class])
            # Repeated.
            field = field_class(1, repeated=True)
            self.assertRaises(messages.ValidationError,
                              field.validate,
                              [values[field_class]])
            self.assertRaises(messages.ValidationError,
                              field.validate,
                              (values[field_class],))
        self.ActionOnAllFieldClasses(action)
    def testValidate_None(self):
        """Test that None is valid for non-required fields."""
        def action(field_class):
            # Optional.
            field = field_class(1)
            field.validate(None)
            # Required.
            field = field_class(1, required=True)
            self.assertRaisesWithRegexpMatch(messages.ValidationError,
                                             'Required field is missing',
                                             field.validate,
                                             None)
            # Repeated: the list itself may be None, but elements may not.
            field = field_class(1, repeated=True)
            field.validate(None)
            self.assertRaisesWithRegexpMatch(
                messages.ValidationError,
                'Repeated values for %s may '
                'not be None' % field_class.__name__,
                field.validate,
                [None])
            self.assertRaises(messages.ValidationError,
                              field.validate,
                              (None,))
        self.ActionOnAllFieldClasses(action)
    def testValidateElement(self):
        """Test element-level validation of valid values."""
        values = {
            messages.IntegerField: (10, -1, 0),
            messages.FloatField: (1.5, -1.5, 3),  # for json it is all a number
            messages.BooleanField: (True, False),
            messages.BytesField: (b'abc',),
            messages.StringField: (u'abc',),
        }
        def action(field_class):
            # Optional.
            field = field_class(1)
            for value in values[field_class]:
                field.validate_element(value)
            # Required.
            field = field_class(1, required=True)
            for value in values[field_class]:
                field.validate_element(value)
            # Repeated: validate_element still checks single elements, so
            # containers are rejected even for repeated fields.
            field = field_class(1, repeated=True)
            self.assertRaises(messages.ValidationError,
                              field.validate_element,
                              [])
            self.assertRaises(messages.ValidationError,
                              field.validate_element,
                              ())
            for value in values[field_class]:
                field.validate_element(value)
            # Right value, but repeated.
            self.assertRaises(messages.ValidationError,
                              field.validate_element,
                              list(values[field_class]))  # testing list
            self.assertRaises(messages.ValidationError,
                              field.validate_element,
                              values[field_class])  # testing tuple
        self.ActionOnAllFieldClasses(action)
    def testValidateCastingElement(self):
        """Test numeric casting rules during element validation."""
        # Ints widen to float for FloatField...
        field = messages.FloatField(1)
        self.assertEqual(type(field.validate_element(12)), float)
        self.assertEqual(type(field.validate_element(12.0)), float)
        field = messages.IntegerField(1)
        self.assertEqual(type(field.validate_element(12)), int)
        self.assertRaises(messages.ValidationError,
                          field.validate_element,
                          12.0)  # ...but floats must not narrow to int.
def testReadOnly(self):
"""Test that objects are all read-only."""
def action(field_class):
field = field_class(10)
self.assertRaises(AttributeError,
setattr,
field,
'number',
20)
| python | Apache-2.0 | 1b14f58b3c895f542c724028dd0fb8ae0d816510 | 2026-01-05T07:12:01.390580Z | true |
google/apitools | https://github.com/google/apitools/blob/1b14f58b3c895f542c724028dd0fb8ae0d816510/apitools/base/protorpclite/descriptor_test.py | apitools/base/protorpclite/descriptor_test.py | #!/usr/bin/env python
#
# Copyright 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Tests for apitools.base.protorpclite.descriptor."""
import platform
import types
import unittest
import six
from apitools.base.protorpclite import descriptor
from apitools.base.protorpclite import message_types
from apitools.base.protorpclite import messages
from apitools.base.protorpclite import test_util
RUSSIA = u'\u0420\u043e\u0441\u0441\u0438\u044f'  # "Russia" in Cyrillic.
class ModuleInterfaceTest(test_util.ModuleInterfaceTest,
                          test_util.TestCase):
    """Check the descriptor module conforms to the shared module interface."""
    MODULE = descriptor
class DescribeEnumValueTest(test_util.TestCase):
    """Enum values round-trip into EnumValueDescriptor instances."""
    def testDescribe(self):
        """A described enum value carries its declared name and number."""
        class MyEnum(messages.Enum):
            MY_NAME = 10
        want = descriptor.EnumValueDescriptor()
        want.name = 'MY_NAME'
        want.number = 10
        got = descriptor.describe_enum_value(MyEnum.MY_NAME)
        got.check_initialized()
        self.assertEqual(want, got)
class DescribeEnumTest(test_util.TestCase):
    """Enum classes are described into EnumDescriptor messages."""
    def testEmptyEnum(self):
        class EmptyEnum(messages.Enum):
            pass
        expected = descriptor.EnumDescriptor()
        expected.name = 'EmptyEnum'
        described = descriptor.describe_enum(EmptyEnum)
        described.check_initialized()
        self.assertEqual(expected, described)
    def testNestedEnum(self):
        # Only the leaf name is recorded, not the enclosing scope.
        class MyScope(messages.Message):
            class NestedEnum(messages.Enum):
                pass
        expected = descriptor.EnumDescriptor()
        expected.name = 'NestedEnum'
        described = descriptor.describe_enum(MyScope.NestedEnum)
        described.check_initialized()
        self.assertEqual(expected, described)
    @unittest.skipIf('PyPy' in platform.python_implementation(),
                     'todo: reenable this')
    def testEnumWithItems(self):
        class EnumWithItems(messages.Enum):
            A = 3
            B = 1
            C = 2
        expected = descriptor.EnumDescriptor()
        expected.name = 'EnumWithItems'
        a = descriptor.EnumValueDescriptor()
        a.name = 'A'
        a.number = 3
        b = descriptor.EnumValueDescriptor()
        b.name = 'B'
        b.number = 1
        c = descriptor.EnumValueDescriptor()
        c.name = 'C'
        c.number = 2
        # Values are expected in number order (1, 2, 3): B, C, A.
        expected.values = [b, c, a]
        described = descriptor.describe_enum(EnumWithItems)
        described.check_initialized()
        self.assertEqual(expected, described)
class DescribeFieldTest(test_util.TestCase):
    """Field instances are described into FieldDescriptor messages."""
    def testLabel(self):
        # repeated/required/optional map onto the three descriptor labels.
        for repeated, required, expected_label in (
                (True, False, descriptor.FieldDescriptor.Label.REPEATED),
                (False, True, descriptor.FieldDescriptor.Label.REQUIRED),
                (False, False, descriptor.FieldDescriptor.Label.OPTIONAL)):
            field = messages.IntegerField(
                10, required=required, repeated=repeated)
            field.name = 'a_field'
            expected = descriptor.FieldDescriptor()
            expected.name = 'a_field'
            expected.number = 10
            expected.label = expected_label
            expected.variant = descriptor.FieldDescriptor.Variant.INT64
            described = descriptor.describe_field(field)
            described.check_initialized()
            self.assertEqual(expected, described)
    def testDefault(self):
        # (field class, default value, expected serialized default).
        test_cases = (
            (messages.IntegerField, 200, '200'),
            (messages.FloatField, 1.5, '1.5'),
            (messages.FloatField, 1e6, '1000000.0'),
            (messages.BooleanField, True, 'true'),
            (messages.BooleanField, False, 'false'),
            (messages.BytesField,
             b''.join([six.int2byte(x) for x in (31, 32, 33)]),
             b'\\x1f !'),
            (messages.StringField, RUSSIA, RUSSIA),
        )
        for field_class, default, expected_default in test_cases:
            field = field_class(10, default=default)
            field.name = u'a_field'
            expected = descriptor.FieldDescriptor()
            expected.name = u'a_field'
            expected.number = 10
            expected.label = descriptor.FieldDescriptor.Label.OPTIONAL
            expected.variant = field_class.DEFAULT_VARIANT
            expected.default_value = expected_default
            described = descriptor.describe_field(field)
            described.check_initialized()
            self.assertEqual(expected, described)
    def testDefault_EnumField(self):
        class MyEnum(messages.Enum):
            VAL = 1
        module_name = test_util.get_module_name(MyEnum)
        field = messages.EnumField(MyEnum, 10, default=MyEnum.VAL)
        field.name = 'a_field'
        expected = descriptor.FieldDescriptor()
        expected.name = 'a_field'
        expected.number = 10
        expected.label = descriptor.FieldDescriptor.Label.OPTIONAL
        expected.variant = messages.EnumField.DEFAULT_VARIANT
        expected.type_name = '%s.MyEnum' % module_name
        # Enum defaults serialize as their numeric value rendered as text.
        expected.default_value = '1'
        described = descriptor.describe_field(field)
        self.assertEqual(expected, described)
    def testMessageField(self):
        field = messages.MessageField(descriptor.FieldDescriptor, 10)
        field.name = 'a_field'
        expected = descriptor.FieldDescriptor()
        expected.name = 'a_field'
        expected.number = 10
        expected.label = descriptor.FieldDescriptor.Label.OPTIONAL
        expected.variant = messages.MessageField.DEFAULT_VARIANT
        expected.type_name = (
            'apitools.base.protorpclite.descriptor.FieldDescriptor')
        described = descriptor.describe_field(field)
        described.check_initialized()
        self.assertEqual(expected, described)
    def testDateTimeField(self):
        # DateTimeField is described via its underlying DateTimeMessage type.
        field = message_types.DateTimeField(20)
        field.name = 'a_timestamp'
        expected = descriptor.FieldDescriptor()
        expected.name = 'a_timestamp'
        expected.number = 20
        expected.label = descriptor.FieldDescriptor.Label.OPTIONAL
        expected.variant = messages.MessageField.DEFAULT_VARIANT
        expected.type_name = (
            'apitools.base.protorpclite.message_types.DateTimeMessage')
        described = descriptor.describe_field(field)
        described.check_initialized()
        self.assertEqual(expected, described)
class DescribeMessageTest(test_util.TestCase):
    """Message classes are described into MessageDescriptor messages."""
    def testEmptyDefinition(self):
        class MyMessage(messages.Message):
            pass
        expected = descriptor.MessageDescriptor()
        expected.name = 'MyMessage'
        described = descriptor.describe_message(MyMessage)
        described.check_initialized()
        self.assertEqual(expected, described)
    def testDefinitionWithFields(self):
        class MessageWithFields(messages.Message):
            field1 = messages.IntegerField(10)
            field2 = messages.StringField(30)
            field3 = messages.IntegerField(20)
        expected = descriptor.MessageDescriptor()
        expected.name = 'MessageWithFields'
        # Fields are expected in field-number order (10, 20, 30),
        # not in declaration order.
        expected.fields = [
            descriptor.describe_field(
                MessageWithFields.field_by_name('field1')),
            descriptor.describe_field(
                MessageWithFields.field_by_name('field3')),
            descriptor.describe_field(
                MessageWithFields.field_by_name('field2')),
        ]
        described = descriptor.describe_message(MessageWithFields)
        described.check_initialized()
        self.assertEqual(expected, described)
    def testNestedEnum(self):
        class MessageWithEnum(messages.Message):
            class Mood(messages.Enum):
                GOOD = 1
                BAD = 2
                UGLY = 3
            class Music(messages.Enum):
                CLASSIC = 1
                JAZZ = 2
                BLUES = 3
        expected = descriptor.MessageDescriptor()
        expected.name = 'MessageWithEnum'
        expected.enum_types = [descriptor.describe_enum(MessageWithEnum.Mood),
                               descriptor.describe_enum(MessageWithEnum.Music)]
        described = descriptor.describe_message(MessageWithEnum)
        described.check_initialized()
        self.assertEqual(expected, described)
    def testNestedMessage(self):
        class MessageWithMessage(messages.Message):
            class Nesty(messages.Message):
                pass
        expected = descriptor.MessageDescriptor()
        expected.name = 'MessageWithMessage'
        expected.message_types = [
            descriptor.describe_message(MessageWithMessage.Nesty)]
        described = descriptor.describe_message(MessageWithMessage)
        described.check_initialized()
        self.assertEqual(expected, described)
class DescribeFileTest(test_util.TestCase):
    """Test describing modules."""
    def LoadModule(self, module_name, source):
        """Execute source and copy the result names onto a fresh module."""
        result = {
            '__name__': module_name,
            'messages': messages,
        }
        exec(source, result)
        module = types.ModuleType(module_name)
        for name, value in result.items():
            setattr(module, name, value)
        return module
    def testEmptyModule(self):
        """Test describing an empty file."""
        module = types.ModuleType('my.package.name')
        expected = descriptor.FileDescriptor()
        expected.package = 'my.package.name'
        described = descriptor.describe_file(module)
        described.check_initialized()
        self.assertEqual(expected, described)
    def testNoPackageName(self):
        """Test describing a module with no module name."""
        module = types.ModuleType('')
        expected = descriptor.FileDescriptor()
        described = descriptor.describe_file(module)
        described.check_initialized()
        self.assertEqual(expected, described)
    def testPackageName(self):
        """Test using the 'package' module attribute."""
        module = types.ModuleType('my.module.name')
        module.package = 'my.package.name'
        expected = descriptor.FileDescriptor()
        expected.package = 'my.package.name'
        described = descriptor.describe_file(module)
        described.check_initialized()
        self.assertEqual(expected, described)
    def testMain(self):
        """Test package name deduced from the __main__ module's file name."""
        module = types.ModuleType('__main__')
        module.__file__ = '/blim/blam/bloom/my_package.py'
        expected = descriptor.FileDescriptor()
        expected.package = 'my_package'
        described = descriptor.describe_file(module)
        described.check_initialized()
        self.assertEqual(expected, described)
    def testMessages(self):
        """Test that messages are described."""
        module = self.LoadModule('my.package',
                                 'class Message1(messages.Message): pass\n'
                                 'class Message2(messages.Message): pass\n')
        message1 = descriptor.MessageDescriptor()
        message1.name = 'Message1'
        message2 = descriptor.MessageDescriptor()
        message2.name = 'Message2'
        expected = descriptor.FileDescriptor()
        expected.package = 'my.package'
        expected.message_types = [message1, message2]
        described = descriptor.describe_file(module)
        described.check_initialized()
        self.assertEqual(expected, described)
    def testEnums(self):
        """Test that enums are described."""
        module = self.LoadModule('my.package',
                                 'class Enum1(messages.Enum): pass\n'
                                 'class Enum2(messages.Enum): pass\n')
        enum1 = descriptor.EnumDescriptor()
        enum1.name = 'Enum1'
        enum2 = descriptor.EnumDescriptor()
        enum2.name = 'Enum2'
        expected = descriptor.FileDescriptor()
        expected.package = 'my.package'
        expected.enum_types = [enum1, enum2]
        described = descriptor.describe_file(module)
        described.check_initialized()
        self.assertEqual(expected, described)
class DescribeFileSetTest(test_util.TestCase):
    """Test describing multiple modules."""

    def testNoModules(self):
        """Test what happens when no modules provided."""
        described = descriptor.describe_file_set([])
        described.check_initialized()
        # The described FileSet.files will be None.
        self.assertEqual(descriptor.FileSet(), described)

    def testWithModules(self):
        """Test describing a set of distinct modules."""
        # Bug fix: the original created two modules both named 'package1'
        # and compared against [file1, file1], leaving file2 unused — so
        # the test never exercised more than one distinct package.
        modules = [types.ModuleType('package1'), types.ModuleType('package2')]
        file1 = descriptor.FileDescriptor()
        file1.package = 'package1'
        file2 = descriptor.FileDescriptor()
        file2.package = 'package2'
        expected = descriptor.FileSet()
        expected.files = [file1, file2]
        described = descriptor.describe_file_set(modules)
        described.check_initialized()
        self.assertEqual(expected, described)
class DescribeTest(test_util.TestCase):
    """describe() must dispatch to the matching describe_* function."""

    def testModule(self):
        expected = descriptor.describe_file(test_util)
        self.assertEqual(expected, descriptor.describe(test_util))

    def testField(self):
        field = test_util.NestedMessage.a_value
        self.assertEqual(descriptor.describe_field(field),
                         descriptor.describe(field))

    def testEnumValue(self):
        enum_value = test_util.OptionalMessage.SimpleEnum.VAL1
        self.assertEqual(descriptor.describe_enum_value(enum_value),
                         descriptor.describe(enum_value))

    def testMessage(self):
        message_class = test_util.NestedMessage
        self.assertEqual(descriptor.describe_message(message_class),
                         descriptor.describe(message_class))

    def testEnum(self):
        enum_class = test_util.OptionalMessage.SimpleEnum
        self.assertEqual(descriptor.describe_enum(enum_class),
                         descriptor.describe(enum_class))

    def testUndescribable(self):
        # Values with no corresponding descriptor type yield None.
        class NonService(object):

            def fn(self):
                pass

        undescribable = (NonService, NonService.fn, 1, 'string', 1.2, None)
        for value in undescribable:
            self.assertEqual(None, descriptor.describe(value))
class ModuleFinderTest(test_util.TestCase):
    """import_descriptor_loader must resolve dotted definition paths."""

    def testFindMessage(self):
        loaded = descriptor.import_descriptor_loader(
            'apitools.base.protorpclite.descriptor.FileSet')
        self.assertEqual(
            descriptor.describe_message(descriptor.FileSet), loaded)

    def testFindField(self):
        loaded = descriptor.import_descriptor_loader(
            'apitools.base.protorpclite.descriptor.FileSet.files')
        self.assertEqual(
            descriptor.describe_field(descriptor.FileSet.files), loaded)

    def testFindEnumValue(self):
        loaded = descriptor.import_descriptor_loader(
            'apitools.base.protorpclite.test_util.'
            'OptionalMessage.SimpleEnum.VAL1')
        self.assertEqual(
            descriptor.describe_enum_value(
                test_util.OptionalMessage.SimpleEnum.VAL1),
            loaded)
class DescriptorLibraryTest(test_util.TestCase):
    """Tests for DescriptorLibrary package lookup."""

    def setUp(self):
        self.packageless = descriptor.MessageDescriptor()
        self.packageless.name = 'Packageless'
        registry = {
            'not.real.Packageless': self.packageless,
            'Packageless': self.packageless,
        }
        self.library = descriptor.DescriptorLibrary(descriptors=registry)

    def testLookupPackage(self):
        # Real importable package names resolve to themselves.
        for package in ('csv', 'apitools.base.protorpclite'):
            self.assertEqual(package, self.library.lookup_package(package))

    def testLookupNonPackages(self):
        lib = 'apitools.base.protorpclite.descriptor.DescriptorLibrary'
        for name in ('', 'a', lib):
            self.assertRaisesWithRegexpMatch(
                messages.DefinitionNotFoundError,
                'Could not find definition for %s' % name,
                self.library.lookup_package, name)

    def testNoPackage(self):
        self.assertRaisesWithRegexpMatch(
            messages.DefinitionNotFoundError,
            'Could not find definition for not.real',
            self.library.lookup_package, 'not.real.Packageless')
        self.assertEqual(None, self.library.lookup_package('Packageless'))
# Allow running this test module directly from the command line.
if __name__ == '__main__':
    unittest.main()
| python | Apache-2.0 | 1b14f58b3c895f542c724028dd0fb8ae0d816510 | 2026-01-05T07:12:01.390580Z | false |
google/apitools | https://github.com/google/apitools/blob/1b14f58b3c895f542c724028dd0fb8ae0d816510/apitools/base/protorpclite/__init__.py | apitools/base/protorpclite/__init__.py | #
# Copyright 2015 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Shared __init__.py for apitools."""
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
| python | Apache-2.0 | 1b14f58b3c895f542c724028dd0fb8ae0d816510 | 2026-01-05T07:12:01.390580Z | false |
google/apitools | https://github.com/google/apitools/blob/1b14f58b3c895f542c724028dd0fb8ae0d816510/apitools/base/protorpclite/messages.py | apitools/base/protorpclite/messages.py | #!/usr/bin/env python
#
# Copyright 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# pylint: disable=too-many-lines
"""Stand-alone implementation of in memory protocol messages.
Public Classes:
Enum: Represents an enumerated type.
Variant: Hint for wire format to determine how to serialize.
Message: Base class for user defined messages.
IntegerField: Field for integer values.
FloatField: Field for float values.
BooleanField: Field for boolean values.
BytesField: Field for binary string values.
StringField: Field for UTF-8 string values.
MessageField: Field for other message type values.
EnumField: Field for enumerated type values.
Public Exceptions (indentation indications class hierarchy):
EnumDefinitionError: Raised when enumeration is incorrectly defined.
FieldDefinitionError: Raised when field is incorrectly defined.
InvalidVariantError: Raised when variant is not compatible with field type.
InvalidDefaultError: Raised when default is not compatiable with field.
InvalidNumberError: Raised when field number is out of range or reserved.
MessageDefinitionError: Raised when message is incorrectly defined.
DuplicateNumberError: Raised when field has duplicate number with another.
ValidationError: Raised when a message or field is not valid.
DefinitionNotFoundError: Raised when definition not found.
"""
import types
import weakref
import six
from apitools.base.protorpclite import util
# Public API of this module; names not listed here are implementation
# details.
__all__ = [
    'MAX_ENUM_VALUE',
    'MAX_FIELD_NUMBER',
    'FIRST_RESERVED_FIELD_NUMBER',
    'LAST_RESERVED_FIELD_NUMBER',
    'Enum',
    'Field',
    'FieldList',
    'Variant',
    'Message',
    'IntegerField',
    'FloatField',
    'BooleanField',
    'BytesField',
    'StringField',
    'MessageField',
    'EnumField',
    'find_definition',
    'Error',
    'DecodeError',
    'EncodeError',
    'EnumDefinitionError',
    'FieldDefinitionError',
    'InvalidVariantError',
    'InvalidDefaultError',
    'InvalidNumberError',
    'MessageDefinitionError',
    'DuplicateNumberError',
    'ValidationError',
    'DefinitionNotFoundError',
]

# pylint:disable=attribute-defined-outside-init
# pylint:disable=protected-access

# TODO(rafek): Add extended module test to ensure all exceptions
# in services extends Error.

# Base error type shared by the whole protorpclite package.
Error = util.Error
class EnumDefinitionError(Error):
    """Enumeration definition error."""


class FieldDefinitionError(Error):
    """Field definition error."""


class InvalidVariantError(FieldDefinitionError):
    """Invalid variant provided to field."""


class InvalidDefaultError(FieldDefinitionError):
    """Invalid default provided to field."""


class InvalidNumberError(FieldDefinitionError):
    """Invalid number provided to field."""


class MessageDefinitionError(Error):
    """Message definition error."""


class DuplicateNumberError(Error):
    """Duplicate number assigned to field."""


class DefinitionNotFoundError(Error):
    """Raised when definition is not found."""


class DecodeError(Error):
    """Error found decoding message from encoded form."""


class EncodeError(Error):
    """Error found when encoding message."""


class ValidationError(Error):
    """Invalid value for message error."""

    def __str__(self):
        """Return the error message.

        NOTE(review): contrary to the original docstring's claim, no field
        name is included here; this simply delegates to the base class.
        """
        return Error.__str__(self)
# Attributes that are reserved by a class definition that
# may not be used by either Enum or Message class definitions.
_RESERVED_ATTRIBUTE_NAMES = frozenset(
    ['__module__', '__doc__', '__qualname__', '__static_attributes__', '__firstlineno__'])

# Field attributes that may still be assigned after a Field instance has
# been constructed; they are filled in lazily while the enclosing message
# class is being built.
_POST_INIT_FIELD_ATTRIBUTE_NAMES = frozenset(
    ['name',
     '_message_definition',
     '_MessageField__type',
     '_EnumField__type',
     '_EnumField__resolved_default'])

# Class attributes that may still be assigned after a definition class has
# been frozen by _DefinitionClass.__init__.
_POST_INIT_ATTRIBUTE_NAMES = frozenset(
    ['_message_definition'])

# Maximum enumeration value as defined by the protocol buffers standard.
# All enum values must be less than or equal to this value.
MAX_ENUM_VALUE = (2 ** 29) - 1

# Maximum field number as defined by the protocol buffers standard.
# All field numbers must be less than or equal to this value.
MAX_FIELD_NUMBER = (2 ** 29) - 1

# Field numbers between 19000 and 19999 inclusive are reserved by the
# protobuf protocol and may not be used by fields.
FIRST_RESERVED_FIELD_NUMBER = 19000
LAST_RESERVED_FIELD_NUMBER = 19999
# pylint: disable=no-value-for-parameter
class _DefinitionClass(type):
    """Base meta-class used for definition meta-classes.

    The Enum and Message definition classes share some basic functionality.
    Both of these classes may be contained by a Message definition. After
    initialization, neither class may have attributes changed
    except for the protected _message_definition attribute, and that
    attribute may change only once.
    """

    # Name-mangled to _DefinitionClass__initialized; flipped to True once a
    # concrete (non-base) definition class finishes construction, freezing
    # the class against further attribute assignment.
    __initialized = False  # pylint:disable=invalid-name

    def __init__(cls, name, bases, dct):
        """Constructor."""
        type.__init__(cls, name, bases, dct)
        # Base classes may never be initialized.
        if cls.__bases__ != (object,):
            cls.__initialized = True

    def message_definition(cls):
        """Get outer Message definition that contains this definition.

        Returns:
            Containing Message definition if definition is contained within
            one, else None.
        """
        try:
            # _message_definition is stored as a weakref; call to deref.
            return cls._message_definition()
        except AttributeError:
            return None

    def __setattr__(cls, name, value):
        """Overridden to avoid setting variables after init.

        Setting attributes on a class must work during the period of
        initialization to set the enumeration value class variables and
        build the name/number maps. Once __init__ has set the
        __initialized flag to True it prohibits setting any more values
        on the class. The class is in effect frozen.

        Args:
            name: Name of value to set.
            value: Value to set.
        """
        if cls.__initialized and name not in _POST_INIT_ATTRIBUTE_NAMES:
            raise AttributeError('May not change values: %s' % name)
        else:
            type.__setattr__(cls, name, value)

    def __delattr__(cls, name):
        """Overridden so that cannot delete variables on definition
        classes."""
        raise TypeError('May not delete attributes on definition class')

    def definition_name(cls):
        """Helper method for creating definition name.

        Names will be generated to include the classes package name,
        scope (if the class is nested in another definition) and class
        name.

        By default, the package name for a definition is derived from
        its module name. However, this value can be overridden by
        placing a 'package' attribute in the module that contains the
        definition class. For example:

            package = 'some.alternate.package'

            class MyMessage(Message):
                ...

            >>> MyMessage.definition_name()
            some.alternate.package.MyMessage

        Returns:
            Dot-separated fully qualified name of definition.
        """
        outer_definition_name = cls.outer_definition_name()
        if outer_definition_name is None:
            return six.text_type(cls.__name__)
        return u'%s.%s' % (outer_definition_name, cls.__name__)

    def outer_definition_name(cls):
        """Helper method for creating outer definition name.

        Returns:
            If definition is nested, will return the outer definitions
            name, else the package name.
        """
        outer_definition = cls.message_definition()
        if not outer_definition:
            return util.get_package_for_module(cls.__module__)
        return outer_definition.definition_name()

    def definition_package(cls):
        """Helper method for creating the package of a definition.

        Returns:
            Name of package that definition belongs to.
        """
        outer_definition = cls.message_definition()
        if not outer_definition:
            return util.get_package_for_module(cls.__module__)
        return outer_definition.definition_package()
class _EnumClass(_DefinitionClass):
    """Meta-class used for defining the Enum base class.

    Meta-class enables very specific behavior for any defined Enum
    class. All attributes defined on an Enum sub-class must be integers.
    Each attribute defined on an Enum sub-class is translated
    into an instance of that sub-class, with the name of the attribute
    as its name, and the number provided as its value. It also ensures
    that only one level of Enum class hierarchy is possible. In other
    words it is not possible to declare sub-classes of sub-classes of
    Enum.

    This class also defines some functions in order to restrict the
    behavior of the Enum class and its sub-classes. It is not possible
    to change the behavior of the Enum class in later classes since
    any new classes may be defined with only integer values, and no
    methods.
    """

    def __init__(cls, name, bases, dct):
        # Can only define one level of sub-classes below Enum.
        if not (bases == (object,) or bases == (Enum,)):
            raise EnumDefinitionError(
                'Enum type %s may only inherit from Enum' % name)

        # Name-mangled per-class lookup tables: number -> instance and
        # name -> instance.
        cls.__by_number = {}
        cls.__by_name = {}

        # Enum base class does not need to be initialized or locked.
        if bases != (object,):
            # Replace integer with number.
            for attribute, value in dct.items():

                # Module will be in every enum class.
                if attribute in _RESERVED_ATTRIBUTE_NAMES:
                    continue

                # Reject anything that is not an int.
                if not isinstance(value, six.integer_types):
                    raise EnumDefinitionError(
                        'May only use integers in Enum definitions. '
                        'Found: %s = %s' %
                        (attribute, value))

                # Protocol buffer standard recommends non-negative values.
                # Reject negative values.
                if value < 0:
                    raise EnumDefinitionError(
                        'Must use non-negative enum values. Found: %s = %d' %
                        (attribute, value))

                if value > MAX_ENUM_VALUE:
                    raise EnumDefinitionError(
                        'Must use enum values less than or equal %d. '
                        'Found: %s = %d' %
                        (MAX_ENUM_VALUE, attribute, value))

                if value in cls.__by_number:
                    raise EnumDefinitionError(
                        'Value for %s = %d is already defined: %s' %
                        (attribute, value, cls.__by_number[value].name))

                # Create enum instance and list in new Enum type.
                # Note: this runs before the class is frozen, so
                # Enum.__init__ still assigns name/number.
                instance = object.__new__(cls)
                # pylint:disable=non-parent-init-called
                cls.__init__(instance, attribute, value)
                cls.__by_name[instance.name] = instance
                cls.__by_number[instance.number] = instance
                setattr(cls, attribute, instance)

        # Freezes the class (see _DefinitionClass.__init__).
        _DefinitionClass.__init__(cls, name, bases, dct)

    def __iter__(cls):
        """Iterate over all values of enum.

        Yields:
            Enumeration instances of the Enum class in arbitrary order.
        """
        return iter(cls.__by_number.values())

    def names(cls):
        """Get all names for Enum.

        Returns:
            An iterator for names of the enumeration in arbitrary order.
        """
        return cls.__by_name.keys()

    def numbers(cls):
        """Get all numbers for Enum.

        Returns:
            An iterator for all numbers of the enumeration in arbitrary
            order.
        """
        return cls.__by_number.keys()

    def lookup_by_name(cls, name):
        """Look up Enum by name.

        Args:
            name: Name of enum to find.

        Returns:
            Enum sub-class instance of that value.
        """
        return cls.__by_name[name]

    def lookup_by_number(cls, number):
        """Look up Enum by number.

        Args:
            number: Number of enum to find.

        Returns:
            Enum sub-class instance of that value.
        """
        return cls.__by_number[number]

    def __len__(cls):
        # Number of distinct enum values defined on the class.
        return len(cls.__by_name)
class Enum(six.with_metaclass(_EnumClass, object)):
    """Base class for all enumerated types."""

    __slots__ = set(('name', 'number'))

    def __new__(cls, index):
        """Acts as look-up routine after class is initialized.

        The purpose of overriding __new__ is to provide a way to treat
        Enum subclasses as casting types, similar to how the int type
        functions. A program can pass a string or an integer and this
        method will "convert" that value in to an appropriate Enum
        instance.

        Args:
            index: Name or number to look up. During initialization
                this is always the name of the new enum value.

        Raises:
            TypeError: When an inappropriate index value is provided.
        """
        # If is enum type of this class, return it.
        if isinstance(index, cls):
            return index

        # If number, look up by number.
        if isinstance(index, six.integer_types):
            try:
                return cls.lookup_by_number(index)
            except KeyError:
                pass

        # If name, look up by name.
        if isinstance(index, six.string_types):
            try:
                return cls.lookup_by_name(index)
            except KeyError:
                pass

        raise TypeError('No such value for %s in Enum %s' %
                        (index, cls.__name__))

    def __init__(self, name, number=None):
        """Initialize new Enum instance.

        Since this should only be called during class initialization any
        calls that happen after the class is frozen raises an exception.
        """
        # Immediately return if __init__ was called after _Enum.__init__().
        # It means that casting operator version of the class constructor
        # is being used.
        if getattr(type(self), '_DefinitionClass__initialized'):
            return
        # Bypass this class's immutable __setattr__.
        object.__setattr__(self, 'name', name)
        object.__setattr__(self, 'number', number)

    def __setattr__(self, name, value):
        # Enum values are immutable once created by the meta-class.
        raise TypeError('May not change enum values')

    def __str__(self):
        return self.name

    def __int__(self):
        return self.number

    def __repr__(self):
        return '%s(%s, %d)' % (type(self).__name__, self.name, self.number)

    def __reduce__(self):
        """Enable pickling.

        Returns:
            A 2-tuple containing the class and __new__ args to be used
            for restoring a pickled instance.
        """
        return self.__class__, (self.number,)

    def __cmp__(self, other):
        """Order is by number."""
        if isinstance(other, type(self)):
            # NOTE(review): 'cmp' is the Python 2 builtin; __cmp__ is never
            # invoked under Python 3, so the missing builtin is harmless
            # there.
            return cmp(self.number, other.number)
        return NotImplemented

    def __lt__(self, other):
        """Order is by number."""
        if isinstance(other, type(self)):
            return self.number < other.number
        return NotImplemented

    def __le__(self, other):
        """Order is by number."""
        if isinstance(other, type(self)):
            return self.number <= other.number
        return NotImplemented

    def __eq__(self, other):
        """Order is by number."""
        if isinstance(other, type(self)):
            return self.number == other.number
        return NotImplemented

    def __ne__(self, other):
        """Order is by number."""
        if isinstance(other, type(self)):
            return self.number != other.number
        return NotImplemented

    def __ge__(self, other):
        """Order is by number."""
        if isinstance(other, type(self)):
            return self.number >= other.number
        return NotImplemented

    def __gt__(self, other):
        """Order is by number."""
        if isinstance(other, type(self)):
            return self.number > other.number
        return NotImplemented

    def __hash__(self):
        """Hash by number."""
        return hash(self.number)

    @classmethod
    def to_dict(cls):
        """Make dictionary version of enumerated class.

        Dictionary created this way can be used with def_num.

        Returns:
            A dict (name) -> number
        """
        return dict((item.name, item.number) for item in iter(cls))

    @staticmethod
    def def_enum(dct, name):
        """Define enum class from dictionary.

        Args:
            dct: Dictionary of enumerated values for type.
            name: Name of enum.
        """
        return type(name, (Enum,), dct)
# TODO(rafek): Determine to what degree this enumeration should be compatible
# with FieldDescriptor.Type in https://github.com/google/protobuf.
class Variant(Enum):
    """Wire format variant.

    Used by the 'protobuf' wire format to determine how to transmit
    a single piece of data. May be used by other formats.

    See: http://code.google.com/apis/protocolbuffers/docs/encoding.html

    Values:
        DOUBLE: 64-bit floating point number.
        FLOAT: 32-bit floating point number.
        INT64: 64-bit signed integer.
        UINT64: 64-bit unsigned integer.
        INT32: 32-bit signed integer.
        BOOL: Boolean value (True or False).
        STRING: String of UTF-8 encoded text.
        MESSAGE: Embedded message as byte string.
        BYTES: String of 8-bit bytes.
        UINT32: 32-bit unsigned integer.
        ENUM: Enum value as integer.
        SINT32: 32-bit signed integer.  Uses "zig-zag" encoding.
        SINT64: 64-bit signed integer.  Uses "zig-zag" encoding.
    """
    # Numbers mirror protobuf's FieldDescriptor.Type values; gaps (6, 7,
    # 10, 15, 16) correspond to types this library does not support.
    DOUBLE = 1
    FLOAT = 2
    INT64 = 3
    UINT64 = 4
    INT32 = 5
    BOOL = 8
    STRING = 9
    MESSAGE = 11
    BYTES = 12
    UINT32 = 13
    ENUM = 14
    SINT32 = 17
    SINT64 = 18
class _MessageClass(_DefinitionClass):
    """Meta-class used for defining the Message base class.

    For more details about Message classes, see the Message class docstring.
    Information contained there may help understanding this class.

    Meta-class enables very specific behavior for any defined Message
    class. All attributes defined on an Message sub-class must be
    field instances, Enum class definitions or other Message class
    definitions. Each field attribute defined on an Message sub-class
    is added to the set of field definitions and the attribute is
    translated in to a slot. It also ensures that only one level of
    Message class hierarchy is possible. In other words it is not
    possible to declare sub-classes of sub-classes of Message.

    This class also defines some functions in order to restrict the
    behavior of the Message class and its sub-classes. It is not
    possible to change the behavior of the Message class in later
    classes since any new classes may be defined with only field,
    Enums and Messages, and no methods.
    """

    # pylint:disable=bad-mcs-classmethod-argument
    def __new__(cls, name, bases, dct):
        """Create new Message class instance.

        The __new__ method of the _MessageClass type is overridden so as to
        allow the translation of Field instances to slots.
        """
        by_number = {}
        by_name = {}

        variant_map = {}  # pylint:disable=unused-variable

        if bases != (object,):
            # Can only define one level of sub-classes below Message.
            if bases != (Message,):
                raise MessageDefinitionError(
                    'Message types may only inherit from Message')

            enums = []
            messages = []
            # Must not use iteritems because this loop will change the state
            # of dct.
            for key, field in dct.items():

                if key in _RESERVED_ATTRIBUTE_NAMES:
                    continue

                if isinstance(field, type) and issubclass(field, Enum):
                    enums.append(key)
                    continue

                if (isinstance(field, type) and
                        issubclass(field, Message) and
                        field is not Message):
                    messages.append(key)
                    continue

                # Reject anything that is not a field.
                # pylint:disable=unidiomatic-typecheck
                if type(field) is Field or not isinstance(field, Field):
                    raise MessageDefinitionError(
                        'May only use fields in message definitions. '
                        'Found: %s = %s' %
                        (key, field))

                if field.number in by_number:
                    raise DuplicateNumberError(
                        'Field with number %d declared more than once in %s' %
                        (field.number, name))

                # 'name' is one of the allowed post-init field attributes.
                field.name = key

                # Place in name and number maps.
                by_name[key] = field
                by_number[field.number] = field

            # Add enums if any exist.
            if enums:
                dct['__enums__'] = sorted(enums)

            # Add messages if any exist.
            if messages:
                dct['__messages__'] = sorted(messages)

        # Stored pre-mangled so Message methods can read them as
        # self.__by_number / self.__by_name.
        dct['_Message__by_number'] = by_number
        dct['_Message__by_name'] = by_name

        return _DefinitionClass.__new__(cls, name, bases, dct)

    def __init__(cls, name, bases, dct):
        """Initializer required to assign references to new class."""
        if bases != (object,):
            # Give nested Enum/Message definitions and all fields a weak
            # back-reference to this containing message class.
            for v in dct.values():
                if isinstance(v, _DefinitionClass) and v is not Message:
                    v._message_definition = weakref.ref(cls)

            for field in cls.all_fields():
                field._message_definition = weakref.ref(cls)

        _DefinitionClass.__init__(cls, name, bases, dct)
class Message(six.with_metaclass(_MessageClass, object)):
    """Base class for user defined message objects.

    Used to define messages for efficient transmission across network or
    process space. Messages are defined using the field classes
    (IntegerField, FloatField, EnumField, etc.).

    Messages are more restricted than normal classes in that they may
    only contain field attributes and other Message and Enum
    definitions. These restrictions are in place because the structure
    of the Message class is intended to itself be transmitted
    across network or process space and used directly by clients or
    even other servers. As such methods and non-field attributes could
    not be transmitted with the structural information causing
    discrepancies between different languages and implementations.

    Initialization and validation:

    A Message object is considered to be initialized if it has all required
    fields and any nested messages are also initialized.

    Calling 'check_initialized' will raise a ValidationException if it is
    not initialized; 'is_initialized' returns a boolean value indicating if
    it is valid.

    Validation automatically occurs when Message objects are created
    and populated. Validation that a given value will be compatible with
    a field that it is assigned to can be done through the Field instances
    validate() method. The validate method used on a message will check
    that all values of a message and its sub-messages are valid. Assigning
    an invalid value to a field will raise a ValidationException.

    Example:

        # Trade type.
        class TradeType(Enum):
            BUY = 1
            SELL = 2
            SHORT = 3
            CALL = 4

        class Lot(Message):
            price = IntegerField(1, required=True)
            quantity = IntegerField(2, required=True)

        class Order(Message):
            symbol = StringField(1, required=True)
            total_quantity = IntegerField(2, required=True)
            trade_type = EnumField(TradeType, 3, required=True)
            lots = MessageField(Lot, 4, repeated=True)
            limit = IntegerField(5)

        order = Order(symbol='GOOG',
                      total_quantity=10,
                      trade_type=TradeType.BUY)

        lot1 = Lot(price=304,
                   quantity=7)

        lot2 = Lot(price = 305,
                   quantity=3)

        order.lots = [lot1, lot2]

        # Now object is initialized!
        order.check_initialized()
    """
    def __init__(self, **kwargs):
        """Initialize internal messages state.

        Args:
            A message can be initialized via the constructor by passing
            in keyword arguments corresponding to fields. For example:

                class Date(Message):
                    day = IntegerField(1)
                    month = IntegerField(2)
                    year = IntegerField(3)

            Invoking:

                date = Date(day=6, month=6, year=1911)

            is the same as doing:

                date = Date()
                date.day = 6
                date.month = 6
                date.year = 1911
        """
        # Tag being an essential implementation detail must be private.
        self.__tags = {}
        # Unknown fields captured during decoding:
        # key -> (value, variant).
        self.__unrecognized_fields = {}

        assigned = set()
        for name, value in kwargs.items():
            # setattr goes through __setattr__, which rejects non-fields.
            setattr(self, name, value)
            assigned.add(name)

        # initialize repeated fields.
        for field in self.all_fields():
            if field.repeated and field.name not in assigned:
                setattr(self, field.name, [])
    def check_initialized(self):
        """Check class for initialization status.

        Check that all required fields are initialized

        Raises:
            ValidationError: If message is not initialized.
        """
        for name, field in self.__by_name.items():
            value = getattr(self, name)
            if value is None:
                if field.required:
                    raise ValidationError(
                        "Message %s is missing required field %s" %
                        (type(self).__name__, name))
            else:
                try:
                    # Recurse into nested messages so a missing required
                    # field anywhere in the tree is reported.
                    if (isinstance(field, MessageField) and
                            issubclass(field.message_type, Message)):
                        if field.repeated:
                            for item in value:
                                item_message_value = field.value_to_message(
                                    item)
                                item_message_value.check_initialized()
                        else:
                            message_value = field.value_to_message(value)
                            message_value.check_initialized()
                except ValidationError as err:
                    # Tag the error with the outermost message type only;
                    # deeper levels leave an existing message_name intact.
                    if not hasattr(err, 'message_name'):
                        err.message_name = type(self).__name__
                    raise
def is_initialized(self):
"""Get initialization status.
Returns:
True if message is valid, else False.
"""
try:
self.check_initialized()
except ValidationError:
return False
else:
return True
    @classmethod
    def all_fields(cls):
        """Get all field definition objects.

        Ordering is arbitrary.

        Returns:
            Iterator over all values in arbitrary order.
        """
        return cls.__by_name.values()

    @classmethod
    def field_by_name(cls, name):
        """Get field by name.

        Returns:
            Field object associated with name.

        Raises:
            KeyError if no field found by that name.
        """
        return cls.__by_name[name]

    @classmethod
    def field_by_number(cls, number):
        """Get field by number.

        Returns:
            Field object associated with number.

        Raises:
            KeyError if no field found by that number.
        """
        return cls.__by_number[number]
def get_assigned_value(self, name):
"""Get the assigned value of an attribute.
Get the underlying value of an attribute. If value has not
been set, will not return the default for the field.
Args:
name: Name of attribute to get.
Returns:
Value of attribute, None if it has not been set.
"""
message_type = type(self)
try:
field = message_type.field_by_name(name)
except KeyError:
raise AttributeError('Message %s has no field %s' % (
message_type.__name__, name))
return self.__tags.get(field.number)
def reset(self, name):
"""Reset assigned value for field.
Resetting a field will return it to its default value or None.
Args:
name: Name of field to reset.
"""
message_type = type(self)
try:
field = message_type.field_by_name(name)
except KeyError:
if name not in message_type.__by_name:
raise AttributeError('Message %s has no field %s' % (
message_type.__name__, name))
if field.repeated:
self.__tags[field.number] = FieldList(field, [])
else:
self.__tags.pop(field.number, None)
def all_unrecognized_fields(self):
"""Get the names of all unrecognized fields in this message."""
return list(self.__unrecognized_fields.keys())
def get_unrecognized_field_info(self, key, value_default=None,
variant_default=None):
"""Get the value and variant of an unknown field in this message.
Args:
key: The name or number of the field to retrieve.
value_default: Value to be returned if the key isn't found.
variant_default: Value to be returned as variant if the key isn't
found.
Returns:
(value, variant), where value and variant are whatever was passed
to set_unrecognized_field.
"""
value, variant = self.__unrecognized_fields.get(key, (value_default,
variant_default))
return value, variant
def set_unrecognized_field(self, key, value, variant):
"""Set an unrecognized field, used when decoding a message.
Args:
key: The name or number used to refer to this unknown value.
value: The value of the field.
variant: Type information needed to interpret the value or re-encode
it.
Raises:
TypeError: If the variant is not an instance of messages.Variant.
"""
if not isinstance(variant, Variant):
raise TypeError('Variant type %s is not valid.' % variant)
self.__unrecognized_fields[key] = value, variant
def __setattr__(self, name, value):
"""Change set behavior for messages.
Messages may only be assigned values that are fields.
Does not try to validate field when set.
Args:
name: Name of field to assign to.
value: Value to assign to field.
Raises:
AttributeError when trying to assign value that is not a field.
"""
if name in self.__by_name or name.startswith('_Message__'):
object.__setattr__(self, name, value)
else:
raise AttributeError("May not assign arbitrary value %s "
"to message %s" % (name, type(self).__name__))
def __repr__(self):
"""Make string representation of message.
Example:
class MyMessage(messages.Message):
integer_value = messages.IntegerField(1)
string_value = messages.StringField(2)
my_message = MyMessage()
my_message.integer_value = 42
my_message.string_value = u'A string'
print my_message
>>> <MyMessage
... integer_value: 42
... string_value: u'A string'>
Returns:
| python | Apache-2.0 | 1b14f58b3c895f542c724028dd0fb8ae0d816510 | 2026-01-05T07:12:01.390580Z | true |
google/apitools | https://github.com/google/apitools/blob/1b14f58b3c895f542c724028dd0fb8ae0d816510/apitools/base/protorpclite/protojson_test.py | apitools/base/protorpclite/protojson_test.py | #!/usr/bin/env python
#
# Copyright 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Tests for apitools.base.protorpclite.protojson."""
import datetime
import json
import unittest
from apitools.base.protorpclite import message_types
from apitools.base.protorpclite import messages
from apitools.base.protorpclite import protojson
from apitools.base.protorpclite import test_util
class CustomField(messages.MessageField):
    """Custom MessageField class.

    Accepts plain ints as assigned values but serializes every value as
    an empty VoidMessage, exercising the MessageField customization
    hooks (``type`` / ``message_type`` / ``value_to_message``).
    """
    # Python type accepted when a value is assigned to this field.
    type = int
    # Wire message type produced when a value is encoded.
    message_type = message_types.VoidMessage
    def __init__(self, number, **kwargs):
        super(CustomField, self).__init__(self.message_type, number, **kwargs)
    def value_to_message(self, value):
        # The int value carries no payload; every value maps to VoidMessage().
        return self.message_type()  # pylint:disable=not-callable
class MyMessage(messages.Message):
    """Test message containing various types.

    One field of each interesting variant (scalar, enum, nested message,
    repeated, datetime, custom) so the protojson round-trip tests can
    exercise every encode/decode path.
    """
    class Color(messages.Enum):
        RED = 1
        GREEN = 2
        BLUE = 3
    class Nested(messages.Message):
        nested_value = messages.StringField(1)
    class NestedDatetime(messages.Message):
        nested_dt_value = message_types.DateTimeField(1)
    # Scalar fields.
    a_string = messages.StringField(2)
    an_integer = messages.IntegerField(3)
    a_float = messages.FloatField(4)
    a_boolean = messages.BooleanField(5)
    an_enum = messages.EnumField(Color, 6)
    # Message-valued, repeated, datetime and custom fields.
    a_nested = messages.MessageField(Nested, 7)
    a_repeated = messages.IntegerField(8, repeated=True)
    a_repeated_float = messages.FloatField(9, repeated=True)
    a_datetime = message_types.DateTimeField(10)
    a_repeated_datetime = message_types.DateTimeField(11, repeated=True)
    a_custom = CustomField(12)
    a_repeated_custom = CustomField(13, repeated=True)
    a_nested_datetime = messages.MessageField(NestedDatetime, 14)
class ModuleInterfaceTest(test_util.ModuleInterfaceTest,
                          test_util.TestCase):
    """Check that the protojson module exposes the expected interface."""
    MODULE = protojson
# TODO(rafek): Convert this test to the compliance test in test_util.
class ProtojsonTest(test_util.TestCase,
                    test_util.ProtoConformanceTestBase):
    """Test JSON encoding and decoding.

    The encoded_* class attributes are fixtures consumed by
    ProtoConformanceTestBase; the test methods below add
    protojson-specific coverage (type coercion, unknown fields,
    datetimes, custom fields).
    """
    PROTOLIB = protojson
    def CompareEncoded(self, expected_encoded, actual_encoded):
        """JSON encoding will be laundered to remove string differences."""
        self.assertEqual(json.loads(expected_encoded),
                         json.loads(actual_encoded))
    # Canonical encoded fixtures used by the conformance base class.
    encoded_empty_message = '{}'
    encoded_partial = """{
    "double_value": 1.23,
    "int64_value": -100000000000,
    "int32_value": 1020,
    "string_value": "a string",
    "enum_value": "VAL2"
    }
    """
    # pylint:disable=anomalous-unicode-escape-in-string
    encoded_full = """{
    "double_value": 1.23,
    "float_value": -2.5,
    "int64_value": -100000000000,
    "uint64_value": 102020202020,
    "int32_value": 1020,
    "bool_value": true,
    "string_value": "a string\u044f",
    "bytes_value": "YSBieXRlc//+",
    "enum_value": "VAL2"
    }
    """
    encoded_repeated = """{
    "double_value": [1.23, 2.3],
    "float_value": [-2.5, 0.5],
    "int64_value": [-100000000000, 20],
    "uint64_value": [102020202020, 10],
    "int32_value": [1020, 718],
    "bool_value": [true, false],
    "string_value": ["a string\u044f", "another string"],
    "bytes_value": ["YSBieXRlc//+", "YW5vdGhlciBieXRlcw=="],
    "enum_value": ["VAL2", "VAL1"]
    }
    """
    encoded_nested = """{
    "nested": {
        "a_value": "a string"
    }
    }
    """
    encoded_repeated_nested = """{
    "repeated_nested": [{"a_value": "a string"},
                        {"a_value": "another string"}]
    }
    """
    unexpected_tag_message = '{"unknown": "value"}'
    encoded_default_assigned = '{"a_value": "a default"}'
    encoded_nested_empty = '{"nested": {}}'
    encoded_repeated_nested_empty = '{"repeated_nested": [{}, {}]}'
    encoded_extend_message = '{"int64_value": [400, 50, 6000]}'
    encoded_string_types = '{"string_value": "Latin"}'
    encoded_invalid_enum = '{"enum_value": "undefined"}'
    encoded_invalid_repeated_enum = '{"enum_value": ["VAL1", "undefined"]}'
    def testConvertIntegerToFloat(self):
        """Test that integers passed in to float fields are converted.

        This is necessary because JSON outputs integers for numbers
        with 0 decimals.
        """
        message = protojson.decode_message(MyMessage, '{"a_float": 10}')
        self.assertTrue(isinstance(message.a_float, float))
        self.assertEqual(10.0, message.a_float)
    def testConvertStringToNumbers(self):
        """Test that strings passed to integer fields are converted."""
        message = protojson.decode_message(MyMessage,
                                           """{"an_integer": "10",
                                               "a_float": "3.5",
                                               "a_repeated": ["1", "2"],
                                               "a_repeated_float": ["1.5", "2", 10]
                                               }""")
        self.assertEqual(MyMessage(an_integer=10,
                                   a_float=3.5,
                                   a_repeated=[1, 2],
                                   a_repeated_float=[1.5, 2.0, 10.0]),
                         message)
    def testWrongTypeAssignment(self):
        """Test when wrong type is assigned to a field."""
        self.assertRaises(messages.ValidationError,
                          protojson.decode_message,
                          MyMessage, '{"a_string": 10}')
        self.assertRaises(messages.ValidationError,
                          protojson.decode_message,
                          MyMessage, '{"an_integer": 10.2}')
        self.assertRaises(messages.ValidationError,
                          protojson.decode_message,
                          MyMessage, '{"an_integer": "10.2"}')
    def testNumericEnumeration(self):
        """Test that numbers work for enum values."""
        message = protojson.decode_message(MyMessage, '{"an_enum": 2}')
        expected_message = MyMessage()
        expected_message.an_enum = MyMessage.Color.GREEN
        self.assertEqual(expected_message, message)
    def testNumericEnumerationNegativeTest(self):
        """Test with an invalid number for the enum value."""
        # The message should successfully decode.
        message = protojson.decode_message(MyMessage,
                                           '{"an_enum": 89}')
        expected_message = MyMessage()
        self.assertEqual(expected_message, message)
        # The roundtrip should result in equivalent encoded
        # message.
        self.assertEqual('{"an_enum": 89}', protojson.encode_message(message))
    def testAlphaEnumeration(self):
        """Test that alpha enum values work."""
        message = protojson.decode_message(MyMessage, '{"an_enum": "RED"}')
        expected_message = MyMessage()
        expected_message.an_enum = MyMessage.Color.RED
        self.assertEqual(expected_message, message)
    def testAlphaEnumerationNegativeTest(self):
        """The alpha enum value is invalid."""
        # The message should successfully decode.
        message = protojson.decode_message(MyMessage,
                                           '{"an_enum": "IAMINVALID"}')
        expected_message = MyMessage()
        self.assertEqual(expected_message, message)
        # The roundtrip should result in equivalent encoded message.
        self.assertEqual('{"an_enum": "IAMINVALID"}',
                         protojson.encode_message(message))
    def testEnumerationNegativeTestWithEmptyString(self):
        """The enum value is an empty string."""
        # The message should successfully decode.
        message = protojson.decode_message(MyMessage, '{"an_enum": ""}')
        expected_message = MyMessage()
        self.assertEqual(expected_message, message)
        # The roundtrip should result in equivalent encoded message.
        self.assertEqual('{"an_enum": ""}', protojson.encode_message(message))
    def testNullValues(self):
        """Test that null values overwrite existing values."""
        self.assertEqual(MyMessage(),
                         protojson.decode_message(MyMessage,
                                                  ('{"an_integer": null,'
                                                   ' "a_nested": null,'
                                                   ' "an_enum": null'
                                                   '}')))
    def testEmptyList(self):
        """Test that empty lists are ignored."""
        self.assertEqual(MyMessage(),
                         protojson.decode_message(MyMessage,
                                                  '{"a_repeated": []}'))
    def testNotJSON(self):
        """Test error when string is not valid JSON."""
        self.assertRaises(
            ValueError,
            protojson.decode_message, MyMessage,
            '{this is not json}')
    def testDoNotEncodeStrangeObjects(self):
        """Test trying to encode a strange object.

        The main purpose of this test is to complete coverage. It
        ensures that the default behavior of the JSON encoder is
        preserved when someone tries to serialized an unexpected type.
        """
        class BogusObject(object):
            def check_initialized(self):
                pass
        self.assertRaises(TypeError,
                          protojson.encode_message,
                          BogusObject())
    def testMergeEmptyString(self):
        """Test merging the empty or space only string."""
        message = protojson.decode_message(test_util.OptionalMessage, '')
        self.assertEqual(test_util.OptionalMessage(), message)
        message = protojson.decode_message(test_util.OptionalMessage, ' ')
        self.assertEqual(test_util.OptionalMessage(), message)
    def testProtojsonUnrecognizedFieldName(self):
        """Test that unrecognized fields are saved and can be accessed."""
        decoded = protojson.decode_message(
            MyMessage,
            ('{"an_integer": 1, "unknown_val": 2}'))
        self.assertEqual(decoded.an_integer, 1)
        self.assertEqual(1, len(decoded.all_unrecognized_fields()))
        self.assertEqual('unknown_val', decoded.all_unrecognized_fields()[0])
        self.assertEqual((2, messages.Variant.INT64),
                         decoded.get_unrecognized_field_info('unknown_val'))
    def testProtojsonUnrecognizedFieldNumber(self):
        """Test that unrecognized fields are saved and can be accessed."""
        # Numeric-looking keys are kept as strings, never converted to ints.
        decoded = protojson.decode_message(
            MyMessage,
            '{"an_integer": 1, "1001": "unknown", "-123": "negative", '
            '"456_mixed": 2}')
        self.assertEqual(decoded.an_integer, 1)
        self.assertEqual(3, len(decoded.all_unrecognized_fields()))
        self.assertFalse(1001 in decoded.all_unrecognized_fields())
        self.assertTrue('1001' in decoded.all_unrecognized_fields())
        self.assertEqual(('unknown', messages.Variant.STRING),
                         decoded.get_unrecognized_field_info('1001'))
        self.assertTrue('-123' in decoded.all_unrecognized_fields())
        self.assertEqual(('negative', messages.Variant.STRING),
                         decoded.get_unrecognized_field_info('-123'))
        self.assertTrue('456_mixed' in decoded.all_unrecognized_fields())
        self.assertEqual((2, messages.Variant.INT64),
                         decoded.get_unrecognized_field_info('456_mixed'))
    def testProtojsonUnrecognizedNull(self):
        """Test that unrecognized fields that are None are skipped."""
        decoded = protojson.decode_message(
            MyMessage,
            '{"an_integer": 1, "unrecognized_null": null}')
        self.assertEqual(decoded.an_integer, 1)
        self.assertEqual(decoded.all_unrecognized_fields(), [])
    def testUnrecognizedFieldVariants(self):
        """Test that unrecognized fields are mapped to the right variants."""
        for encoded, expected_variant in (
                ('{"an_integer": 1, "unknown_val": 2}',
                 messages.Variant.INT64),
                ('{"an_integer": 1, "unknown_val": 2.0}',
                 messages.Variant.DOUBLE),
                ('{"an_integer": 1, "unknown_val": "string value"}',
                 messages.Variant.STRING),
                ('{"an_integer": 1, "unknown_val": [1, 2, 3]}',
                 messages.Variant.INT64),
                ('{"an_integer": 1, "unknown_val": [1, 2.0, 3]}',
                 messages.Variant.DOUBLE),
                ('{"an_integer": 1, "unknown_val": [1, "foo", 3]}',
                 messages.Variant.STRING),
                ('{"an_integer": 1, "unknown_val": true}',
                 messages.Variant.BOOL)):
            decoded = protojson.decode_message(MyMessage, encoded)
            self.assertEqual(decoded.an_integer, 1)
            self.assertEqual(1, len(decoded.all_unrecognized_fields()))
            self.assertEqual(
                'unknown_val', decoded.all_unrecognized_fields()[0])
            _, decoded_variant = decoded.get_unrecognized_field_info(
                'unknown_val')
            self.assertEqual(expected_variant, decoded_variant)
    def testDecodeDateTime(self):
        # Both with and without fractional seconds.
        for datetime_string, datetime_vals in (
                ('2012-09-30T15:31:50.262', (2012, 9, 30, 15, 31, 50, 262000)),
                ('2012-09-30T15:31:50', (2012, 9, 30, 15, 31, 50, 0))):
            message = protojson.decode_message(
                MyMessage, '{"a_datetime": "%s"}' % datetime_string)
            expected_message = MyMessage(
                a_datetime=datetime.datetime(*datetime_vals))
            self.assertEqual(expected_message, message)
    def testDecodeInvalidDateTime(self):
        self.assertRaises(messages.DecodeError, protojson.decode_message,
                          MyMessage, '{"a_datetime": "invalid"}')
    def testDecodeInvalidMessage(self):
        # A bad datetime nested inside another message must still surface
        # as a DecodeError.
        encoded = """{
        "a_nested_datetime": {
            "nested_dt_value": "invalid"
        }
        }
        """
        self.assertRaises(messages.DecodeError, protojson.decode_message,
                          MyMessage, encoded)
    def testEncodeDateTime(self):
        for datetime_string, datetime_vals in (
                ('2012-09-30T15:31:50.262000',
                 (2012, 9, 30, 15, 31, 50, 262000)),
                ('2012-09-30T15:31:50.262123',
                 (2012, 9, 30, 15, 31, 50, 262123)),
                ('2012-09-30T15:31:50',
                 (2012, 9, 30, 15, 31, 50, 0))):
            decoded_message = protojson.encode_message(
                MyMessage(a_datetime=datetime.datetime(*datetime_vals)))
            expected_decoding = '{"a_datetime": "%s"}' % datetime_string
            self.CompareEncoded(expected_decoding, decoded_message)
    def testDecodeRepeatedDateTime(self):
        message = protojson.decode_message(
            MyMessage,
            '{"a_repeated_datetime": ["2012-09-30T15:31:50.262", '
            '"2010-01-21T09:52:00", "2000-01-01T01:00:59.999999"]}')
        expected_message = MyMessage(
            a_repeated_datetime=[
                datetime.datetime(2012, 9, 30, 15, 31, 50, 262000),
                datetime.datetime(2010, 1, 21, 9, 52),
                datetime.datetime(2000, 1, 1, 1, 0, 59, 999999)])
        self.assertEqual(expected_message, message)
    def testDecodeCustom(self):
        message = protojson.decode_message(MyMessage, '{"a_custom": 1}')
        self.assertEqual(MyMessage(a_custom=1), message)
    def testDecodeInvalidCustom(self):
        self.assertRaises(messages.ValidationError, protojson.decode_message,
                          MyMessage, '{"a_custom": "invalid"}')
    def testEncodeCustom(self):
        decoded_message = protojson.encode_message(MyMessage(a_custom=1))
        self.CompareEncoded('{"a_custom": 1}', decoded_message)
    def testDecodeRepeatedCustom(self):
        message = protojson.decode_message(
            MyMessage, '{"a_repeated_custom": [1, 2, 3]}')
        self.assertEqual(MyMessage(a_repeated_custom=[1, 2, 3]), message)
    def testDecodeRepeatedEmpty(self):
        message = protojson.decode_message(
            MyMessage, '{"a_repeated": []}')
        self.assertEqual(MyMessage(a_repeated=[]), message)
    def testDecodeNone(self):
        message = protojson.decode_message(
            MyMessage, '{"an_integer": []}')
        self.assertEqual(MyMessage(an_integer=None), message)
    def testDecodeBadBase64BytesField(self):
        """Test decoding improperly encoded base64 bytes value."""
        self.assertRaisesWithRegexpMatch(
            messages.DecodeError,
            'Base64 decoding error',
            protojson.decode_message,
            test_util.OptionalMessage,
            '{"bytes_value": "abcdefghijklmnopq"}')
class CustomProtoJson(protojson.ProtoJson):
    """ProtoJson subclass that tags field values with marker prefixes.

    Used by CustomProtoJsonTest to prove the per-field encode/decode
    hooks are actually invoked.
    """

    def encode_field(self, field, value):
        # Prefix marks that the custom encoder ran for this field.
        marker = '{encoded}'
        return marker + value

    def decode_field(self, field, value):
        # Prefix marks that the custom decoder ran for this field.
        marker = '{decoded}'
        return marker + value
class CustomProtoJsonTest(test_util.TestCase):
    """Tests for serialization overriding functionality."""
    def setUp(self):
        self.protojson = CustomProtoJson()
    def testEncode(self):
        # encode_field must be applied, so the marker prefix appears.
        self.assertEqual(
            '{"a_string": "{encoded}xyz"}',
            self.protojson.encode_message(MyMessage(a_string='xyz')))
    def testDecode(self):
        self.assertEqual(
            MyMessage(a_string='{decoded}xyz'),
            self.protojson.decode_message(MyMessage, '{"a_string": "xyz"}'))
    def testDecodeEmptyMessage(self):
        self.assertEqual(
            MyMessage(a_string='{decoded}'),
            self.protojson.decode_message(MyMessage, '{"a_string": ""}'))
    def testDefault(self):
        # NOTE(review): assertTrue's second argument is a failure message,
        # so the first statement only checks that get_default() is truthy;
        # an assertEqual/assertIs comparison may have been intended —
        # confirm before changing.
        self.assertTrue(protojson.ProtoJson.get_default(),
                        protojson.ProtoJson.get_default())
        instance = CustomProtoJson()
        protojson.ProtoJson.set_default(instance)
        self.assertTrue(instance is protojson.ProtoJson.get_default())
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| python | Apache-2.0 | 1b14f58b3c895f542c724028dd0fb8ae0d816510 | 2026-01-05T07:12:01.390580Z | false |
google/apitools | https://github.com/google/apitools/blob/1b14f58b3c895f542c724028dd0fb8ae0d816510/apitools/base/protorpclite/message_types.py | apitools/base/protorpclite/message_types.py | #!/usr/bin/env python
#
# Copyright 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Simple protocol message types.
Includes new message and field types that are outside what is defined by the
protocol buffers standard.
"""
import datetime
from apitools.base.protorpclite import messages
from apitools.base.protorpclite import util
__all__ = [
'DateTimeField',
'DateTimeMessage',
'VoidMessage',
]
class VoidMessage(messages.Message):
    """Empty message.

    Carries no fields; used where a message type is required but no
    data needs to be transmitted.
    """
class DateTimeMessage(messages.Message):
    """Message to store/transmit a DateTime.

    Fields:
      milliseconds: Milliseconds since Jan 1st 1970 local time.
      time_zone_offset: Optional time zone offset, in minutes from UTC.
    """
    # Milliseconds since the (local) epoch; see DateTimeField for the
    # encode/decode arithmetic.
    milliseconds = messages.IntegerField(1, required=True)
    # Minutes east of UTC; absent for naive datetimes.
    time_zone_offset = messages.IntegerField(2)
class DateTimeField(messages.MessageField):
    """Field definition for datetime values.

    Stores a python datetime object as a DateTimeMessage field. If time
    zone information is included in the datetime object, it will be
    included in the encoded data when this is encoded/decoded; naive
    datetimes round-trip as naive datetimes.
    """
    # Python type accepted when a value is assigned to this field.
    type = datetime.datetime
    # Wire message type produced when a value is encoded.
    message_type = DateTimeMessage
    @util.positional(3)
    def __init__(self,
                 number,
                 **kwargs):
        super(DateTimeField, self).__init__(self.message_type,
                                            number,
                                            **kwargs)
    def value_from_message(self, message):
        """Convert DateTimeMessage to a datetime.

        Args:
          message: A DateTimeMessage instance.

        Returns:
          A datetime instance; time zone aware iff the message carried a
          time_zone_offset.
        """
        message = super(DateTimeField, self).value_from_message(message)
        if message.time_zone_offset is None:
            # No offset stored: interpret milliseconds as UTC and return
            # a naive datetime (mirrors the encoding in value_to_message).
            return datetime.datetime.fromtimestamp(
                message.milliseconds / 1000.0,
                tz=datetime.timezone.utc).replace(tzinfo=None)
        # Need to subtract the time zone offset, because when we call
        # datetime.fromtimestamp, it will add the time zone offset to the
        # value we pass.
        milliseconds = (message.milliseconds -
                        60000 * message.time_zone_offset)
        timezone = util.TimeZoneOffset(message.time_zone_offset)
        return datetime.datetime.fromtimestamp(milliseconds / 1000.0,
                                               tz=timezone)
    def value_to_message(self, value):
        """Convert a datetime to a DateTimeMessage.

        Args:
          value: A datetime instance, naive or time zone aware.

        Returns:
          A DateTimeMessage holding milliseconds since the (local) epoch
          and, for aware datetimes, the UTC offset in minutes.
        """
        value = super(DateTimeField, self).value_to_message(value)
        # First, determine the delta from the epoch, so we can fill in
        # DateTimeMessage's milliseconds field.
        if value.tzinfo is None:
            time_zone_offset = 0
            local_epoch = datetime.datetime.fromtimestamp(
                0, tz=datetime.timezone.utc).replace(tzinfo=None)
        else:
            time_zone_offset = util.total_seconds(
                value.tzinfo.utcoffset(value))
            # Determine Jan 1, 1970 local time.
            local_epoch = datetime.datetime.fromtimestamp(-time_zone_offset,
                                                          tz=value.tzinfo)
        delta = value - local_epoch
        # Create and fill in the DateTimeMessage, including time zone if
        # one was specified.
        message = DateTimeMessage()
        message.milliseconds = int(util.total_seconds(delta) * 1000)
        if value.tzinfo is not None:
            utc_offset = value.tzinfo.utcoffset(value)
            if utc_offset is not None:
                # Reuse the offset computed above instead of calling
                # tzinfo.utcoffset() a second time (it was previously
                # invoked twice for the same value).
                message.time_zone_offset = int(
                    util.total_seconds(utc_offset) / 60)
        return message
| python | Apache-2.0 | 1b14f58b3c895f542c724028dd0fb8ae0d816510 | 2026-01-05T07:12:01.390580Z | false |
google/apitools | https://github.com/google/apitools/blob/1b14f58b3c895f542c724028dd0fb8ae0d816510/apitools/base/protorpclite/message_types_test.py | apitools/base/protorpclite/message_types_test.py | #!/usr/bin/env python
#
# Copyright 2013 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Tests for apitools.base.protorpclite.message_types."""
import datetime
import unittest
from apitools.base.protorpclite import message_types
from apitools.base.protorpclite import messages
from apitools.base.protorpclite import test_util
from apitools.base.protorpclite import util
class ModuleInterfaceTest(test_util.ModuleInterfaceTest,
                          test_util.TestCase):
    """Check that message_types exposes the expected module interface."""
    MODULE = message_types
class DateTimeFieldTest(test_util.TestCase):
    """Round-trip and error tests for message_types.DateTimeField."""
    def testValueToMessage(self):
        field = message_types.DateTimeField(1)
        message = field.value_to_message(
            datetime.datetime(2033, 2, 4, 11, 22, 10))
        self.assertEqual(
            message_types.DateTimeMessage(milliseconds=1991128930000), message)
    def testValueToMessageBadValue(self):
        field = message_types.DateTimeField(1)
        self.assertRaisesWithRegexpMatch(
            messages.EncodeError,
            'Expected type datetime, got int: 20',
            field.value_to_message, 20)
    def testValueToMessageWithTimeZone(self):
        # A +10h offset must be preserved as 600 minutes on the wire.
        time_zone = util.TimeZoneOffset(60 * 10)
        field = message_types.DateTimeField(1)
        message = field.value_to_message(
            datetime.datetime(2033, 2, 4, 11, 22, 10, tzinfo=time_zone))
        self.assertEqual(
            message_types.DateTimeMessage(milliseconds=1991128930000,
                                          time_zone_offset=600),
            message)
    def testValueFromMessage(self):
        message = message_types.DateTimeMessage(milliseconds=1991128000000)
        field = message_types.DateTimeField(1)
        timestamp = field.value_from_message(message)
        self.assertEqual(datetime.datetime(2033, 2, 4, 11, 6, 40),
                         timestamp)
    def testValueFromMessageBadValue(self):
        field = message_types.DateTimeField(1)
        self.assertRaisesWithRegexpMatch(
            messages.DecodeError,
            'Expected type DateTimeMessage, got VoidMessage: <VoidMessage>',
            field.value_from_message, message_types.VoidMessage())
    def testValueFromMessageWithTimeZone(self):
        message = message_types.DateTimeMessage(milliseconds=1991128000000,
                                                time_zone_offset=300)
        field = message_types.DateTimeField(1)
        timestamp = field.value_from_message(message)
        time_zone = util.TimeZoneOffset(60 * 5)
        self.assertEqual(
            datetime.datetime(2033, 2, 4, 11, 6, 40, tzinfo=time_zone),
            timestamp)
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| python | Apache-2.0 | 1b14f58b3c895f542c724028dd0fb8ae0d816510 | 2026-01-05T07:12:01.390580Z | false |
google/apitools | https://github.com/google/apitools/blob/1b14f58b3c895f542c724028dd0fb8ae0d816510/apitools/gen/gen_client_test.py | apitools/gen/gen_client_test.py | #
# Copyright 2015 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test for gen_client module."""
import os
import unittest
from apitools.gen import gen_client
from apitools.gen import test_utils
def GetTestDataPath(*path):
    """Return a path under this module's testdata directory.

    Args:
      *path: Path components to append below testdata/.

    Returns:
      The joined filesystem path.
    """
    testdata_root = os.path.join(os.path.dirname(__file__), 'testdata')
    return os.path.join(testdata_root, *path)
def _GetContent(file_path):
    """Read and return the full text contents of file_path."""
    with open(file_path) as stream:
        contents = stream.read()
    return contents
class ClientGenCliTest(unittest.TestCase):
    """End-to-end tests driving gen_client.main with discovery fixtures.

    Each test invokes the CLI against a discovery document from
    testdata/ and asserts on the set of files generated into a temp dir.
    """
    def testHelp_NotEnoughArguments(self):
        # -h exits with status 0 after printing usage.
        with self.assertRaisesRegex(SystemExit, '0'):
            with test_utils.CaptureOutput() as (_, err):
                gen_client.main([gen_client.__file__, '-h'])
        err_output = err.getvalue()
        self.assertIn('usage:', err_output)
        self.assertIn('error: too few arguments', err_output)
    def testGenClient_SimpleDocNoInit(self):
        with test_utils.TempDir() as tmp_dir_path:
            gen_client.main([
                gen_client.__file__,
                '--init-file', 'none',
                '--infile', GetTestDataPath('dns', 'dns_v1.json'),
                '--outdir', tmp_dir_path,
                '--overwrite',
                '--root_package', 'google.apis',
                'client'
            ])
            # --init-file none: no __init__.py is emitted.
            expected_files = (
                set(['dns_v1_client.py', 'dns_v1_messages.py']))
            self.assertEqual(expected_files, set(os.listdir(tmp_dir_path)))
    def testGenClient_SimpleDocEmptyInit(self):
        with test_utils.TempDir() as tmp_dir_path:
            gen_client.main([
                gen_client.__file__,
                '--init-file', 'empty',
                '--infile', GetTestDataPath('dns', 'dns_v1.json'),
                '--outdir', tmp_dir_path,
                '--overwrite',
                '--root_package', 'google.apis',
                'client'
            ])
            expected_files = (
                set(['dns_v1_client.py', 'dns_v1_messages.py', '__init__.py']))
            self.assertEqual(expected_files, set(os.listdir(tmp_dir_path)))
            init_file = _GetContent(os.path.join(tmp_dir_path, '__init__.py'))
            self.assertEqual("""\"""Package marker file.\"""
from __future__ import absolute_import
import pkgutil
__path__ = pkgutil.extend_path(__path__, __name__)
""", init_file)
    def testGenClient_SimpleDocWithV4(self):
        with test_utils.TempDir() as tmp_dir_path:
            gen_client.main([
                gen_client.__file__,
                '--infile', GetTestDataPath('dns', 'dns_v1.json'),
                '--outdir', tmp_dir_path,
                '--overwrite',
                '--apitools_version', '0.4.12',
                '--root_package', 'google.apis',
                'client'
            ])
            self.assertEqual(
                set(['dns_v1_client.py', 'dns_v1_messages.py', '__init__.py']),
                set(os.listdir(tmp_dir_path)))
    def testGenClient_SimpleDocWithV5(self):
        with test_utils.TempDir() as tmp_dir_path:
            gen_client.main([
                gen_client.__file__,
                '--infile', GetTestDataPath('dns', 'dns_v1.json'),
                '--outdir', tmp_dir_path,
                '--overwrite',
                '--apitools_version', '0.5.0',
                '--root_package', 'google.apis',
                'client'
            ])
            self.assertEqual(
                set(['dns_v1_client.py', 'dns_v1_messages.py', '__init__.py']),
                set(os.listdir(tmp_dir_path)))
    def testGenClient_ApiVersioning(self):
        with test_utils.TempDir() as tmp_dir_path:
            gen_client.main([
                gen_client.__file__,
                '--infile', GetTestDataPath(
                    'dns', 'dns_2015-08-07-preview.json'),
                '--outdir', tmp_dir_path,
                '--overwrite',
                '--version-identifier', 'v2015_08_07_preview',
                '--root_package', 'google.apis',
                'client'
            ])
            self.assertEqual(
                set([
                    'dns_v2015_08_07_preview_client.py',
                    'dns_v2015_08_07_preview_messages.py',
                    '__init__.py']),
                set(os.listdir(tmp_dir_path)))
            client_file = _GetContent(
                os.path.join(tmp_dir_path, 'dns_v2015_08_07_preview_client.py'))
            # Check that "apiVersion" system parameter values from discovery doc
            # appear in generated client.
            self.assertIn('2015-01-01-preview', client_file)
            self.assertIn('2015-02-02-preview', client_file)
            self.assertIn('2015-03-03-preview', client_file)
    def testGenClient_AnyObjectCustomFormat(self):
        with test_utils.TempDir() as tmp_dir_path:
            gen_client.main([
                gen_client.__file__,
                '--infile', GetTestDataPath(
                    'compute', 'compute_2025-01-01-preview.json'),
                '--outdir', tmp_dir_path,
                '--overwrite',
                '--version-identifier', 'v2025_01_01_preview',
                '--root_package', 'google.apis',
                'client'
            ])
            self.assertEqual(
                set([
                    'compute_v2025_01_01_preview_client.py',
                    'compute_v2025_01_01_preview_messages.py',
                    '__init__.py']),
                set(os.listdir(tmp_dir_path)))
    def testGenPipPackage_SimpleDoc(self):
        with test_utils.TempDir() as tmp_dir_path:
            gen_client.main([
                gen_client.__file__,
                '--infile', GetTestDataPath('dns', 'dns_v1.json'),
                '--outdir', tmp_dir_path,
                '--overwrite',
                '--root_package', 'google.apis',
                'pip_package'
            ])
            self.assertEqual(
                set(['apitools', 'setup.py']),
                set(os.listdir(tmp_dir_path)))
    def testGenProto_SimpleDoc(self):
        with test_utils.TempDir() as tmp_dir_path:
            gen_client.main([
                gen_client.__file__,
                '--infile', GetTestDataPath('dns', 'dns_v1.json'),
                '--outdir', tmp_dir_path,
                '--overwrite',
                '--root_package', 'google.apis',
                'proto'
            ])
            self.assertEqual(
                set(['dns_v1_messages.proto', 'dns_v1_services.proto']),
                set(os.listdir(tmp_dir_path)))
| python | Apache-2.0 | 1b14f58b3c895f542c724028dd0fb8ae0d816510 | 2026-01-05T07:12:01.390580Z | false |
google/apitools | https://github.com/google/apitools/blob/1b14f58b3c895f542c724028dd0fb8ae0d816510/apitools/gen/util_test.py | apitools/gen/util_test.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2015 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for util."""
import codecs
import gzip
import os
import six.moves.urllib.request as urllib_request
import tempfile
import unittest
from apitools.gen import util
from mock import patch
class NormalizeVersionTest(unittest.TestCase):
    """Tests for util.NormalizeVersion."""
    def testVersions(self):
        # Already-valid identifiers pass through unchanged; dots are
        # replaced so the result is a valid Python identifier fragment.
        already_valid = 'v1'
        self.assertEqual(already_valid, util.NormalizeVersion(already_valid))
        to_clean = 'v0.1'
        self.assertEqual('v0_1', util.NormalizeVersion(to_clean))
class NamesTest(unittest.TestCase):
    """Tests for util.Names identifier sanitization."""
    def testKeywords(self):
        # Python keywords get a trailing underscore.
        names = util.Names([''])
        self.assertEqual('in_', names.CleanName('in'))
    def testNormalizeEnumName(self):
        # Enum names that start with a digit get a leading underscore.
        names = util.Names([''])
        self.assertEqual('_0', names.NormalizeEnumName('0'))
class MockRequestResponse():
    """Stand-in for the response object returned by urllib urlopen."""

    class MockRequestEncoding():
        """Header-like object whose get() always reports one encoding."""

        def __init__(self, encoding):
            self.encoding = encoding

        def get(self, _):
            # The requested header name is ignored; every lookup yields
            # the configured encoding.
            return self.encoding

    def __init__(self, content, encoding):
        self.content = content
        self.encoding = MockRequestResponse.MockRequestEncoding(encoding)

    def read(self):
        """Return the raw payload, mirroring response.read()."""
        return self.content

    def info(self):
        """Return the mock headers object, mirroring response.info()."""
        return self.encoding
def _Gzip(raw_content):
    """Returns gzipped content from any content.

    Args:
      raw_content: Bytes to compress.

    Returns:
      The gzip-compressed representation of raw_content.
    """
    # gzip.compress does the whole job in memory, replacing the previous
    # round-trip through a NamedTemporaryFile on disk, which was slower
    # and could leak the temp file if the process died before unlink().
    return gzip.compress(raw_content)
class GetURLContentTest(unittest.TestCase):
    """Tests for util._GetURLContent with urlopen patched out."""
    def testUnspecifiedContentEncoding(self):
        # No Content-Encoding header: the body is returned verbatim.
        data = 'regular non-gzipped content'
        with patch.object(urllib_request, 'urlopen',
                          return_value=MockRequestResponse(data, '')):
            self.assertEqual(data, util._GetURLContent('unused_url_parameter'))
    def testGZippedContent(self):
        # gzip Content-Encoding: the body is decompressed transparently.
        data = u'¿Hola qué tal?'
        compressed_data = _Gzip(data.encode('utf-8'))
        with patch.object(urllib_request, 'urlopen',
                          return_value=MockRequestResponse(
                              compressed_data, 'gzip')):
            self.assertEqual(data, util._GetURLContent(
                'unused_url_parameter').decode('utf-8'))
| python | Apache-2.0 | 1b14f58b3c895f542c724028dd0fb8ae0d816510 | 2026-01-05T07:12:01.390580Z | false |
google/apitools | https://github.com/google/apitools/blob/1b14f58b3c895f542c724028dd0fb8ae0d816510/apitools/gen/gen_client_lib.py | apitools/gen/gen_client_lib.py | #!/usr/bin/env python
#
# Copyright 2015 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Simple tool for generating a client library.
Relevant links:
https://developers.google.com/discovery/v1/reference/apis#resource
"""
import datetime
from apitools.gen import message_registry
from apitools.gen import service_registry
from apitools.gen import util
def _ApitoolsVersion():
    """Returns version of the currently installed google-apitools package.

    Falls back to the placeholder 'X.X.X' when setuptools' pkg_resources
    is unavailable or the distribution is not installed.
    """
    try:
        import pkg_resources
    except ImportError:
        return 'X.X.X'
    try:
        distribution = pkg_resources.get_distribution('google-apitools')
    except pkg_resources.DistributionNotFound:
        return 'X.X.X'
    return distribution.version
def _StandardQueryParametersSchema(discovery_doc):
    """Sets up dict of standard query parameters.

    Args:
      discovery_doc: Parsed discovery document (dict).

    Returns:
      A schema dict describing the StandardQueryParameters message.
    """
    properties = discovery_doc.get('parameters', {})
    # We add an entry for the trace, since Discovery doesn't.
    # Note: this intentionally updates the same dict object held by
    # discovery_doc, matching the long-standing behavior.
    properties['trace'] = {
        'type': 'string',
        'description': ('A tracing token of the form "token:<tokenid>" '
                        'to include in api requests.'),
        'location': 'query',
    }
    return {
        'id': 'StandardQueryParameters',
        'type': 'object',
        'description': 'Query parameters accepted by all methods.',
        'properties': properties,
    }
class DescriptorGenerator(object):

    """Code generator for a given discovery document."""

    def __init__(self, discovery_doc, client_info, names, root_package, outdir,
                 base_package, protorpc_package, init_wildcards_file=True,
                 use_proto2=False, unelidable_request_methods=None,
                 apitools_version=''):
        """Stash code-gen inputs and build message/service registries.

        The registries are fully populated here; the Write* methods below
        only serialize what was built in this constructor.
        """
        self.__discovery_doc = discovery_doc
        self.__client_info = client_info
        self.__outdir = outdir
        self.__use_proto2 = use_proto2
        self.__description = util.CleanDescription(
            self.__discovery_doc.get('description', ''))
        self.__package = self.__client_info.package
        self.__version = self.__client_info.version
        # 'revision' feeds into the generated setup.py version string.
        self.__revision = discovery_doc.get('revision', '1')
        self.__init_wildcards_file = init_wildcards_file
        self.__root_package = root_package
        self.__base_files_package = base_package
        self.__protorpc_package = protorpc_package
        self.__names = names

        # Order is important here: we need the schemas before we can
        # define the services.
        self.__message_registry = message_registry.MessageRegistry(
            self.__client_info, self.__names, self.__description,
            self.__root_package, self.__base_files_package,
            self.__protorpc_package)
        schemas = self.__discovery_doc.get('schemas', {})
        # Sorted for deterministic output across runs.
        for schema_name, schema in sorted(schemas.items()):
            self.__message_registry.AddDescriptorFromSchema(
                schema_name, schema)

        # We need to add one more message type for the global parameters.
        standard_query_schema = _StandardQueryParametersSchema(
            self.__discovery_doc)
        self.__message_registry.AddDescriptorFromSchema(
            standard_query_schema['id'], standard_query_schema)

        # Now that we know all the messages, we need to correct some
        # fields from MessageFields to EnumFields.
        self.__message_registry.FixupMessageFields()

        self.__services_registry = service_registry.ServiceRegistry(
            self.__client_info,
            self.__message_registry,
            self.__names,
            self.__root_package,
            self.__base_files_package,
            unelidable_request_methods or [])
        services = self.__discovery_doc.get('resources', {})
        for service_name, methods in sorted(services.items()):
            self.__services_registry.AddServiceFromResource(
                service_name, methods)
        # We might also have top-level methods.
        api_methods = self.__discovery_doc.get('methods', [])
        if api_methods:
            self.__services_registry.AddServiceFromResource(
                'api', {'methods': api_methods})
        # Scopes may grow while services are registered, so refresh them.
        # pylint: disable=protected-access
        self.__client_info = self.__client_info._replace(
            scopes=self.__services_registry.scopes)

        # The apitools version that will be used in prerequisites for the
        # generated packages.
        self.__apitools_version = (
            apitools_version if apitools_version else _ApitoolsVersion())

    @property
    def client_info(self):
        return self.__client_info

    @property
    def discovery_doc(self):
        return self.__discovery_doc

    @property
    def names(self):
        return self.__names

    @property
    def outdir(self):
        return self.__outdir

    @property
    def package(self):
        return self.__package

    @property
    def use_proto2(self):
        return self.__use_proto2

    @property
    def apitools_version(self):
        return self.__apitools_version

    def _GetPrinter(self, out):
        # All Write* methods route output through this pretty-printer.
        printer = util.SimplePrettyPrinter(out)
        return printer

    def WriteInit(self, out):
        """Write a simple __init__.py for the generated client."""
        printer = self._GetPrinter(out)
        if self.__init_wildcards_file:
            printer('"""Common imports for generated %s client library."""',
                    self.__client_info.package)
            printer('# pylint:disable=wildcard-import')
        else:
            printer('"""Package marker file."""')
        printer()
        printer('from __future__ import absolute_import')
        printer()
        printer('import pkgutil')
        printer()
        if self.__init_wildcards_file:
            printer('from %s import *', self.__base_files_package)
            # '.' as root package means relative imports in the output.
            if self.__root_package == '.':
                import_prefix = '.'
            else:
                import_prefix = '%s.' % self.__root_package
            printer('from %s%s import *',
                    import_prefix, self.__client_info.client_rule_name)
            printer('from %s%s import *',
                    import_prefix, self.__client_info.messages_rule_name)
        printer()
        printer('__path__ = pkgutil.extend_path(__path__, __name__)')

    def WriteIntermediateInit(self, out):
        """Write a simple __init__.py for an intermediate directory."""
        printer = self._GetPrinter(out)
        printer('#!/usr/bin/env python')
        printer('"""Shared __init__.py for apitools."""')
        printer()
        printer('from pkgutil import extend_path')
        printer('__path__ = extend_path(__path__, __name__)')

    def WriteSetupPy(self, out):
        """Write a setup.py for upload to PyPI."""
        printer = self._GetPrinter(out)
        year = datetime.datetime.now().year
        # Emit the Apache-2.0 header into the generated file.
        printer('# Copyright %s Google Inc. All Rights Reserved.' % year)
        printer('#')
        printer('# Licensed under the Apache License, Version 2.0 (the'
                '"License");')
        printer('# you may not use this file except in compliance with '
                'the License.')
        printer('# You may obtain a copy of the License at')
        printer('#')
        printer('# http://www.apache.org/licenses/LICENSE-2.0')
        printer('#')
        printer('# Unless required by applicable law or agreed to in writing, '
                'software')
        printer('# distributed under the License is distributed on an "AS IS" '
                'BASIS,')
        printer('# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either '
                'express or implied.')
        printer('# See the License for the specific language governing '
                'permissions and')
        printer('# limitations under the License.')
        printer()
        printer('import setuptools')
        printer('REQUIREMENTS = [')
        with printer.Indent(indent=' '):
            # Pin the generated package to the current apitools
            # major.minor via a compatible-release specifier.
            parts = self.apitools_version.split('.')
            major = parts.pop(0)
            minor = parts.pop(0)
            printer('"google-apitools>=%s,~=%s.%s",',
                    self.apitools_version, major, minor)
            printer('"httplib2>=0.9",')
            printer('"oauth2client>=1.4.12",')
        printer(']')
        printer('_PACKAGE = "apitools.clients.%s"' % self.__package)
        printer()
        printer('setuptools.setup(')
        # TODO(craigcitro): Allow customization of these options.
        with printer.Indent(indent=' '):
            printer('name="google-apitools-%s-%s",',
                    self.__package, self.__version)
            printer('version="%s.%s",',
                    self.apitools_version, self.__revision)
            printer('description="Autogenerated apitools library for %s",' % (
                self.__package,))
            printer('url="https://github.com/google/apitools",')
            printer('author="Craig Citro",')
            printer('author_email="craigcitro@google.com",')
            printer('packages=setuptools.find_packages(),')
            printer('install_requires=REQUIREMENTS,')
            printer('classifiers=[')
            with printer.Indent(indent=' '):
                printer('"Programming Language :: Python :: 2.7",')
                printer('"License :: OSI Approved :: Apache Software '
                        'License",')
            printer('],')
            printer('license="Apache 2.0",')
            printer('keywords="apitools apitools-%s %s",' % (
                self.__package, self.__package))
        printer(')')

    def WriteMessagesFile(self, out):
        """Write the generated messages module to out."""
        self.__message_registry.WriteFile(self._GetPrinter(out))

    def WriteMessagesProtoFile(self, out):
        """Write the messages as a .proto file to out."""
        self.__message_registry.WriteProtoFile(self._GetPrinter(out))

    def WriteServicesProtoFile(self, out):
        """Write the services as a .proto file to out."""
        self.__services_registry.WriteProtoFile(self._GetPrinter(out))

    def WriteClientLibrary(self, out):
        """Write the generated client module to out."""
        self.__services_registry.WriteFile(self._GetPrinter(out))
| python | Apache-2.0 | 1b14f58b3c895f542c724028dd0fb8ae0d816510 | 2026-01-05T07:12:01.390580Z | false |
google/apitools | https://github.com/google/apitools/blob/1b14f58b3c895f542c724028dd0fb8ae0d816510/apitools/gen/util.py | apitools/gen/util.py | #!/usr/bin/env python
#
# Copyright 2015 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Assorted utilities shared between parts of apitools."""
from __future__ import print_function
from __future__ import unicode_literals
import collections
import contextlib
import gzip
import json
import keyword
import logging
import os
import re
import tempfile
import six
from six.moves import urllib_parse
import six.moves.urllib.error as urllib_error
import six.moves.urllib.request as urllib_request
# Root of the apitools.gen exception hierarchy.
class Error(Exception):
    """Base error for apitools generation."""
# Raised by FetchDiscoveryDoc below when no discovery url yields a doc.
class CommunicationError(Error):
    """Error in network communication."""
def _SortLengthFirstKey(a):
    # Sort key: longest strings first, ties broken alphabetically. Names
    # sorts strip_prefixes with this so the longest matching prefix wins
    # in __StripName (which returns on the first match).
    return -len(a), a
class Names(object):

    """Utility class for cleaning and normalizing names in a fixed style."""

    DEFAULT_NAME_CONVENTION = 'LOWER_CAMEL'
    NAME_CONVENTIONS = ['LOWER_CAMEL', 'LOWER_WITH_UNDER', 'NONE']

    def __init__(self, strip_prefixes,
                 name_convention=None,
                 capitalize_enums=False):
        # Longest-first ordering so the longest matching prefix wins in
        # __StripName.
        self.__strip_prefixes = sorted(strip_prefixes, key=_SortLengthFirstKey)
        self.__name_convention = (
            name_convention or self.DEFAULT_NAME_CONVENTION)
        self.__capitalize_enums = capitalize_enums

    @staticmethod
    def __FromCamel(name, separator='_'):
        """Convert a camelCase name to lower_with_under form."""
        name = re.sub(r'([a-z0-9])([A-Z])', r'\1%s\2' % separator, name)
        return name.lower()

    @staticmethod
    def __ToCamel(name, separator='_'):
        """Convert a separator-delimited name to CamelCase."""
        # TODO(craigcitro): Consider what to do about leading or trailing
        # underscores (such as `_refValue` in discovery).
        return ''.join(s[0:1].upper() + s[1:] for s in name.split(separator))

    @staticmethod
    def __ToLowerCamel(name, separator='_'):
        """Convert a separator-delimited name to lowerCamelCase."""
        name = Names.__ToCamel(name, separator=separator)
        return name[0].lower() + name[1:]

    def __StripName(self, name):
        """Strip strip_prefix entries from name."""
        if not name:
            return name
        for prefix in self.__strip_prefixes:
            if name.startswith(prefix):
                return name[len(prefix):]
        return name

    @staticmethod
    def CleanName(name):
        """Perform generic name cleaning."""
        # Replace anything that is not a valid identifier character.
        name = re.sub('[^_A-Za-z0-9]', '_', name)
        if name[0].isdigit():
            name = '_%s' % name
        # 'exec' is checked explicitly because it is only a keyword on
        # python 2.
        while keyword.iskeyword(name) or name == 'exec':
            name = '%s_' % name
        # If we end up with __ as a prefix, we'll run afoul of python
        # field renaming, so we manually correct for it.
        if name.startswith('__'):
            name = 'f%s' % name
        return name

    @staticmethod
    def NormalizeRelativePath(path):
        """Normalize camelCase entries in path."""
        path_components = path.split('/')
        normalized_components = []
        for component in path_components:
            # Only {templated} components are cleaned; literals pass through.
            if re.match(r'{[A-Za-z0-9_]+}$', component):
                normalized_components.append(
                    '{%s}' % Names.CleanName(component[1:-1]))
            else:
                normalized_components.append(component)
        return '/'.join(normalized_components)

    def NormalizeEnumName(self, enum_name):
        """Clean an enum value name, upper-casing it when configured."""
        if self.__capitalize_enums:
            enum_name = enum_name.upper()
        return self.CleanName(enum_name)

    def ClassName(self, name, separator='_'):
        """Generate a valid class name from name."""
        # TODO(craigcitro): Get rid of this case here and in MethodName.
        if name is None:
            return name
        # TODO(craigcitro): This is a hack to handle the case of specific
        # protorpc class names; clean this up.
        if name.startswith(('protorpc.', 'message_types.',
                            'apitools.base.protorpclite.',
                            'apitools.base.protorpclite.message_types.')):
            return name
        name = self.__StripName(name)
        name = self.__ToCamel(name, separator=separator)
        return self.CleanName(name)

    def MethodName(self, name, separator='_'):
        """Generate a valid method name from name."""
        if name is None:
            return None
        name = Names.__ToCamel(name, separator=separator)
        return Names.CleanName(name)

    def FieldName(self, name):
        """Generate a valid field name from name."""
        # TODO(craigcitro): We shouldn't need to strip this name, but some
        # of the service names here are excessive. Fix the API and then
        # remove this.
        name = self.__StripName(name)
        if self.__name_convention == 'LOWER_CAMEL':
            name = Names.__ToLowerCamel(name)
        elif self.__name_convention == 'LOWER_WITH_UNDER':
            name = Names.__FromCamel(name)
        return Names.CleanName(name)
@contextlib.contextmanager
def Chdir(dirname, create=True):
    """Run the managed block with dirname as the working directory.

    Creates dirname when missing unless create is false, in which case a
    missing directory raises OSError. The previous working directory is
    restored on exit, even if the block raises.
    """
    if not os.path.exists(dirname):
        if not create:
            raise OSError('Cannot find directory %s' % dirname)
        os.mkdir(dirname)
    saved_cwd = os.getcwd()
    os.chdir(dirname)
    try:
        yield
    finally:
        os.chdir(saved_cwd)
def NormalizeVersion(version):
    """Return version with identifier-unsafe characters replaced."""
    # Currently, '.' is the only character that might cause us trouble.
    return '_'.join(version.split('.'))
def _ComputePaths(package, version, root_url, service_path):
"""Compute the base url and base path.
Attributes:
package: name field of the discovery, i.e. 'storage' for storage service.
version: version of the service, i.e. 'v1'.
root_url: root url of the service, i.e. 'https://www.googleapis.com/'.
service_path: path of the service under the rool url, i.e. 'storage/v1/'.
Returns:
base url: string, base url of the service,
'https://www.googleapis.com/storage/v1/' for the storage service.
base path: string, common prefix of service endpoints after the base url.
"""
full_path = urllib_parse.urljoin(root_url, service_path)
api_path_component = '/'.join((package, version, ''))
if api_path_component not in full_path:
return full_path, ''
prefix, _, suffix = full_path.rpartition(api_path_component)
return prefix + api_path_component, suffix
class ClientInfo(collections.namedtuple('ClientInfo', (
        'package', 'scopes', 'version', 'client_id', 'client_secret',
        'user_agent', 'client_class_name', 'url_version', 'api_key',
        'base_url', 'base_path', 'mtls_base_url'))):

    """Container for client-related info and names."""

    @classmethod
    def Create(cls, discovery_doc,
               scope_ls, client_id, client_secret, user_agent, names, api_key,
               version_identifier):
        """Create a new ClientInfo object from a discovery document."""
        # Union of the scopes declared by the service and any extras the
        # caller supplied in scope_ls.
        scopes = set(
            discovery_doc.get('auth', {}).get('oauth2', {}).get('scopes', {}))
        scopes.update(scope_ls)
        package = discovery_doc['name']
        # A caller-provided identifier overrides the normalized doc version.
        version = (
            version_identifier or NormalizeVersion(discovery_doc['version']))
        url_version = discovery_doc['version']
        base_url, base_path = _ComputePaths(package, url_version,
                                            discovery_doc['rootUrl'],
                                            discovery_doc['servicePath'])
        # The mTLS endpoint is optional in discovery documents.
        mtls_root_url = discovery_doc.get('mtlsRootUrl', '')
        mtls_base_url = ''
        if mtls_root_url:
            mtls_base_url, _ = _ComputePaths(package, url_version,
                                             mtls_root_url,
                                             discovery_doc['servicePath'])
        client_info = {
            'package': package,
            'version': version,
            'url_version': url_version,
            # Sorted so generated output is deterministic.
            'scopes': sorted(list(scopes)),
            'client_id': client_id,
            'client_secret': client_secret,
            'user_agent': user_agent,
            'api_key': api_key,
            'base_url': base_url,
            'base_path': base_path,
            'mtls_base_url': mtls_base_url,
        }
        client_class_name = '%s%s' % (
            names.ClassName(client_info['package']),
            names.ClassName(client_info['version']))
        client_info['client_class_name'] = client_class_name
        return cls(**client_info)

    @property
    def default_directory(self):
        # Generated code lands in a directory named after the package.
        return self.package

    @property
    def client_rule_name(self):
        return '%s_%s_client' % (self.package, self.version)

    @property
    def client_file_name(self):
        return '%s.py' % self.client_rule_name

    @property
    def messages_rule_name(self):
        return '%s_%s_messages' % (self.package, self.version)

    @property
    def services_rule_name(self):
        return '%s_%s_services' % (self.package, self.version)

    @property
    def messages_file_name(self):
        return '%s.py' % self.messages_rule_name

    @property
    def messages_proto_file_name(self):
        return '%s.proto' % self.messages_rule_name

    @property
    def services_proto_file_name(self):
        return '%s.proto' % self.services_rule_name
def ReplaceHomoglyphs(s):
    """Returns s with unicode homoglyphs replaced by ascii equivalents."""
    ascii_equivalents = {
        '\xa0': ' ',  # ?
        '\u00e3': '',  # TODO(gsfowler) drop after .proto spurious char elided
        '\u00a0': ' ',  # ?
        '\u00a9': '(C)',  # COPYRIGHT SIGN (would you believe "asciiglyph"?)
        '\u00ae': '(R)',  # REGISTERED SIGN (would you believe "asciiglyph"?)
        '\u2014': '-',  # EM DASH
        '\u2018': "'",  # LEFT SINGLE QUOTATION MARK
        '\u2019': "'",  # RIGHT SINGLE QUOTATION MARK
        '\u201c': '"',  # LEFT DOUBLE QUOTATION MARK
        '\u201d': '"',  # RIGHT DOUBLE QUOTATION MARK
        '\u2026': '...',  # HORIZONTAL ELLIPSIS
        '\u2e3a': '-',  # TWO-EM DASH
    }

    def _Replacement(ch):
        """Map one character to its ascii-safe form."""
        mapped = ascii_equivalents.get(ch)
        if mapped is not None:
            return mapped
        # Plain ascii characters pass through untouched.
        try:
            ch.encode('ascii')
        except UnicodeError:
            pass
        else:
            return ch
        # Anything else becomes its unicode-escape spelling, or '?'.
        try:
            return ch.encode('unicode-escape').decode('ascii')
        except UnicodeError:
            return '?'

    return ''.join(_Replacement(ch) for ch in s)
def CleanDescription(description):
    """Return a version of description safe for printing in a docstring."""
    if not isinstance(description, six.string_types):
        # Non-string values (e.g. None) pass through untouched.
        return description
    if six.PY3:
        # https://docs.python.org/3/reference/lexical_analysis.html#index-18
        # Double the backslash in sequences that are special inside
        # python 3 string literals.
        for seq in ('\\N', '\\u', '\\U'):
            description = description.replace(seq, '\\' + seq)
    description = ReplaceHomoglyphs(description)
    # Break up triple quotes that would terminate a docstring early.
    return description.replace('"""', '" " "')
class SimplePrettyPrinter(object):

    """Simple pretty-printer that supports an indent contextmanager."""

    def __init__(self, out):
        # out: file-like object that print() writes to.
        self.__out = out
        self.__indent = ''
        self.__skip = False
        self.__comment_context = False

    @property
    def indent(self):
        return self.__indent

    def CalculateWidth(self, max_width=78):
        """Columns remaining for content after the current indent."""
        return max_width - len(self.indent)

    @contextlib.contextmanager
    def Indent(self, indent=' '):
        """Deepen the indent by `indent` for the duration of the block."""
        previous_indent = self.__indent
        self.__indent = '%s%s' % (previous_indent, indent)
        yield
        self.__indent = previous_indent

    @contextlib.contextmanager
    def CommentContext(self):
        """Print without any argument formatting."""
        old_context = self.__comment_context
        self.__comment_context = True
        yield
        self.__comment_context = old_context

    def __call__(self, *args):
        """Print one indented line: args[0] %-interpolated with args[1:].

        Calling with no args (or a falsy first arg) emits a blank line.
        In comment context the line is printed verbatim and passing
        interpolation args raises Error.
        """
        if self.__comment_context and args[1:]:
            raise Error('Cannot do string interpolation in comment context')
        if args and args[0]:
            if not self.__comment_context:
                line = (args[0] % args[1:]).rstrip()
            else:
                line = args[0].rstrip()
            line = ReplaceHomoglyphs(line)
            try:
                print('%s%s' % (self.__indent, line), file=self.__out)
            except UnicodeEncodeError:
                # Fall back to pure-ascii output for streams that cannot
                # encode the line.
                line = line.encode('ascii', 'backslashreplace').decode('ascii')
                print('%s%s' % (self.__indent, line), file=self.__out)
        else:
            print('', file=self.__out)
def _NormalizeDiscoveryUrls(discovery_url):
"""Expands a few abbreviations into full discovery urls."""
if discovery_url.startswith('http'):
return [discovery_url]
elif '.' not in discovery_url:
raise ValueError('Unrecognized value "%s" for discovery url')
api_name, _, api_version = discovery_url.partition('.')
return [
'https://www.googleapis.com/discovery/v1/apis/%s/%s/rest' % (
api_name, api_version),
'https://%s.googleapis.com/$discovery/rest?version=%s' % (
api_name, api_version),
]
def _Gunzip(gzipped_content):
"""Returns gunzipped content from gzipped contents."""
f = tempfile.NamedTemporaryFile(suffix='gz', mode='w+b', delete=False)
try:
f.write(gzipped_content)
f.close() # force file synchronization
with gzip.open(f.name, 'rb') as h:
decompressed_content = h.read()
return decompressed_content
finally:
os.unlink(f.name)
def _GetURLContent(url):
    """Download and return the (possibly gunzipped) content of URL."""
    response = urllib_request.urlopen(url)
    # The server may have gzip-encoded the payload; decompress it here so
    # callers always receive plain bytes.
    encoding = response.info().get('Content-Encoding')
    if encoding == 'gzip':
        content = _Gunzip(response.read())
    else:
        content = response.read()
    return content
def FetchDiscoveryDoc(discovery_url, retries=5):
    """Fetch the discovery document at the given url.

    Args:
      discovery_url: Full url or "<api>.<version>" abbreviation (see
          _NormalizeDiscoveryUrls).
      retries: Number of fetch attempts per candidate url.

    Returns:
      The parsed discovery document (a dict).

    Raises:
      CommunicationError: If no candidate url yields a non-empty document.
    """
    discovery_urls = _NormalizeDiscoveryUrls(discovery_url)
    discovery_doc = None
    last_exception = None
    for url in discovery_urls:
        for _ in range(retries):
            try:
                content = _GetURLContent(url)
                if isinstance(content, bytes):
                    content = content.decode('utf8')
                discovery_doc = json.loads(content)
                # First non-empty document wins.
                if discovery_doc:
                    return discovery_doc
            except (urllib_error.HTTPError, urllib_error.URLError) as e:
                # Only url-level errors trigger a retry; JSON parse errors
                # propagate to the caller immediately.
                logging.info(
                    'Attempting to fetch discovery doc again after "%s"', e)
                last_exception = e
    if discovery_doc is None:
        raise CommunicationError(
            'Could not find discovery doc at any of %s: %s' % (
                discovery_urls, last_exception))
| python | Apache-2.0 | 1b14f58b3c895f542c724028dd0fb8ae0d816510 | 2026-01-05T07:12:01.390580Z | false |
google/apitools | https://github.com/google/apitools/blob/1b14f58b3c895f542c724028dd0fb8ae0d816510/apitools/gen/test_utils.py | apitools/gen/test_utils.py | #
# Copyright 2015 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Various utilities used in tests."""
import contextlib
import os
import shutil
import sys
import tempfile
import unittest
import six
# Test decorator that skips the decorated test on Windows (os.name == 'nt').
SkipOnWindows = unittest.skipIf(
    os.name == 'nt', 'Does not run on windows')
@contextlib.contextmanager
def TempDir(change_to=False):
    """Yield a fresh temporary directory path, removing it on exit.

    When change_to is true, also chdir into the directory for the duration
    of the block and restore the previous working directory afterwards.
    """
    original_dir = os.getcwd() if change_to else None
    path = tempfile.mkdtemp()
    try:
        if change_to:
            os.chdir(path)
        yield path
    finally:
        if original_dir is not None:
            os.chdir(original_dir)
        shutil.rmtree(path)
@contextlib.contextmanager
def CaptureOutput():
    """Temporarily redirect sys.stdout/sys.stderr into StringIO buffers."""
    saved_streams = sys.stdout, sys.stderr
    captured_out, captured_err = six.StringIO(), six.StringIO()
    try:
        sys.stdout, sys.stderr = captured_out, captured_err
        yield captured_out, captured_err
    finally:
        sys.stdout, sys.stderr = saved_streams
| python | Apache-2.0 | 1b14f58b3c895f542c724028dd0fb8ae0d816510 | 2026-01-05T07:12:01.390580Z | false |
google/apitools | https://github.com/google/apitools/blob/1b14f58b3c895f542c724028dd0fb8ae0d816510/apitools/gen/message_registry.py | apitools/gen/message_registry.py | #!/usr/bin/env python
#
# Copyright 2015 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Message registry for apitools."""
import collections
import contextlib
import json
import six
from apitools.base.protorpclite import descriptor
from apitools.base.protorpclite import messages
from apitools.gen import extended_descriptor
from apitools.gen import util
TypeInfo = collections.namedtuple('TypeInfo', ('type_name', 'variant'))
class MessageRegistry(object):

    """Registry for message types.

    This closely mirrors a messages.FileDescriptor, but adds additional
    attributes (such as message and field descriptions) and some extra
    code for validation and cycle detection.
    """

    # Type information from these two maps comes from here:
    # https://developers.google.com/discovery/v1/type-format

    # Maps a discovery `type` to (type name, protorpc variant).
    PRIMITIVE_TYPE_INFO_MAP = {
        'string': TypeInfo(type_name='string',
                           variant=messages.StringField.DEFAULT_VARIANT),
        'integer': TypeInfo(type_name='integer',
                            variant=messages.IntegerField.DEFAULT_VARIANT),
        'boolean': TypeInfo(type_name='boolean',
                            variant=messages.BooleanField.DEFAULT_VARIANT),
        'number': TypeInfo(type_name='number',
                           variant=messages.FloatField.DEFAULT_VARIANT),
        'any': TypeInfo(type_name='extra_types.JsonValue',
                        variant=messages.Variant.MESSAGE),
    }

    # Maps a discovery `format` (which refines `type`) to
    # (type name, protorpc variant). Note 64-bit integers travel as
    # strings on the wire.
    PRIMITIVE_FORMAT_MAP = {
        'int32': TypeInfo(type_name='integer',
                          variant=messages.Variant.INT32),
        'uint32': TypeInfo(type_name='integer',
                           variant=messages.Variant.UINT32),
        'int64': TypeInfo(type_name='string',
                          variant=messages.Variant.INT64),
        'uint64': TypeInfo(type_name='string',
                           variant=messages.Variant.UINT64),
        'double': TypeInfo(type_name='number',
                           variant=messages.Variant.DOUBLE),
        'float': TypeInfo(type_name='number',
                          variant=messages.Variant.FLOAT),
        'byte': TypeInfo(type_name='byte',
                         variant=messages.BytesField.DEFAULT_VARIANT),
        'date': TypeInfo(type_name='extra_types.DateField',
                         variant=messages.Variant.STRING),
        'date-time': TypeInfo(
            type_name=('apitools.base.protorpclite.message_types.'
                       'DateTimeMessage'),
            variant=messages.Variant.MESSAGE),
    }
    def __init__(self, client_info, names, description, root_package_dir,
                 base_files_package, protorpc_package):
        """Set up an empty registry scoped to client_info's package."""
        self.__names = names
        self.__client_info = client_info
        self.__package = client_info.package
        self.__description = util.CleanDescription(description)
        self.__root_package_dir = root_package_dir
        self.__base_files_package = base_files_package
        self.__protorpc_package = protorpc_package
        self.__file_descriptor = extended_descriptor.ExtendedFileDescriptor(
            package=self.__package, description=self.__description)
        # Add required imports
        self.__file_descriptor.additional_imports = [
            'from %s import messages as _messages' % self.__protorpc_package,
        ]
        # Map from scoped names (i.e. Foo.Bar) to MessageDescriptors.
        self.__message_registry = collections.OrderedDict()
        # A set of types that we're currently adding (for cycle detection).
        self.__nascent_types = set()
        # A set of types for which we've seen a reference but no
        # definition; if this set is nonempty, validation fails.
        self.__unknown_types = set()
        # Used for tracking paths during message creation
        self.__current_path = []
        # Where to register created messages
        self.__current_env = self.__file_descriptor
        # TODO(craigcitro): Add a `Finalize` method.
    @property
    def file_descriptor(self):
        # Validation runs on every access, so a registry with unresolved
        # or half-built types cannot be read out.
        self.Validate()
        return self.__file_descriptor
    def WriteProtoFile(self, printer):
        """Write the messages file to out as proto."""
        # Fail fast on an inconsistent registry before emitting anything.
        self.Validate()
        extended_descriptor.WriteMessagesFile(
            self.__file_descriptor, self.__package, self.__client_info.version,
            printer)
    def WriteFile(self, printer):
        """Write the messages file to out."""
        # Fail fast on an inconsistent registry before emitting anything.
        self.Validate()
        extended_descriptor.WritePythonFile(
            self.__file_descriptor, self.__package, self.__client_info.version,
            printer)
    def Validate(self):
        """Raise ValueError if any types are half-built or unresolved."""
        # `or` selects the first non-empty set: types still being built
        # take precedence over referenced-but-undefined ones.
        mysteries = self.__nascent_types or self.__unknown_types
        if mysteries:
            raise ValueError('Malformed MessageRegistry: %s' % mysteries)
    def __ComputeFullName(self, name):
        # Scope `name` under the current nesting path, e.g. Foo.Bar.Baz.
        return '.'.join(map(six.text_type, self.__current_path[:] + [name]))
    def __AddImport(self, new_import):
        # Deduplicated append of an import line to the generated file.
        if new_import not in self.__file_descriptor.additional_imports:
            self.__file_descriptor.additional_imports.append(new_import)
    def __DeclareDescriptor(self, name):
        # Mark `name` as in-progress; __RegisterDescriptor clears it.
        self.__nascent_types.add(self.__ComputeFullName(name))
    def __RegisterDescriptor(self, new_descriptor):
        """Register the given descriptor in this registry."""
        if not isinstance(new_descriptor, (
                extended_descriptor.ExtendedMessageDescriptor,
                extended_descriptor.ExtendedEnumDescriptor)):
            raise ValueError('Cannot add descriptor of type %s' % (
                type(new_descriptor),))
        full_name = self.__ComputeFullName(new_descriptor.name)
        if full_name in self.__message_registry:
            raise ValueError(
                'Attempt to re-register descriptor %s' % full_name)
        # Registration must be preceded by __DeclareDescriptor.
        if full_name not in self.__nascent_types:
            raise ValueError('Directly adding types is not supported')
        new_descriptor.full_name = full_name
        self.__message_registry[full_name] = new_descriptor
        # Attach to the current environment (file or enclosing message).
        if isinstance(new_descriptor,
                      extended_descriptor.ExtendedMessageDescriptor):
            self.__current_env.message_types.append(new_descriptor)
        elif isinstance(new_descriptor,
                        extended_descriptor.ExtendedEnumDescriptor):
            self.__current_env.enum_types.append(new_descriptor)
        # The type is now defined: clear both bookkeeping sets.
        self.__unknown_types.discard(full_name)
        self.__nascent_types.remove(full_name)
    def LookupDescriptor(self, name):
        """Return the descriptor registered under full name, or None."""
        return self.__GetDescriptorByName(name)
    def LookupDescriptorOrDie(self, name):
        """Like LookupDescriptor, but raise ValueError when unknown."""
        message_descriptor = self.LookupDescriptor(name)
        if message_descriptor is None:
            raise ValueError('No message descriptor named "%s"' % name)
        return message_descriptor
    def __GetDescriptor(self, name):
        # Lookup scoped to the current nesting path.
        return self.__GetDescriptorByName(self.__ComputeFullName(name))
    def __GetDescriptorByName(self, name):
        """Return the registered descriptor for name, or None if unknown."""
        if name in self.__message_registry:
            return self.__message_registry[name]
        if name in self.__nascent_types:
            # Cycle guard: the type exists but is still being built.
            raise ValueError(
                'Cannot retrieve type currently being created: %s' % name)
        return None
    @contextlib.contextmanager
    def __DescriptorEnv(self, message_descriptor):
        """Scope registrations under message_descriptor for the block."""
        # TODO(craigcitro): Typecheck?
        # NOTE(review): no try/finally here -- an exception in the body
        # leaves the current path/env modified.
        previous_env = self.__current_env
        self.__current_path.append(message_descriptor.name)
        self.__current_env = message_descriptor
        yield
        self.__current_path.pop()
        self.__current_env = previous_env
    def AddEnumDescriptor(self, name, description,
                          enum_values, enum_descriptions):
        """Add a new EnumDescriptor named name with the given enum values."""
        message = extended_descriptor.ExtendedEnumDescriptor()
        message.name = self.__names.ClassName(name)
        message.description = util.CleanDescription(description)
        self.__DeclareDescriptor(message.name)
        for index, (enum_name, enum_description) in enumerate(
                zip(enum_values, enum_descriptions)):
            enum_value = extended_descriptor.ExtendedEnumValueDescriptor()
            enum_value.name = self.__names.NormalizeEnumName(enum_name)
            if enum_value.name != enum_name:
                # Name was cleaned/capitalized: record the python<->json
                # mapping and make sure `encoding` is imported for it.
                message.enum_mappings.append(
                    extended_descriptor.ExtendedEnumDescriptor.JsonEnumMapping(
                        python_name=enum_value.name, json_name=enum_name))
                self.__AddImport('from %s import encoding' %
                                 self.__base_files_package)
            # Enum numbers are simply the positional index.
            enum_value.number = index
            enum_value.description = util.CleanDescription(
                enum_description or '<no description>')
            message.values.append(enum_value)
        self.__RegisterDescriptor(message)
    def __DeclareMessageAlias(self, schema, alias_for):
        """Declare schema as an alias for alias_for."""
        # TODO(craigcitro): This is a hack. Remove it.
        message = extended_descriptor.ExtendedMessageDescriptor()
        message.name = self.__names.ClassName(schema['id'])
        message.alias_for = alias_for
        self.__DeclareDescriptor(message.name)
        # alias_for targets live in extra_types, so import it.
        self.__AddImport('from %s import extra_types' %
                         self.__base_files_package)
        self.__RegisterDescriptor(message)
    def __AddAdditionalProperties(self, message, schema, properties):
        """Add an additionalProperties field to message."""
        additional_properties_info = schema['additionalProperties']
        # A nested AdditionalProperty (key/value) message models entries.
        entries_type_name = self.__AddAdditionalPropertyType(
            message.name, additional_properties_info)
        description = util.CleanDescription(
            additional_properties_info.get('description'))
        if description is None:
            description = 'Additional properties of type %s' % message.name
        attrs = {
            'items': {
                '$ref': entries_type_name,
            },
            'description': description,
            'type': 'array',
        }
        field_name = 'additionalProperties'
        # Field number follows the already-added schema properties.
        message.fields.append(self.__FieldDescriptorFromProperties(
            field_name, len(properties) + 1, attrs))
        self.__AddImport('from %s import encoding' % self.__base_files_package)
        # Route unrecognized json keys into the new field at runtime.
        message.decorators.append(
            'encoding.MapUnrecognizedFields(%r)' % field_name)
    def AddDescriptorFromSchema(self, schema_name, schema):
        """Add a new MessageDescriptor named schema_name based on schema."""
        # TODO(craigcitro): Is schema_name redundant?
        # Already registered: nothing to do.
        if self.__GetDescriptor(schema_name):
            return
        if schema.get('enum'):
            self.__DeclareEnum(schema_name, schema)
            return
        if schema.get('type') == 'any':
            self.__DeclareMessageAlias(schema, 'extra_types.JsonValue')
            return
        if schema.get('type') != 'object':
            raise ValueError('Cannot create message descriptors for type %s' %
                             schema.get('type'))
        message = extended_descriptor.ExtendedMessageDescriptor()
        message.name = self.__names.ClassName(schema['id'])
        message.description = util.CleanDescription(schema.get(
            'description', 'A %s object.' % message.name))
        self.__DeclareDescriptor(message.name)
        # Nested types created below are scoped under this message.
        with self.__DescriptorEnv(message):
            properties = schema.get('properties', {})
            # Sorted for deterministic field numbering (numbers start at 1).
            for index, (name, attrs) in enumerate(sorted(properties.items())):
                field = self.__FieldDescriptorFromProperties(
                    name, index + 1, attrs)
                message.fields.append(field)
                if field.name != name:
                    # Cleaned name differs from the json name: record the
                    # mapping and import `encoding` for runtime remapping.
                    message.field_mappings.append(
                        type(message).JsonFieldMapping(
                            python_name=field.name, json_name=name))
                    self.__AddImport(
                        'from %s import encoding' % self.__base_files_package)
            if 'additionalProperties' in schema:
                self.__AddAdditionalProperties(message, schema, properties)
        self.__RegisterDescriptor(message)
    def __AddAdditionalPropertyType(self, name, property_schema):
        """Add a new nested AdditionalProperty message."""
        new_type_name = 'AdditionalProperty'
        # Copy so the pop below does not touch the caller's schema.
        property_schema = dict(property_schema)
        # We drop the description here on purpose, so the resulting
        # messages are less repetitive.
        property_schema.pop('description', None)
        description = 'An additional property for a %s object.' % name
        schema = {
            'id': new_type_name,
            'type': 'object',
            'description': description,
            'properties': {
                'key': {
                    'type': 'string',
                    'description': 'Name of the additional property.',
                },
                'value': property_schema,
            },
        }
        self.AddDescriptorFromSchema(new_type_name, schema)
        return new_type_name
def __AddEntryType(self, entry_type_name, entry_schema, parent_name):
    """Add a type for a list entry.

    Args:
      entry_type_name: (str) Name for the new entry message.
      entry_schema: (dict) Schema for a single entry; not modified.
      parent_name: (str) Name of the enclosing list type, used only in
        the generated description.

    Returns:
      The name of the newly declared entry type.
    """
    # Copy before popping so the caller's schema is left untouched,
    # matching the behavior of __AddAdditionalPropertyType.
    entry_schema = dict(entry_schema)
    # The description is dropped on purpose so the generated messages
    # are less repetitive.
    entry_schema.pop('description', None)
    description = 'Single entry in a %s.' % parent_name
    schema = {
        'id': entry_type_name,
        'type': 'object',
        'description': description,
        'properties': {
            'entry': {
                'type': 'array',
                'items': entry_schema,
            },
        },
    }
    self.AddDescriptorFromSchema(entry_type_name, schema)
    return entry_type_name
def __FieldDescriptorFromProperties(self, name, index, attrs):
    """Create a field descriptor for these attrs.

    Args:
      name: (str) JSON name of the property.
      index: (int) Field number to assign.
      attrs: (dict) Discovery schema fragment describing the property.

    Returns:
      An ExtendedFieldDescriptor wrapping the new FieldDescriptor.
    """
    field = descriptor.FieldDescriptor()
    field.name = self.__names.CleanName(name)
    field.number = index
    field.label = self.__ComputeLabel(attrs)
    new_type_name_hint = self.__names.ClassName(
        '%sValue' % self.__names.ClassName(name))
    type_info = self.__GetTypeInfo(attrs, new_type_name_hint)
    field.type_name = type_info.type_name
    field.variant = type_info.variant
    if 'default' in attrs:
        # TODO(craigcitro): Correctly handle non-primitive default values.
        default = attrs['default']
        # Strings and enums keep the raw default text; everything else
        # is parsed as JSON and re-stringified.
        if not (field.type_name == 'string' or
                field.variant == messages.Variant.ENUM):
            default = str(json.loads(default))
        if field.variant == messages.Variant.ENUM:
            # Enum defaults must use the normalized python-side name.
            default = self.__names.NormalizeEnumName(default)
        field.default_value = default
    extended_field = extended_descriptor.ExtendedFieldDescriptor()
    extended_field.name = field.name
    extended_field.description = util.CleanDescription(
        attrs.get('description', 'A %s attribute.' % field.type_name))
    extended_field.field_descriptor = field
    return extended_field
@staticmethod
def __ComputeLabel(attrs):
    """Map schema attributes to a protorpc field label."""
    labels = descriptor.FieldDescriptor.Label
    if attrs.get('required', False):
        return labels.REQUIRED
    if attrs.get('type') == 'array' or attrs.get('repeated'):
        return labels.REPEATED
    return labels.OPTIONAL
def __DeclareEnum(self, enum_name, attrs):
    """Register an enum descriptor built from attrs and return its TypeInfo."""
    values = attrs['enum']
    # Descriptions default to empty strings, one per value.
    value_descriptions = attrs.get('enumDescriptions', [''] * len(values))
    class_description = util.CleanDescription(attrs.get('description', ''))
    self.AddEnumDescriptor(
        enum_name, class_description, values, value_descriptions)
    self.__AddIfUnknown(enum_name)
    return TypeInfo(type_name=enum_name, variant=messages.Variant.ENUM)
def __AddIfUnknown(self, type_name):
    """Record type_name as needing declaration if not yet registered.

    Args:
      type_name: (str) Candidate type name; normalized via ClassName
        before the registry lookup.
    """
    type_name = self.__names.ClassName(type_name)
    full_type_name = self.__ComputeFullName(type_name)
    # Membership tests go directly against the dict; .keys() was an
    # unnecessary intermediate view.
    if (full_type_name not in self.__message_registry and
            type_name not in self.__message_registry):
        self.__unknown_types.add(type_name)
def __GetTypeInfo(self, attrs, name_hint):
    """Return a TypeInfo object for attrs, creating one if needed.

    Args:
      attrs: (dict) Discovery schema fragment ('$ref', 'type', 'format',
        'enum', 'items', ...).
      name_hint: (str) Name to use if a new nested type must be created.

    Returns:
      A TypeInfo naming the (possibly newly declared) type.

    Raises:
      ValueError: if no type can be determined from attrs.
    """
    type_ref = self.__names.ClassName(attrs.get('$ref'))
    type_name = attrs.get('type')
    if not (type_ref or type_name):
        raise ValueError('No type found for %s' % attrs)
    if type_ref:
        self.__AddIfUnknown(type_ref)
        # We don't actually know this is a message -- it might be an
        # enum. However, we can't check that until we've created all the
        # types, so we come back and fix this up later.
        return TypeInfo(
            type_name=type_ref, variant=messages.Variant.MESSAGE)
    if 'enum' in attrs:
        enum_name = '%sValuesEnum' % name_hint
        return self.__DeclareEnum(enum_name, attrs)
    if 'format' in attrs:
        type_info = self.PRIMITIVE_FORMAT_MAP.get(attrs['format'])
        # NOTE: If we don't recognize the format, the spec says we fall back
        # to just using the type name.
        if type_info is not None:
            # Pull in whichever support module the mapped type lives in.
            if type_info.type_name.startswith((
                    'apitools.base.protorpclite.message_types.',
                    'message_types.')):
                self.__AddImport(
                    'from %s import message_types as _message_types' %
                    self.__protorpc_package)
            if type_info.type_name.startswith('extra_types.'):
                self.__AddImport(
                    'from %s import extra_types' % self.__base_files_package)
            return type_info
    if type_name in self.PRIMITIVE_TYPE_INFO_MAP:
        type_info = self.PRIMITIVE_TYPE_INFO_MAP[type_name]
        if type_info.type_name.startswith('extra_types.'):
            self.__AddImport(
                'from %s import extra_types' % self.__base_files_package)
        return type_info
    if type_name == 'array':
        items = attrs.get('items')
        if not items:
            raise ValueError('Array type with no item type: %s' % attrs)
        entry_name_hint = self.__names.ClassName(
            items.get('title') or '%sListEntry' % name_hint)
        entry_label = self.__ComputeLabel(items)
        if entry_label == descriptor.FieldDescriptor.Label.REPEATED:
            # Array-of-array: wrap the inner list in its own entry type.
            parent_name = self.__names.ClassName(
                items.get('title') or name_hint)
            entry_type_name = self.__AddEntryType(
                entry_name_hint, items.get('items'), parent_name)
            return TypeInfo(type_name=entry_type_name,
                            variant=messages.Variant.MESSAGE)
        # Plain array: the field itself is REPEATED, so just resolve
        # the element type.
        return self.__GetTypeInfo(items, entry_name_hint)
    elif type_name == 'any':
        self.__AddImport('from %s import extra_types' %
                         self.__base_files_package)
        return self.PRIMITIVE_TYPE_INFO_MAP['any']
    elif type_name == 'object':
        # TODO(craigcitro): Think of a better way to come up with names.
        if not name_hint:
            raise ValueError(
                'Cannot create subtype without some name hint')
        schema = dict(attrs)
        schema['id'] = name_hint
        self.AddDescriptorFromSchema(name_hint, schema)
        self.__AddIfUnknown(name_hint)
        return TypeInfo(
            type_name=name_hint, variant=messages.Variant.MESSAGE)
    raise ValueError('Unknown type: %s' % type_name)
def FixupMessageFields(self):
    """Run the post-pass variant fixup over every top-level message."""
    for top_level_message in self.file_descriptor.message_types:
        self._FixupMessage(top_level_message)
def _FixupMessage(self, message_type):
    """Recursively rewrite MESSAGE-variant fields that actually name enums."""
    with self.__DescriptorEnv(message_type):
        for extended_field in message_type.fields:
            proto_field = extended_field.field_descriptor
            if proto_field.variant != messages.Variant.MESSAGE:
                continue
            referenced_type = self.LookupDescriptor(proto_field.type_name)
            if isinstance(referenced_type,
                          extended_descriptor.ExtendedEnumDescriptor):
                # The forward reference resolved to an enum after all.
                proto_field.variant = messages.Variant.ENUM
        for nested_message in message_type.message_types:
            self._FixupMessage(nested_message)
| python | Apache-2.0 | 1b14f58b3c895f542c724028dd0fb8ae0d816510 | 2026-01-05T07:12:01.390580Z | false |
google/apitools | https://github.com/google/apitools/blob/1b14f58b3c895f542c724028dd0fb8ae0d816510/apitools/gen/gen_client.py | apitools/gen/gen_client.py | #!/usr/bin/env python
#
# Copyright 2015 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command-line interface to gen_client."""
import argparse
import contextlib
import io
import json
import logging
import os
import pkgutil
import sys
from apitools.base.py import exceptions
from apitools.gen import gen_client_lib
from apitools.gen import util
def _CopyLocalFile(filename):
    """Copy a packaged apitools.base.py data file into the current directory.

    Args:
      filename: (str) Name of the packaged file to copy.

    Raises:
      exceptions.GeneratedClientError: if the packaged file is missing.
    """
    # Look the data up before opening the destination so a missing
    # source does not leave behind an empty output file.
    src_data = pkgutil.get_data('apitools.base.py', filename)
    if src_data is None:
        raise exceptions.GeneratedClientError(
            'Could not find file %s' % filename)
    # pkgutil.get_data returns bytes; write in binary mode so we never
    # hand bytes to a text-mode stream (a TypeError on Python 3).
    with io.open(filename, 'wb') as out:
        out.write(src_data)
def _GetDiscoveryDocFromFlags(args):
    """Get the discovery doc from flags.

    Fetches from --discovery_url when given; otherwise reads --infile,
    falling back to stdin when no input file was supplied.

    Returns:
      The parsed discovery document (a dict).

    Raises:
      exceptions.GeneratedClientError: if the URL fetch fails.
    """
    if args.discovery_url:
        try:
            return util.FetchDiscoveryDoc(args.discovery_url)
        except exceptions.CommunicationError:
            raise exceptions.GeneratedClientError(
                'Could not fetch discovery doc')
    # expanduser(None) raises TypeError, so only expand a real path and
    # fall back to /dev/stdin when --infile is unset.
    infile = os.path.expanduser(args.infile) if args.infile else '/dev/stdin'
    with io.open(infile, encoding='utf8') as f:
        return json.loads(util.ReplaceHomoglyphs(f.read()))
def _GetCodegenFromFlags(args):
    """Create a codegen object from flags.

    Returns:
      A gen_client_lib.DescriptorGenerator configured from args.

    Raises:
      exceptions.NotFoundError: if --client_json cannot be opened.
      exceptions.ConfigurationValueError: if the output directory already
        exists and --overwrite was not passed.
    """
    discovery_doc = _GetDiscoveryDocFromFlags(args)
    names = util.Names(
        args.strip_prefix,
        args.experimental_name_convention,
        args.experimental_capitalize_enums)
    if args.client_json:
        try:
            with io.open(args.client_json, encoding='utf8') as client_json:
                f = json.loads(util.ReplaceHomoglyphs(client_json.read()))
                # Dev-console downloads nest credentials under either
                # 'installed' or 'web'.
                web = f.get('installed', f.get('web', {}))
                client_id = web.get('client_id')
                client_secret = web.get('client_secret')
        except IOError:
            raise exceptions.NotFoundError(
                'Failed to open client json file: %s' % args.client_json)
    else:
        client_id = args.client_id
        client_secret = args.client_secret
    # Missing credentials only warn; generation proceeds with blanks.
    if not client_id:
        logging.warning('No client ID supplied')
        client_id = ''
    if not client_secret:
        logging.warning('No client secret supplied')
        client_secret = ''
    client_info = util.ClientInfo.Create(
        discovery_doc, args.scope, client_id, client_secret,
        args.user_agent, names, args.api_key, args.version_identifier)
    outdir = os.path.expanduser(args.outdir) or client_info.default_directory
    if os.path.exists(outdir) and not args.overwrite:
        raise exceptions.ConfigurationValueError(
            'Output directory exists, pass --overwrite to replace '
            'the existing files.')
    if not os.path.exists(outdir):
        os.makedirs(outdir)
    return gen_client_lib.DescriptorGenerator(
        discovery_doc, client_info, names, args.root_package, outdir,
        base_package=args.base_package,
        protorpc_package=args.protorpc_package,
        init_wildcards_file=(args.init_file == 'wildcards'),
        use_proto2=args.experimental_proto2_output,
        unelidable_request_methods=args.unelidable_request_methods,
        apitools_version=args.apitools_version)
# TODO(craigcitro): Delete this if we don't need this functionality.
def _WriteBaseFiles(codegen):
    """Copy the static base support files into the client output directory."""
    base_files = ('base_api.py', 'credentials_lib.py', 'exceptions.py')
    with util.Chdir(codegen.outdir):
        for base_file in base_files:
            _CopyLocalFile(base_file)
def _WriteIntermediateInit(codegen):
    """Write an intermediate package __init__.py in the current directory."""
    with io.open('__init__.py', 'w') as init_file:
        codegen.WriteIntermediateInit(init_file)
def _WriteProtoFiles(codegen):
    """Emit the messages and services .proto files into the output directory."""
    client_info = codegen.client_info
    with util.Chdir(codegen.outdir):
        with io.open(client_info.messages_proto_file_name, 'w') as out:
            codegen.WriteMessagesProtoFile(out)
        with io.open(client_info.services_proto_file_name, 'w') as out:
            codegen.WriteServicesProtoFile(out)
def _WriteGeneratedFiles(args, codegen):
    """Write the generated messages and client modules (plus protos if asked)."""
    if codegen.use_proto2:
        _WriteProtoFiles(codegen)
    client_info = codegen.client_info
    with util.Chdir(codegen.outdir):
        with io.open(client_info.messages_file_name, 'w') as messages_out:
            codegen.WriteMessagesFile(messages_out)
        with io.open(client_info.client_file_name, 'w') as client_out:
            codegen.WriteClientLibrary(client_out)
def _WriteInit(codegen):
    """Write the generated package's __init__.py inside the output directory."""
    with util.Chdir(codegen.outdir):
        with io.open('__init__.py', 'w') as init_file:
            codegen.WriteInit(init_file)
def _WriteSetupPy(codegen):
    """Write setup.py for the pip package into the current directory."""
    with io.open('setup.py', 'w') as setup_file:
        codegen.WriteSetupPy(setup_file)
def GenerateClient(args):
    """Driver for client code generation.

    Returns 128 when the codegen object could not be created.
    """
    codegen = _GetCodegenFromFlags(args)
    if codegen is None:
        logging.error('Failed to create codegen, exiting.')
        return 128
    _WriteGeneratedFiles(args, codegen)
    wants_init = args.init_file != 'none'
    if wants_init:
        _WriteInit(codegen)
def GeneratePipPackage(args):
    """Generate a client as a pip-installable tarball.

    Mutates args.outdir and args.root_package so the client lands under
    apitools/clients/<package>, then writes setup.py plus intermediate
    package __init__.py files around the generated code.

    Returns 1 when the codegen object could not be created.
    """
    discovery_doc = _GetDiscoveryDocFromFlags(args)
    package = discovery_doc['name']
    original_outdir = os.path.expanduser(args.outdir)
    # Nest the generated client inside the pip-package directory layout.
    args.outdir = os.path.join(
        args.outdir, 'apitools/clients/%s' % package)
    args.root_package = 'apitools.clients.%s' % package
    codegen = _GetCodegenFromFlags(args)
    if codegen is None:
        logging.error('Failed to create codegen, exiting.')
        return 1
    _WriteGeneratedFiles(args, codegen)
    _WriteInit(codegen)
    with util.Chdir(original_outdir):
        _WriteSetupPy(codegen)
        with util.Chdir('apitools'):
            _WriteIntermediateInit(codegen)
            with util.Chdir('clients'):
                _WriteIntermediateInit(codegen)
def GenerateProto(args):
    """Generate just the two proto files for a given API."""
    _WriteProtoFiles(_GetCodegenFromFlags(args))
class _SplitCommaSeparatedList(argparse.Action):
def __call__(self, parser, namespace, values, option_string=None):
setattr(namespace, self.dest, values.split(','))
def main(argv=None):
    """Run the gen_client command line.

    Args:
      argv: (list of str) Full argument vector including the program
        name; defaults to sys.argv.

    Returns:
      Integer exit status from the selected subcommand.
    """
    if argv is None:
        argv = sys.argv
    parser = argparse.ArgumentParser(
        description='Apitools Client Code Generator')

    discovery_group = parser.add_mutually_exclusive_group()
    discovery_group.add_argument(
        '--infile',
        help=('Filename for the discovery document. Mutually exclusive with '
              '--discovery_url'))
    discovery_group.add_argument(
        '--discovery_url',
        help=('URL (or "name.version") of the discovery document to use. '
              'Mutually exclusive with --infile.'))

    parser.add_argument(
        '--base_package',
        default='apitools.base.py',
        # Fixed: the help string previously had an unbalanced parenthesis.
        help='Base package path of apitools (defaults to apitools.base.py)')
    parser.add_argument(
        '--protorpc_package',
        default='apitools.base.protorpclite',
        help=('Base package path of protorpc '
              '(defaults to apitools.base.protorpclite)'))
    parser.add_argument(
        '--version-identifier',
        help=('Version identifier to use for the generated client (defaults to '
              '"version" value in discovery doc). This must be a valid '
              'identifier when used in a Python module name.'))
    parser.add_argument(
        '--outdir',
        default='',
        help='Directory name for output files. (Defaults to the API name.)')
    parser.add_argument(
        '--overwrite',
        default=False, action='store_true',
        help='Only overwrite the output directory if this flag is specified.')
    parser.add_argument(
        '--root_package',
        default='',
        help=('Python import path for where these modules '
              'should be imported from.'))
    parser.add_argument(
        '--strip_prefix', nargs='*',
        default=[],
        help=('Prefix to strip from type names in the discovery document. '
              '(May be specified multiple times.)'))
    parser.add_argument(
        '--api_key',
        help=('API key to use for API access.'))
    parser.add_argument(
        '--client_json',
        help=('Use the given file downloaded from the dev. console for '
              'client_id and client_secret.'))
    parser.add_argument(
        '--client_id',
        default='CLIENT_ID',
        help='Client ID to use for the generated client.')
    parser.add_argument(
        '--client_secret',
        default='CLIENT_SECRET',
        help='Client secret for the generated client.')
    parser.add_argument(
        '--scope', nargs='*',
        default=[],
        help=('Scopes to request in the generated client. '
              'May be specified more than once.'))
    parser.add_argument(
        '--user_agent',
        default='x_Tw5K8nnjoRAqULM9PFAC2b',
        help=('User agent for the generated client. '
              'Defaults to <api>-generated/0.1.'))
    # --generate_cli / --nogenerate_cli are retained only for
    # command-line compatibility.
    parser.add_argument(
        '--generate_cli', dest='generate_cli', action='store_true',
        help='Ignored.')
    parser.add_argument(
        '--nogenerate_cli', dest='generate_cli', action='store_false',
        help='Ignored.')
    parser.add_argument(
        '--init-file',
        choices=['none', 'empty', 'wildcards'],
        type=lambda s: s.lower(),
        default='wildcards',
        help='Controls whether and how to generate package __init__.py file.')
    parser.add_argument(
        '--unelidable_request_methods',
        action=_SplitCommaSeparatedList,
        default=[],
        help=('Full method IDs of methods for which we should NOT try to '
              'elide the request type. (Should be a comma-separated list.)'))
    parser.add_argument(
        '--apitools_version',
        default='', dest='apitools_version',
        help=('Apitools version used as a requirement in generated clients. '
              'Defaults to version of apitools used to generate the clients.'))
    parser.add_argument(
        '--experimental_capitalize_enums',
        default=False, action='store_true',
        help='Dangerous: attempt to rewrite enum values to be uppercase.')
    parser.add_argument(
        '--experimental_name_convention',
        choices=util.Names.NAME_CONVENTIONS,
        default=util.Names.DEFAULT_NAME_CONVENTION,
        help='Dangerous: use a particular style for generated names.')
    parser.add_argument(
        '--experimental_proto2_output',
        default=False, action='store_true',
        help='Dangerous: also output a proto2 message file.')

    subparsers = parser.add_subparsers(
        dest='command', help='Type of generated code')
    # Python 3 makes subcommands optional by default, which would crash
    # below with AttributeError on args.func; require one explicitly so
    # the user gets a proper usage error instead.
    subparsers.required = True
    client_parser = subparsers.add_parser(
        'client', help='Generate apitools client in destination folder')
    client_parser.set_defaults(func=GenerateClient)
    pip_package_parser = subparsers.add_parser(
        'pip_package', help='Generate apitools client pip package')
    pip_package_parser.set_defaults(func=GeneratePipPackage)
    proto_parser = subparsers.add_parser(
        'proto', help='Generate apitools client protos')
    proto_parser.set_defaults(func=GenerateProto)

    args = parser.parse_args(argv[1:])
    return args.func(args) or 0
if __name__ == '__main__':
sys.exit(main())
| python | Apache-2.0 | 1b14f58b3c895f542c724028dd0fb8ae0d816510 | 2026-01-05T07:12:01.390580Z | false |
google/apitools | https://github.com/google/apitools/blob/1b14f58b3c895f542c724028dd0fb8ae0d816510/apitools/gen/__init__.py | apitools/gen/__init__.py | #!/usr/bin/env python
#
# Copyright 2015 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Shared __init__.py for apitools."""
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
| python | Apache-2.0 | 1b14f58b3c895f542c724028dd0fb8ae0d816510 | 2026-01-05T07:12:01.390580Z | false |
google/apitools | https://github.com/google/apitools/blob/1b14f58b3c895f542c724028dd0fb8ae0d816510/apitools/gen/client_generation_test.py | apitools/gen/client_generation_test.py | #
# Copyright 2015 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test gen_client against all the APIs we use regularly."""
import importlib
import logging
import os
import six
import subprocess
import sys
import tempfile
import unittest
from apitools.gen import gen_client
from apitools.gen import test_utils
_API_LIST = [
'bigquery.v2',
'compute.v1',
'drive.v3',
'storage.v1',
]
class ClientGenerationTest(unittest.TestCase):
    """End-to-end check: generate and import clients for live discovery docs."""

    def setUp(self):
        super(ClientGenerationTest, self).setUp()
        # Stored as an attribute so it could be overridden, though it is
        # only used as argv[0] below.
        self.gen_client_binary = 'gen_client'

    @test_utils.SkipOnWindows
    def testGeneration(self):
        # NOTE(review): this test fetches discovery docs over the
        # network; a fetch failure (retcode 128) is tolerated and the
        # API is skipped rather than failed.
        for api in _API_LIST:
            with test_utils.TempDir(change_to=True):
                args = [
                    self.gen_client_binary,
                    '--client_id=12345',
                    '--client_secret=67890',
                    '--discovery_url=%s' % api,
                    '--outdir=generated',
                    '--overwrite',
                    'client',
                ]
                logging.info('Testing API %s with command line: %s',
                             api, ' '.join(args))
                retcode = gen_client.main(args)
                if retcode == 128:
                    logging.error('Failed to fetch discovery doc, continuing.')
                    continue
                self.assertEqual(0, retcode)
                sys.path.insert(0, os.path.join(os.getcwd(), 'generated'))
                # Ensure we can import the generated client.
                importlib.import_module('{}_{}_client'.format(
                    *api.split('.')))
| python | Apache-2.0 | 1b14f58b3c895f542c724028dd0fb8ae0d816510 | 2026-01-05T07:12:01.390580Z | false |
google/apitools | https://github.com/google/apitools/blob/1b14f58b3c895f542c724028dd0fb8ae0d816510/apitools/gen/extended_descriptor.py | apitools/gen/extended_descriptor.py | #!/usr/bin/env python
#
# Copyright 2015 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Extended protorpc descriptors.
This takes existing protorpc Descriptor classes and adds extra
properties not directly supported in proto itself, notably field and
message descriptions. We need this in order to generate protorpc
message files with comments.
Note that for most of these classes, we can't simply wrap the existing
message, since we need to change the type of the subfields. We could
have a "plain" descriptor attached, but that seems like unnecessary
bookkeeping. Where possible, we purposely reuse existing tag numbers;
for new fields, we start numbering at 100.
"""
import abc
import operator
import textwrap
import six
from apitools.base.protorpclite import descriptor as protorpc_descriptor
from apitools.base.protorpclite import message_types
from apitools.base.protorpclite import messages
from apitools.base.py import extra_types
class ExtendedEnumValueDescriptor(messages.Message):
    """Enum value descriptor with additional fields.

    Fields:
      name: Name of enumeration value.
      number: Number of enumeration value.
      description: Description of this enum value.
    """
    # Tags 1-2 mirror protorpc's EnumValueDescriptor; extension fields
    # start at 100 (see module docstring). Numbers must stay stable.
    name = messages.StringField(1)
    number = messages.IntegerField(2, variant=messages.Variant.INT32)
    description = messages.StringField(100)
class ExtendedEnumDescriptor(messages.Message):
    """Enum class descriptor with additional fields.

    Fields:
      name: Name of Enum without any qualification.
      values: Values defined by Enum class.
      description: Description of this enum class.
      full_name: Fully qualified name of this enum class.
      enum_mappings: Mappings from python to JSON names for enum values.
    """

    class JsonEnumMapping(messages.Message):
        """Mapping from a python name to the wire name for an enum."""
        # python_name is the generated identifier; json_name is the
        # value on the wire.
        python_name = messages.StringField(1)
        json_name = messages.StringField(2)

    # Tags 1-2 mirror protorpc's EnumDescriptor; extension fields start
    # at 100 (see module docstring). Numbers must stay stable.
    name = messages.StringField(1)
    values = messages.MessageField(
        ExtendedEnumValueDescriptor, 2, repeated=True)
    description = messages.StringField(100)
    full_name = messages.StringField(101)
    enum_mappings = messages.MessageField(
        'JsonEnumMapping', 102, repeated=True)
class ExtendedFieldDescriptor(messages.Message):
    """Field descriptor with additional fields.

    Fields:
      field_descriptor: The underlying field descriptor.
      name: The name of this field.
      description: Description of this field.
    """
    # Unlike the other Extended* types, this wraps the plain protorpc
    # descriptor rather than extending its tag space.
    field_descriptor = messages.MessageField(
        protorpc_descriptor.FieldDescriptor, 100)
    # We duplicate the names for easier bookkeeping.
    name = messages.StringField(101)
    description = messages.StringField(102)
class ExtendedMessageDescriptor(messages.Message):
    """Message descriptor with additional fields.

    Fields:
      name: Name of Message without any qualification.
      fields: Fields defined for message.
      message_types: Nested Message classes defined on message.
      enum_types: Nested Enum classes defined on message.
      description: Description of this message.
      full_name: Full qualified name of this message.
      decorators: Decorators to include in the definition when printing.
        Printed in the given order from top to bottom (so the last entry
        is the innermost decorator).
      alias_for: This type is just an alias for the named type.
      field_mappings: Mappings from python to json field names.
    """

    class JsonFieldMapping(messages.Message):
        """Mapping from a python name to the wire name for a field."""
        python_name = messages.StringField(1)
        json_name = messages.StringField(2)

    # Tags 1-4 mirror protorpc's MessageDescriptor; extension fields
    # start at 100 (see module docstring). Numbers must stay stable.
    name = messages.StringField(1)
    fields = messages.MessageField(ExtendedFieldDescriptor, 2, repeated=True)
    # The self-reference must be by qualified name: the class object is
    # not yet bound while its body is executing.
    message_types = messages.MessageField(
        'extended_descriptor.ExtendedMessageDescriptor', 3, repeated=True)
    enum_types = messages.MessageField(
        ExtendedEnumDescriptor, 4, repeated=True)
    description = messages.StringField(100)
    full_name = messages.StringField(101)
    decorators = messages.StringField(102, repeated=True)
    alias_for = messages.StringField(103)
    field_mappings = messages.MessageField(
        'JsonFieldMapping', 104, repeated=True)
class ExtendedFileDescriptor(messages.Message):
    """File descriptor with additional fields.

    Fields:
      package: Fully qualified name of package that definitions belong to.
      message_types: Message definitions contained in file.
      enum_types: Enum definitions contained in file.
      description: Description of this file.
      additional_imports: Extra imports used in this package.
    """
    # Tags 2, 4, 5 reuse protorpc's FileDescriptor numbering; extension
    # fields start at 100 (see module docstring). Numbers must stay stable.
    package = messages.StringField(2)
    message_types = messages.MessageField(
        ExtendedMessageDescriptor, 4, repeated=True)
    enum_types = messages.MessageField(
        ExtendedEnumDescriptor, 5, repeated=True)
    description = messages.StringField(100)
    additional_imports = messages.StringField(101, repeated=True)
def _WriteFile(file_descriptor, package, version, proto_printer):
    """Write the given extended file descriptor to the printer."""
    proto_printer.PrintPreamble(package, version, file_descriptor)
    _PrintEnums(proto_printer, file_descriptor.enum_types)
    _PrintMessages(proto_printer, file_descriptor.message_types)
    # Enum mappings are emitted first, then message field mappings.
    mappings = _FetchCustomMappings(file_descriptor.enum_types)
    mappings += _FetchCustomMappings(file_descriptor.message_types)
    for mapping in mappings:
        proto_printer.PrintCustomJsonMapping(mapping)
def WriteMessagesFile(file_descriptor, package, version, printer):
    """Write the given extended file descriptor to out as a message file."""
    proto_printer = _Proto2Printer(printer)
    _WriteFile(file_descriptor, package, version, proto_printer)
def WritePythonFile(file_descriptor, package, version, printer):
    """Write the given extended file descriptor to out."""
    proto_printer = _ProtoRpcPrinter(printer)
    _WriteFile(file_descriptor, package, version, proto_printer)
def PrintIndentedDescriptions(printer, ls, name, prefix=''):
    """Print a '<name>:' section listing each item's name and description.

    Args:
      printer: Line printer with Indent/CommentContext/CalculateWidth.
      ls: List of objects exposing .name and .description.
      name: (str) Section heading (e.g. 'Fields', 'Values').
      prefix: (str) Per-line prefix (e.g. '// ' for proto2 comments).
    """
    if ls:
        with printer.Indent(indent=prefix):
            with printer.CommentContext():
                # Wrap to whatever width remains after the prefix.
                width = printer.CalculateWidth() - len(prefix)
                printer()
                printer(name + ':')
                for x in ls:
                    description = '%s: %s' % (x.name, x.description)
                    for line in textwrap.wrap(description, width,
                                              initial_indent=' ',
                                              subsequent_indent=' '):
                        printer(line)
def _FetchCustomMappings(descriptor_ls):
    """Find and return all custom mappings for descriptors in descriptor_ls."""
    custom_mappings = []
    for desc in descriptor_ls:
        if isinstance(desc, ExtendedEnumDescriptor):
            for mapping in desc.enum_mappings:
                custom_mappings.append(
                    _FormatCustomJsonMapping('Enum', mapping, desc))
        elif isinstance(desc, ExtendedMessageDescriptor):
            # Field mappings first, then anything nested inside.
            for mapping in desc.field_mappings:
                custom_mappings.append(
                    _FormatCustomJsonMapping('Field', mapping, desc))
            custom_mappings.extend(_FetchCustomMappings(desc.enum_types))
            custom_mappings.extend(_FetchCustomMappings(desc.message_types))
    return custom_mappings
def _FormatCustomJsonMapping(mapping_type, mapping, descriptor):
return '\n'.join((
'encoding.AddCustomJson%sMapping(' % mapping_type,
" %s, '%s', '%s')" % (descriptor.full_name, mapping.python_name,
mapping.json_name),
))
def _EmptyMessage(message_type):
return not any((message_type.enum_types,
message_type.message_types,
message_type.fields))
class ProtoPrinter(six.with_metaclass(abc.ABCMeta, object)):
    """Interface for proto printers.

    Concrete printers (_Proto2Printer, _ProtoRpcPrinter) render the same
    extended descriptors as proto2 or protorpc Python source.
    """

    @abc.abstractmethod
    def PrintPreamble(self, package, version, file_descriptor):
        """Print the file docstring and import lines."""

    @abc.abstractmethod
    def PrintEnum(self, enum_type):
        """Print the given enum declaration."""

    @abc.abstractmethod
    def PrintMessage(self, message_type):
        """Print the given message declaration."""
class _Proto2Printer(ProtoPrinter):
    """Printer for proto2 definitions."""

    def __init__(self, printer):
        # printer: low-level line printer with Indent/CalculateWidth support.
        self.__printer = printer

    def __PrintEnumCommentLines(self, enum_type):
        # Emit the enum description, then its values, as // comments.
        description = enum_type.description or '%s enum type.' % enum_type.name
        for line in textwrap.wrap(description,
                                  self.__printer.CalculateWidth() - 3):
            self.__printer('// %s', line)
        PrintIndentedDescriptions(self.__printer, enum_type.values, 'Values',
                                  prefix='// ')

    def __PrintEnumValueCommentLines(self, enum_value):
        if enum_value.description:
            # Reserve 3 columns for the '// ' prefix.
            width = self.__printer.CalculateWidth() - 3
            for line in textwrap.wrap(enum_value.description, width):
                self.__printer('// %s', line)

    def PrintEnum(self, enum_type):
        """Print the given enum declaration."""
        self.__PrintEnumCommentLines(enum_type)
        self.__printer('enum %s {', enum_type.name)
        with self.__printer.Indent():
            # Values are emitted in numeric order.
            enum_values = sorted(
                enum_type.values, key=operator.attrgetter('number'))
            for enum_value in enum_values:
                self.__printer()
                self.__PrintEnumValueCommentLines(enum_value)
                self.__printer('%s = %s;', enum_value.name, enum_value.number)
        self.__printer('}')
        self.__printer()

    def PrintPreamble(self, package, version, file_descriptor):
        """Print the file docstring and import lines."""
        self.__printer('// Generated message classes for %s version %s.',
                       package, version)
        self.__printer('// NOTE: This file is autogenerated and should not be '
                       'edited by hand.')
        description_lines = textwrap.wrap(file_descriptor.description, 75)
        if description_lines:
            self.__printer('//')
            for line in description_lines:
                self.__printer('// %s', line)
        self.__printer()
        self.__printer('syntax = "proto2";')
        self.__printer('package %s;', file_descriptor.package)

    def __PrintMessageCommentLines(self, message_type):
        """Print the description of this message."""
        description = message_type.description or '%s message type.' % (
            message_type.name)
        width = self.__printer.CalculateWidth() - 3
        for line in textwrap.wrap(description, width):
            self.__printer('// %s', line)
        PrintIndentedDescriptions(self.__printer, message_type.enum_types,
                                  'Enums', prefix='// ')
        PrintIndentedDescriptions(self.__printer, message_type.message_types,
                                  'Messages', prefix='// ')
        PrintIndentedDescriptions(self.__printer, message_type.fields,
                                  'Fields', prefix='// ')

    def __PrintFieldDescription(self, description):
        for line in textwrap.wrap(description,
                                  self.__printer.CalculateWidth() - 3):
            self.__printer('// %s', line)

    def __PrintFields(self, fields):
        for extended_field in fields:
            field = extended_field.field_descriptor
            field_type = messages.Field.lookup_field_type_by_variant(
                field.variant)
            self.__printer()
            self.__PrintFieldDescription(extended_field.description)
            label = str(field.label).lower()
            # Message/enum fields name their type; primitives use the
            # lowercased variant name.
            if field_type in (messages.EnumField, messages.MessageField):
                proto_type = field.type_name
            else:
                proto_type = str(field.variant).lower()
            default_statement = ''
            if field.default_value:
                # Quote / lowercase defaults per proto2 literal syntax.
                if field_type in [messages.BytesField, messages.StringField]:
                    default_value = '"%s"' % field.default_value
                elif field_type is messages.BooleanField:
                    default_value = str(field.default_value).lower()
                else:
                    default_value = str(field.default_value)
                default_statement = ' [default = %s]' % default_value
            self.__printer(
                '%s %s %s = %d%s;',
                label, proto_type, field.name, field.number, default_statement)

    def PrintMessage(self, message_type):
        """Print the given message declaration."""
        self.__printer()
        self.__PrintMessageCommentLines(message_type)
        if _EmptyMessage(message_type):
            self.__printer('message %s {}', message_type.name)
            return
        self.__printer('message %s {', message_type.name)
        with self.__printer.Indent():
            _PrintEnums(self, message_type.enum_types)
            _PrintMessages(self, message_type.message_types)
            self.__PrintFields(message_type.fields)
        self.__printer('}')

    def PrintCustomJsonMapping(self, mapping_lines):
        # proto2 output has no way to express custom JSON names.
        raise NotImplementedError(
            'Custom JSON encoding not supported for proto2')
class _ProtoRpcPrinter(ProtoPrinter):
    """Printer for ProtoRPC definitions."""

    def __init__(self, printer):
        # Underlying indenting printer: callable per line, with Indent()
        # and CommentContext() context managers and CalculateWidth().
        self.__printer = printer

    def __PrintClassSeparator(self):
        # One blank line after nested classes, two after top-level ones
        # (PEP 8 spacing for the generated module).
        self.__printer()
        if not self.__printer.indent:
            self.__printer()

    def __PrintEnumDocstringLines(self, enum_type):
        # Wrap the description into a raw docstring, then list the values.
        description = enum_type.description or '%s enum type.' % enum_type.name
        for line in textwrap.wrap('r"""%s' % description,
                                  self.__printer.CalculateWidth()):
            self.__printer(line)
        PrintIndentedDescriptions(self.__printer, enum_type.values, 'Values')
        self.__printer('"""')

    def PrintEnum(self, enum_type):
        """Print one Enum subclass, values sorted by number."""
        self.__printer('class %s(_messages.Enum):', enum_type.name)
        with self.__printer.Indent():
            self.__PrintEnumDocstringLines(enum_type)
            enum_values = sorted(
                enum_type.values, key=operator.attrgetter('number'))
            for enum_value in enum_values:
                self.__printer('%s = %s', enum_value.name, enum_value.number)
            if not enum_type.values:
                # An empty class body still needs a statement.
                self.__printer('pass')
        self.__PrintClassSeparator()

    def __PrintAdditionalImports(self, imports):
        """Print additional imports needed for protorpc."""
        # Group non-google imports first, each group followed by a blank line.
        google_imports = [x for x in imports if 'google' in x]
        other_imports = [x for x in imports if 'google' not in x]
        if other_imports:
            for import_ in sorted(other_imports):
                self.__printer(import_)
            self.__printer()
        # Note: If we ever were going to add imports from this package, we'd
        # need to sort those out and put them at the end.
        if google_imports:
            for import_ in sorted(google_imports):
                self.__printer(import_)
            self.__printer()

    def PrintPreamble(self, package, version, file_descriptor):
        """Print module docstring, autogen notice, imports and package."""
        self.__printer('"""Generated message classes for %s version %s.',
                       package, version)
        self.__printer()
        for line in textwrap.wrap(file_descriptor.description, 78):
            self.__printer(line)
        self.__printer('"""')
        self.__printer('# NOTE: This file is autogenerated and should not be '
                       'edited by hand.')
        self.__printer()
        self.__printer('from __future__ import absolute_import')
        self.__printer()
        self.__PrintAdditionalImports(file_descriptor.additional_imports)
        self.__printer()
        self.__printer("package = '%s'", file_descriptor.package)
        self.__printer()
        self.__printer()

    def __PrintMessageDocstringLines(self, message_type):
        """Print the docstring for this message."""
        description = message_type.description or '%s message type.' % (
            message_type.name)
        # Short empty messages get a single-line docstring.
        short_description = (
            _EmptyMessage(message_type) and
            len(description) < (self.__printer.CalculateWidth() - 6))
        with self.__printer.CommentContext():
            if short_description:
                # Note that we use explicit string interpolation here since
                # we're in comment context.
                self.__printer('r"""%s"""' % description)
                return
            for line in textwrap.wrap('r"""%s' % description,
                                      self.__printer.CalculateWidth()):
                self.__printer(line)
            PrintIndentedDescriptions(self.__printer, message_type.enum_types,
                                      'Enums')
            PrintIndentedDescriptions(
                self.__printer, message_type.message_types, 'Messages')
            PrintIndentedDescriptions(
                self.__printer, message_type.fields, 'Fields')
            self.__printer('"""')
            self.__printer()

    def PrintMessage(self, message_type):
        """Print one Message subclass (or a simple alias assignment)."""
        if message_type.alias_for:
            # Aliased messages are emitted as a plain assignment.
            self.__printer(
                '%s = %s', message_type.name, message_type.alias_for)
            self.__PrintClassSeparator()
            return
        for decorator in message_type.decorators:
            self.__printer('@%s', decorator)
        self.__printer('class %s(_messages.Message):', message_type.name)
        with self.__printer.Indent():
            self.__PrintMessageDocstringLines(message_type)
            _PrintEnums(self, message_type.enum_types)
            _PrintMessages(self, message_type.message_types)
            _PrintFields(message_type.fields, self.__printer)
        self.__PrintClassSeparator()

    def PrintCustomJsonMapping(self, mapping):
        # Mapping lines are pre-formatted; emit verbatim.
        self.__printer(mapping)
def _PrintEnums(proto_printer, enum_types):
    """Print all enums to the given proto_printer."""
    # Name order keeps generated output deterministic.
    for enum_type in sorted(enum_types, key=operator.attrgetter('name')):
        proto_printer.PrintEnum(enum_type)
def _PrintMessages(proto_printer, message_list):
    """Print all messages in message_list to the given proto_printer."""
    # Name order keeps generated output deterministic.
    for message_type in sorted(message_list, key=operator.attrgetter('name')):
        proto_printer.PrintMessage(message_type)
# Well-known message type names that map to a dedicated protorpc field
# class instead of a generic MessageField (see _PrintFields).
_MESSAGE_FIELD_MAP = {
    message_types.DateTimeMessage.definition_name(): (
        message_types.DateTimeField),
}
def _PrintFields(fields, printer):
    # Emit one "<name> = <module>.<Type>(<args>)" line per field, filling
    # a per-field info dict and interpolating it through a format string.
    for extended_field in fields:
        field = extended_field.field_descriptor
        printed_field_info = {
            'name': field.name,
            'module': '_messages',
            'type_name': '',
            'type_format': '',
            'number': field.number,
            'label_format': '',
            'variant_format': '',
            'default_format': '',
        }

        # Pick the field class: special-cased well-known types first,
        # otherwise derive it from the field's variant.
        message_field = _MESSAGE_FIELD_MAP.get(field.type_name)
        if message_field:
            printed_field_info['module'] = '_message_types'
            field_type = message_field
        elif field.type_name == 'extra_types.DateField':
            printed_field_info['module'] = 'extra_types'
            field_type = extra_types.DateField
        else:
            field_type = messages.Field.lookup_field_type_by_variant(
                field.variant)

        # Enum/message fields take the referenced type name as first arg.
        if field_type in (messages.EnumField, messages.MessageField):
            printed_field_info['type_format'] = "'%s', " % field.type_name

        if field.label == protorpc_descriptor.FieldDescriptor.Label.REQUIRED:
            printed_field_info['label_format'] = ', required=True'
        elif field.label == protorpc_descriptor.FieldDescriptor.Label.REPEATED:
            printed_field_info['label_format'] = ', repeated=True'

        # Only spell out the variant when it differs from the default.
        if field_type.DEFAULT_VARIANT != field.variant:
            printed_field_info['variant_format'] = (
                ', variant=_messages.Variant.%s' % field.variant)

        # NOTE(review): falsy defaults (0, False, '') are skipped by this
        # truthiness test and thus never emitted — verify that is intended.
        if field.default_value:
            if field_type in [messages.BytesField, messages.StringField]:
                default_value = repr(field.default_value)
            elif field_type is messages.EnumField:
                try:
                    default_value = str(int(field.default_value))
                except ValueError:
                    default_value = repr(field.default_value)
            else:
                default_value = field.default_value
            printed_field_info[
                'default_format'] = ', default=%s' % (default_value,)

        printed_field_info['type_name'] = field_type.__name__
        # The generator expression's 'field' is scoped to the expression and
        # does not clobber the loop variable above.
        args = ''.join('%%(%s)s' % field for field in (
            'type_format',
            'number',
            'label_format',
            'variant_format',
            'default_format'))
        format_str = '%%(name)s = %%(module)s.%%(type_name)s(%s)' % args
        printer(format_str % printed_field_info)
| python | Apache-2.0 | 1b14f58b3c895f542c724028dd0fb8ae0d816510 | 2026-01-05T07:12:01.390580Z | false |
google/apitools | https://github.com/google/apitools/blob/1b14f58b3c895f542c724028dd0fb8ae0d816510/apitools/gen/service_registry.py | apitools/gen/service_registry.py | #!/usr/bin/env python
#
# Copyright 2015 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Service registry for apitools."""
import collections
import logging
import re
import textwrap
from apitools.base.py import base_api
from apitools.gen import util
# We're a code generator. I don't care.
# pylint:disable=too-many-statements

# Case-insensitive matcher for MIME type patterns such as "image/*" or
# "application/json"; used to sanity-check upload "accept" entries.
_MIME_PATTERN_RE = re.compile(r'(?i)[a-z0-9_*-]+/[a-z0-9_*-]+')
class ServiceRegistry(object):
    """Registry for service types."""

    def __init__(self, client_info, message_registry,
                 names, root_package, base_files_package,
                 unelidable_request_methods):
        self.__client_info = client_info
        self.__package = client_info.package
        self.__names = names
        # Maps service name -> OrderedDict of method name -> ApiMethodInfo.
        self.__service_method_info_map = collections.OrderedDict()
        self.__message_registry = message_registry
        self.__root_package = root_package
        self.__base_files_package = base_files_package
        # Method ids whose request messages must never be elided away.
        self.__unelidable_request_methods = unelidable_request_methods
        # Union of client-wide scopes and every per-method scope seen.
        self.__all_scopes = set(self.__client_info.scopes)

    def Validate(self):
        """Validate the registered message types."""
        self.__message_registry.Validate()

    @property
    def scopes(self):
        # Sorted so generated output is deterministic.
        return sorted(list(self.__all_scopes))

    def __GetServiceClassName(self, service_name):
        # e.g. "foo" -> "FooService".
        return self.__names.ClassName(
            '%sService' % self.__names.ClassName(service_name))

    def __PrintDocstring(self, printer, method_info, method_name, name):
        """Print a docstring for a service method."""
        if method_info.description:
            description = util.CleanDescription(method_info.description)
            # NOTE(review): 'description' is immediately rebuilt from the raw
            # method_info.description below, so the cleaned value above is
            # unused — verify whether CleanDescription was meant to feed
            # partition().
            first_line, newline, remaining = method_info.description.partition(
                '\n')
            if not first_line.endswith('.'):
                first_line = '%s.' % first_line
            description = '%s%s%s' % (first_line, newline, remaining)
        else:
            description = '%s method for the %s service.' % (method_name, name)
        with printer.CommentContext():
            printer('r"""%s' % description)
            printer()
            printer('Args:')
            printer(' request: (%s) input message', method_info.request_type_name)
            printer(' global_params: (StandardQueryParameters, default: None) '
                    'global arguments')
            if method_info.upload_config:
                printer(' upload: (Upload, default: None) If present, upload')
                printer(' this stream with the request.')
            if method_info.supports_download:
                printer(
                    ' download: (Download, default: None) If present, download')
                printer(' data from the request via this stream.')
            printer('Returns:')
            printer(' (%s) The response message.', method_info.response_type_name)
            printer('"""')

    def __WriteSingleService(
            self, printer, name, method_info_map, client_class_name):
        # Emit one generated service class: its __init__ (with upload
        # configs), one wrapper method per API method, and the per-method
        # method_config lambdas.
        printer()
        class_name = self.__GetServiceClassName(name)
        printer('class %s(base_api.BaseApiService):', class_name)
        with printer.Indent():
            printer('"""Service class for the %s resource."""', name)
            printer()
            printer('_NAME = %s', repr(name))
            # Print the configs for the methods first.
            printer()
            printer('def __init__(self, client):')
            with printer.Indent():
                printer('super(%s.%s, self).__init__(client)',
                        client_class_name, class_name)
                printer('self._upload_configs = {')
                with printer.Indent(indent=' '):
                    for method_name, method_info in method_info_map.items():
                        upload_config = method_info.upload_config
                        if upload_config is not None:
                            printer(
                                "'%s': base_api.ApiUploadInfo(", method_name)
                            with printer.Indent(indent=' '):
                                attrs = sorted(
                                    x.name for x in upload_config.all_fields())
                                for attr in attrs:
                                    printer('%s=%r,',
                                            attr, getattr(upload_config, attr))
                            printer('),')
                    printer('}')
            # Now write each method in turn.
            for method_name, method_info in method_info_map.items():
                printer()
                params = ['self', 'request', 'global_params=None']
                if method_info.upload_config:
                    params.append('upload=None')
                if method_info.supports_download:
                    params.append('download=None')
                printer('def %s(%s):', method_name, ', '.join(params))
                with printer.Indent():
                    self.__PrintDocstring(
                        printer, method_info, method_name, name)
                    printer("config = self.GetMethodConfig('%s')", method_name)
                    upload_config = method_info.upload_config
                    if upload_config is not None:
                        printer("upload_config = self.GetUploadConfig('%s')",
                                method_name)
                    arg_lines = [
                        'config, request, global_params=global_params']
                    if method_info.upload_config:
                        arg_lines.append(
                            'upload=upload, upload_config=upload_config')
                    if method_info.supports_download:
                        arg_lines.append('download=download')
                    printer('return self._RunMethod(')
                    with printer.Indent(indent=' '):
                        for line in arg_lines[:-1]:
                            printer('%s,', line)
                        printer('%s)', arg_lines[-1])
                printer()
                printer('{0}.method_config = lambda: base_api.ApiMethodInfo('
                        .format(method_name))
                with printer.Indent(indent=' '):
                    method_info = method_info_map[method_name]
                    attrs = sorted(
                        x.name for x in method_info.all_fields())
                    for attr in attrs:
                        # upload_config is emitted separately above;
                        # description goes into the docstring.
                        if attr in ('upload_config', 'description'):
                            continue
                        value = getattr(method_info, attr)
                        if value is not None:
                            printer('%s=%r,', attr, value)
                    printer(')')

    def __WriteProtoServiceDeclaration(self, printer, name, method_info_map):
        """Write a single service declaration to a proto file."""
        printer()
        printer('service %s {', self.__GetServiceClassName(name))
        with printer.Indent():
            for method_name, method_info in method_info_map.items():
                # "- 3" leaves room for the "// " comment prefix.
                for line in textwrap.wrap(method_info.description,
                                          printer.CalculateWidth() - 3):
                    printer('// %s', line)
                printer('rpc %s (%s) returns (%s);',
                        method_name,
                        method_info.request_type_name,
                        method_info.response_type_name)
        printer('}')

    def WriteProtoFile(self, printer):
        """Write the services in this registry to out as proto."""
        self.Validate()
        client_info = self.__client_info
        printer('// Generated services for %s version %s.',
                client_info.package, client_info.version)
        printer()
        printer('syntax = "proto2";')
        printer('package %s;', self.__package)
        printer('import "%s";', client_info.messages_proto_file_name)
        printer()
        for name, method_info_map in self.__service_method_info_map.items():
            self.__WriteProtoServiceDeclaration(printer, name, method_info_map)

    def WriteFile(self, printer):
        """Write the services in this registry to out."""
        self.Validate()
        client_info = self.__client_info
        printer('"""Generated client library for %s version %s."""',
                client_info.package, client_info.version)
        printer('# NOTE: This file is autogenerated and should not be edited '
                'by hand.')
        printer()
        printer('from __future__ import absolute_import')
        printer()
        printer('from %s import base_api', self.__base_files_package)
        if self.__root_package:
            import_prefix = 'from {0} '.format(self.__root_package)
        else:
            import_prefix = ''
        printer('%simport %s as messages', import_prefix,
                client_info.messages_rule_name)
        printer()
        printer()
        printer('class %s(base_api.BaseApiClient):',
                client_info.client_class_name)
        with printer.Indent():
            printer(
                '"""Generated client library for service %s version %s."""',
                client_info.package, client_info.version)
            printer()
            printer('MESSAGES_MODULE = messages')
            printer('BASE_URL = {0!r}'.format(client_info.base_url))
            printer('MTLS_BASE_URL = {0!r}'.format(client_info.mtls_base_url))
            printer()
            printer('_PACKAGE = {0!r}'.format(client_info.package))
            printer('_SCOPES = {0!r}'.format(
                client_info.scopes or
                ['https://www.googleapis.com/auth/userinfo.email']))
            printer('_VERSION = {0!r}'.format(client_info.version))
            printer('_CLIENT_ID = {0!r}'.format(client_info.client_id))
            printer('_CLIENT_SECRET = {0!r}'.format(client_info.client_secret))
            printer('_USER_AGENT = {0!r}'.format(client_info.user_agent))
            printer('_CLIENT_CLASS_NAME = {0!r}'.format(
                client_info.client_class_name))
            printer('_URL_VERSION = {0!r}'.format(client_info.url_version))
            printer('_API_KEY = {0!r}'.format(client_info.api_key))
            printer()
            printer("def __init__(self, url='', credentials=None,")
            with printer.Indent(indent=' '):
                printer('get_credentials=True, http=None, model=None,')
                printer('log_request=False, log_response=False,')
                printer('credentials_args=None, default_global_params=None,')
                printer('additional_http_headers=None, '
                        'response_encoding=None):')
            with printer.Indent():
                printer('"""Create a new %s handle."""', client_info.package)
                printer('url = url or self.BASE_URL')
                printer(
                    'super(%s, self).__init__(', client_info.client_class_name)
                printer(' url, credentials=credentials,')
                printer(' get_credentials=get_credentials, http=http, '
                        'model=model,')
                printer(' log_request=log_request, '
                        'log_response=log_response,')
                printer(' credentials_args=credentials_args,')
                printer(' default_global_params=default_global_params,')
                printer(' additional_http_headers=additional_http_headers,')
                printer(' response_encoding=response_encoding)')
                for name in self.__service_method_info_map.keys():
                    printer('self.%s = self.%s(self)',
                            name, self.__GetServiceClassName(name))
            for name, method_info in self.__service_method_info_map.items():
                self.__WriteSingleService(
                    printer, name, method_info, client_info.client_class_name)

    def __RegisterService(self, service_name, method_info_map):
        # Guard against double registration of the same service name.
        if service_name in self.__service_method_info_map:
            raise ValueError(
                'Attempt to re-register descriptor %s' % service_name)
        self.__service_method_info_map[service_name] = method_info_map

    def __CreateRequestType(self, method_description, body_type=None):
        """Create a request type for this method."""
        schema = {}
        schema['id'] = self.__names.ClassName('%sRequest' % (
            self.__names.ClassName(method_description['id'], separator='.'),))
        schema['type'] = 'object'
        schema['properties'] = collections.OrderedDict()
        if 'parameterOrder' not in method_description:
            ordered_parameters = list(method_description.get('parameters', []))
        else:
            # Honor the declared order first, then append any leftovers.
            ordered_parameters = method_description['parameterOrder'][:]
            for k in method_description['parameters']:
                if k not in ordered_parameters:
                    ordered_parameters.append(k)
        for parameter_name in ordered_parameters:
            field = dict(method_description['parameters'][parameter_name])
            if 'type' not in field:
                raise ValueError('No type found in parameter %s' % field)
            schema['properties'][parameter_name] = field
        if body_type is not None:
            body_field_name = self.__GetRequestField(
                method_description, body_type)
            if body_field_name in schema['properties']:
                raise ValueError('Failed to normalize request resource name')
            if 'description' not in body_type:
                body_type['description'] = (
                    'A %s resource to be passed as the request body.' % (
                        self.__GetRequestType(body_type),))
            schema['properties'][body_field_name] = body_type
        self.__message_registry.AddDescriptorFromSchema(schema['id'], schema)
        return schema['id']

    def __CreateVoidResponseType(self, method_description):
        """Create an empty response type."""
        schema = {}
        method_name = self.__names.ClassName(
            method_description['id'], separator='.')
        schema['id'] = self.__names.ClassName('%sResponse' % method_name)
        schema['type'] = 'object'
        schema['description'] = 'An empty %s response.' % method_name
        self.__message_registry.AddDescriptorFromSchema(schema['id'], schema)
        return schema['id']

    def __NeedRequestType(self, method_description, request_type):
        """Determine if this method needs a new request type created."""
        if not request_type:
            return True
        method_id = method_description.get('id', '')
        if method_id in self.__unelidable_request_methods:
            return True
        message = self.__message_registry.LookupDescriptorOrDie(request_type)
        if message is None:
            return True
        field_names = [x.name for x in message.fields]
        parameters = method_description.get('parameters', {})
        # for/else: only if EVERY parameter is a path param already present
        # on the message can the request type be elided.
        for param_name, param_info in parameters.items():
            if (param_info.get('location') != 'path' or
                    self.__names.CleanName(param_name) not in field_names):
                break
        else:
            return False
        return True

    def __MaxSizeToInt(self, max_size):
        """Convert max_size to an int."""
        # Accepts e.g. "1024", "5MB", "2GB"; unit is any two chars ending
        # in B, validated against unit_dict below.
        size_groups = re.match(r'(?P<size>\d+)(?P<unit>.B)?$', max_size)
        if size_groups is None:
            raise ValueError('Could not parse maxSize')
        size, unit = size_groups.group('size', 'unit')
        shift = 0
        if unit is not None:
            unit_dict = {'KB': 10, 'MB': 20, 'GB': 30, 'TB': 40}
            shift = unit_dict.get(unit.upper())
            if shift is None:
                raise ValueError('Unknown unit %s' % unit)
        return int(size) * (1 << shift)

    def __ComputeUploadConfig(self, media_upload_config, method_id):
        """Fill out the upload config for this method."""
        config = base_api.ApiUploadInfo()
        if 'maxSize' in media_upload_config:
            config.max_size = self.__MaxSizeToInt(
                media_upload_config['maxSize'])
        if 'accept' not in media_upload_config:
            logging.warning(
                'No accept types found for upload configuration in '
                'method %s, using */*', method_id)
        # NOTE(review): when 'accept' is missing the '*/*' default is a
        # string, so this extends with its individual characters
        # ('*', '/', '*') rather than ['*/*'] — verify intent.
        config.accept.extend([
            str(a) for a in media_upload_config.get('accept', '*/*')])

        for accept_pattern in config.accept:
            if not _MIME_PATTERN_RE.match(accept_pattern):
                logging.warning('Unexpected MIME type: %s', accept_pattern)
        protocols = media_upload_config.get('protocols', {})
        for protocol in ('simple', 'resumable'):
            media = protocols.get(protocol, {})
            for attr in ('multipart', 'path'):
                if attr in media:
                    setattr(config, '%s_%s' % (protocol, attr), media[attr])
        return config

    def __ComputeMethodInfo(self, method_description, request, response,
                            request_field):
        """Compute the base_api.ApiMethodInfo for this method."""
        relative_path = self.__names.NormalizeRelativePath(
            ''.join((self.__client_info.base_path,
                     method_description['path'])))
        method_id = method_description['id']
        ordered_params = []
        for param_name in method_description.get('parameterOrder', []):
            param_info = method_description['parameters'][param_name]
            if param_info.get('required', False):
                ordered_params.append(param_name)
        method_info = base_api.ApiMethodInfo(
            relative_path=relative_path,
            method_id=method_id,
            http_method=method_description['httpMethod'],
            description=util.CleanDescription(
                method_description.get('description', '')),
            query_params=[],
            path_params=[],
            ordered_params=ordered_params,
            request_type_name=self.__names.ClassName(request),
            response_type_name=self.__names.ClassName(response),
            request_field=request_field,
        )
        flat_path = method_description.get('flatPath', None)
        if flat_path is not None:
            flat_path = self.__names.NormalizeRelativePath(
                self.__client_info.base_path + flat_path)
            # Only record flat_path when it actually differs.
            if flat_path != relative_path:
                method_info.flat_path = flat_path
        if method_description.get('supportsMediaUpload', False):
            method_info.upload_config = self.__ComputeUploadConfig(
                method_description.get('mediaUpload'), method_id)
        method_info.supports_download = method_description.get(
            'supportsMediaDownload', False)
        if method_description.get('apiVersion'):
            method_info.api_version_param = method_description.get('apiVersion')
        self.__all_scopes.update(method_description.get('scopes', ()))
        # Split parameters into query vs path lists.
        for param, desc in method_description.get('parameters', {}).items():
            param = self.__names.CleanName(param)
            location = desc['location']
            if location == 'query':
                method_info.query_params.append(param)
            elif location == 'path':
                method_info.path_params.append(param)
            else:
                raise ValueError(
                    'Unknown parameter location %s for parameter %s' % (
                        location, param))
        method_info.path_params.sort()
        method_info.query_params.sort()
        return method_info

    def __BodyFieldName(self, body_type):
        # Field name derived from the request body's $ref, or '' if no body.
        if body_type is None:
            return ''
        return self.__names.FieldName(body_type['$ref'])

    def __GetRequestType(self, body_type):
        return self.__names.ClassName(body_type.get('$ref'))

    def __GetRequestField(self, method_description, body_type):
        """Determine the request field for this method."""
        body_field_name = self.__BodyFieldName(body_type)
        if body_field_name in method_description.get('parameters', {}):
            body_field_name = self.__names.FieldName(
                '%s_resource' % body_field_name)
        # It's exceedingly unlikely that we'd get two name collisions, which
        # means it's bound to happen at some point.
        while body_field_name in method_description.get('parameters', {}):
            body_field_name = self.__names.FieldName(
                '%s_body' % body_field_name)
        return body_field_name

    def AddServiceFromResource(self, service_name, methods):
        """Add a new service named service_name with the given methods."""
        service_name = self.__names.CleanName(service_name)
        method_descriptions = methods.get('methods', {})
        method_info_map = collections.OrderedDict()
        items = sorted(method_descriptions.items())
        for method_name, method_description in items:
            method_name = self.__names.MethodName(method_name)

            # NOTE: According to the discovery document, if the request or
            # response is present, it will simply contain a `$ref`.
            body_type = method_description.get('request')
            if body_type is None:
                request_type = None
            else:
                request_type = self.__GetRequestType(body_type)
            if self.__NeedRequestType(method_description, request_type):
                request = self.__CreateRequestType(
                    method_description, body_type=body_type)
                request_field = self.__GetRequestField(
                    method_description, body_type)
            else:
                request = request_type
                request_field = base_api.REQUEST_IS_BODY
            if 'response' in method_description:
                response = method_description['response']['$ref']
            else:
                response = self.__CreateVoidResponseType(method_description)
            method_info_map[method_name] = self.__ComputeMethodInfo(
                method_description, request, response, request_field)
        # Recurse into nested resources, prefixing with the parent name.
        nested_services = methods.get('resources', {})
        services = sorted(nested_services.items())
        for subservice_name, submethods in services:
            new_service_name = '%s_%s' % (service_name, subservice_name)
            self.AddServiceFromResource(new_service_name, submethods)
        self.__RegisterService(service_name, method_info_map)
| python | Apache-2.0 | 1b14f58b3c895f542c724028dd0fb8ae0d816510 | 2026-01-05T07:12:01.390580Z | false |
andybrandt/mcp-simple-pubmed | https://github.com/andybrandt/mcp-simple-pubmed/blob/de245d350c456df353363a50051ed5547dedafc0/test_client.py | test_client.py | import asyncio
import os
from fastmcp.client import Client
from fastmcp.client.transports import StdioTransport
import json
async def main():
    """A simple test client to connect to the PubMed server and test its tools.

    Spawns the server as a subprocess over stdio, then exercises tools,
    resources and prompts in sequence, printing results to stdout.
    """
    # Set the required environment variable for the server process
    server_env = {"PUBMED_EMAIL": "test@example.com"}

    # Configure the stdio transport with the server script and environment
    transport = StdioTransport(
        command="python",
        args=["mcp_simple_pubmed/server.py"],
        env=server_env
    )

    # Create a client that uses our configured transport
    client = Client(transport)

    print("Starting client...")
    async with client:
        try:
            # --- Test 1: List Tools ---
            print("\n--- Running Test 1: List Tools ---")
            tools = await client.list_tools()
            print("--- Available Tools ---")
            for tool in tools:
                print(f"- {tool.name}: {tool.annotations.title if tool.annotations else 'No title'}")
                print(f" {tool.description}\n")

            # --- Test 2: Search for articles ---
            print("\n\n--- Running Test 2: Search for articles ---")
            print("Calling 'search_pubmed' tool with query: 'tuberculosis treatment'...")
            search_result = await client.call_tool(
                "search_pubmed",
                {"query": "tuberculosis treatment", "max_results": 5}
            )
            print("\n--- Search Result ---")
            # Tool returns a JSON string; parse it for later tests.
            parsed_search_result = json.loads(search_result.data)
            print(json.dumps(parsed_search_result, indent=2))

            # --- Test 3: Fetch full text of the first article from search ---
            if parsed_search_result:
                print("\n\n--- Running Test 3: Fetch full text of the first article from search ---")
                first_article_pmid = parsed_search_result[0].get("pmid")
                if first_article_pmid:
                    print(f"Calling 'get_paper_fulltext' for PMID: {first_article_pmid}...")
                    fulltext_result_1 = await client.call_tool(
                        "get_paper_fulltext",
                        {"pmid": first_article_pmid}
                    )
                    print("\n--- Full Text Result 1 ---")
                    # Print only a snippet as this can be long
                    print(fulltext_result_1.data[:500] + "...")
                else:
                    print("Could not find PMID in the first search result.")
            else:
                print("Search returned no results, skipping full text fetch.")

            # --- Test 4: Fetch full text for a specific, known article ---
            print("\n\n--- Running Test 4: Fetch full text for a specific, known article ---")
            specific_pmid = "24677277"
            print(f"Calling 'get_paper_fulltext' for PMID: {specific_pmid}...")
            fulltext_result_2 = await client.call_tool(
                "get_paper_fulltext",
                {"pmid": specific_pmid}
            )
            print("\n--- Full Text Result 2 ---")
            # Print only a snippet if it's very long
            result_text = fulltext_result_2.data
            if len(result_text) > 1000:
                print(result_text[:1000] + "\n\n... (truncated for brevity)")
            else:
                print(result_text)

            # --- Test 5: Read abstract resource from the first search result ---
            if parsed_search_result:
                print("\n\n--- Running Test 5: Read abstract resource ---")
                first_article = parsed_search_result[0]
                abstract_uri = first_article.get("abstract_uri")
                if abstract_uri:
                    print(f"Reading resource at URI: {abstract_uri}...")
                    abstract_result = await client.read_resource(abstract_uri)
                    print("\n--- Abstract Resource Result ---")
                    # The result from a resource read is a list of contents. We'll parse the first.
                    parsed_abstract = json.loads(abstract_result[0].text)
                    print(json.dumps(parsed_abstract, indent=2))
                else:
                    print("Could not find abstract_uri in the first search result.")
            else:
                print("Search returned no results, skipping resource read.")

            # --- Test 6: Read full_text resource for a specific, known article ---
            print("\n\n--- Running Test 6: Read full_text resource ---")
            specific_pmid_for_resource = "24677277"
            full_text_uri = f"pubmed://{specific_pmid_for_resource}/full_text"
            print(f"Reading resource at URI: {full_text_uri}...")
            full_text_resource_result = await client.read_resource(full_text_uri)
            print("\n--- Full Text Resource Result ---")
            # The result from a resource read is a list of contents. We'll print the first.
            result_text = full_text_resource_result[0].text
            if len(result_text) > 1000:
                print(result_text[:1000] + "\n\n... (truncated for brevity)")
            else:
                print(result_text)

            # --- Test 7: List Prompts ---
            print("\n\n--- Running Test 7: List Prompts ---")
            prompts = await client.list_prompts()
            print("--- Available Prompts ---")
            for prompt in prompts:
                print(f"- {prompt.name}: {prompt.description}")
                if prompt.arguments:
                    for arg in prompt.arguments:
                        required_marker = " (required)" if arg.required else ""
                        print(f" - {arg.name}{required_marker}: {arg.description}")

            # --- Test 8: Get a Prompt (systematic_review_search) ---
            print("\n\n--- Running Test 8: Get Prompt (systematic_review_search) ---")
            prompt_result = await client.get_prompt(
                "systematic_review_search",
                arguments={"topic": "diabetes prevention", "years": "3"}
            )
            print("--- Prompt Result ---")
            for message in prompt_result.messages:
                # Handle different content types
                content = message.content
                if hasattr(content, 'text'):
                    print(f"Content: {content.text[:300]}...")
                else:
                    print(f"Content: {str(content)[:300]}...")

            # --- Test 9: Get a Prompt (pico_search) ---
            print("\n\n--- Running Test 9: Get Prompt (pico_search) ---")
            pico_result = await client.get_prompt(
                "pico_search",
                arguments={
                    "population": "adults with type 2 diabetes",
                    "intervention": "metformin",
                    "comparison": "placebo",
                    "outcome": "HbA1c reduction"
                }
            )
            print("--- PICO Prompt Result ---")
            for message in pico_result.messages:
                content = message.content
                if hasattr(content, 'text'):
                    print(f"Content: {content.text[:300]}...")
                else:
                    print(f"Content: {str(content)[:300]}...")

            # --- Test 10: Get a Prompt (author_search) ---
            print("\n\n--- Running Test 10: Get Prompt (author_search) ---")
            author_result = await client.get_prompt(
                "author_search",
                arguments={"author_name": "Fauci Anthony", "affiliation": "NIH"}
            )
            print("--- Author Search Prompt Result ---")
            for message in author_result.messages:
                content = message.content
                if hasattr(content, 'text'):
                    print(f"Content: {content.text[:300]}...")
                else:
                    print(f"Content: {str(content)[:300]}...")

        # Broad catch is acceptable here: this is a manual test driver and
        # any failure should be reported rather than crash the run.
        except Exception as e:
            print(f"An error occurred: {e}")
if __name__ == "__main__":
asyncio.run(main()) | python | MIT | de245d350c456df353363a50051ed5547dedafc0 | 2026-01-05T07:12:06.493732Z | false |
andybrandt/mcp-simple-pubmed | https://github.com/andybrandt/mcp-simple-pubmed/blob/de245d350c456df353363a50051ed5547dedafc0/mcp_simple_pubmed/__main__.py | mcp_simple_pubmed/__main__.py | """
Main module entry point for mcp-simple-pubmed.
"""
from .server import main
if __name__ == "__main__":
main() | python | MIT | de245d350c456df353363a50051ed5547dedafc0 | 2026-01-05T07:12:06.493732Z | false |
andybrandt/mcp-simple-pubmed | https://github.com/andybrandt/mcp-simple-pubmed/blob/de245d350c456df353363a50051ed5547dedafc0/mcp_simple_pubmed/pubmed_search.py | mcp_simple_pubmed/pubmed_search.py | """
Search functionality for PubMed using Bio.Entrez.
"""
import os
import time
import logging
import xml.etree.ElementTree as ET
from typing import List, Dict, Optional, Any
from Bio import Entrez
# Configure logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("pubmed-search")
class PubMedSearch:
"""Client for searching PubMed articles using Bio.Entrez."""
def __init__(self, email: str, tool: str, api_key: Optional[str] = None):
"""Initialize PubMed search client with required credentials.
Args:
email: Valid email address for API access
tool: Unique identifier for the tool
api_key: Optional API key for higher rate limits
"""
if not email:
raise ValueError("Email is required for PubMed search")
self.email = email
self.tool = tool
self.api_key = api_key
# Configure Entrez
Entrez.email = email
Entrez.tool = tool
if api_key:
Entrez.api_key = api_key
logger.info(f"PubMed search initialized with email: {email}, tool: {tool}")
async def search_articles(self, query: str, max_results: int = 10) -> List[Dict[str, Any]]:
    """Search for articles matching the query.

    Runs an Entrez esearch for PMIDs, then an efetch per PMID to build
    article metadata dicts. Network or parse failures are logged and
    degrade to an empty list (or a skipped article) rather than raising.

    Args:
        query: Search query string
        max_results: Maximum number of results to return

    Returns:
        List of article metadata dictionaries
    """
    try:
        # Replace [Date - Publication] with [PDAT] in query
        if "[Date - Publication]" in query:
            query = query.replace("[Date - Publication]", "[PDAT]")

        # Make sure date ranges are properly formatted
        if ":" in query:
            parts = query.split(":")
            if len(parts) == 2:
                before_range = parts[0].strip()
                after_range = parts[1].strip()
                if all(c not in before_range for c in '"()') and "[PDAT]" in before_range:
                    # Add quotes and parentheses for date range
                    date = before_range.replace("[PDAT]", "").strip()
                    query = f'("{date}"[PDAT] : {after_range}'

        logger.info(f"Searching PubMed with query: {query}")

        # Step 1: Search for article IDs
        try:
            handle = Entrez.esearch(db="pubmed", term=query, retmax=max_results)
            search_results = Entrez.read(handle)
            handle.close()
            # Debug info
            logger.info(f"Total results found: {search_results.get('Count', 'Unknown')}")
        except Exception as e:
            logger.error(f"Error in PubMed search: {str(e)}")
            return []

        # Fixed: use the idiomatic "not in" membership test (PEP 8).
        if 'IdList' not in search_results or not search_results['IdList']:
            logger.info("No results found")
            return []

        pmids = search_results['IdList']
        logger.info(f"Found {len(pmids)} articles")

        # Step 2: Get details for each article
        results = []
        for pmid in pmids:
            logger.info(f"Fetching details for PMID {pmid}")
            try:
                detail_handle = Entrez.efetch(db="pubmed", id=pmid, rettype="xml")
                article_xml = detail_handle.read()
                detail_handle.close()

                # Parse article details
                article_root = ET.fromstring(article_xml)

                # Get basic article data
                article = {
                    "pmid": pmid,
                    "title": self._get_xml_text(article_root, './/ArticleTitle') or "No title",
                    "abstract": self._get_xml_text(article_root, './/Abstract/AbstractText') or "No abstract available",
                    "journal": self._get_xml_text(article_root, './/Journal/Title') or "",
                    "authors": []
                }

                # Get authors
                author_list = article_root.findall('.//Author')
                for author in author_list:
                    last_name = self._get_xml_text(author, 'LastName') or ""
                    fore_name = self._get_xml_text(author, 'ForeName') or ""
                    if last_name or fore_name:
                        article["authors"].append(f"{last_name} {fore_name}".strip())

                # Get publication date
                pub_date = article_root.find('.//PubDate')
                if pub_date is not None:
                    year = self._get_xml_text(pub_date, 'Year')
                    month = self._get_xml_text(pub_date, 'Month')
                    day = self._get_xml_text(pub_date, 'Day')
                    article["publication_date"] = {
                        "year": year,
                        "month": month,
                        "day": day
                    }

                # Get article identifiers (DOI, PMC)
                article_id_list = article_root.findall('.//ArticleId')
                for article_id in article_id_list:
                    id_type = article_id.get('IdType')
                    if id_type == 'doi':
                        article["doi"] = article_id.text
                    elif id_type == 'pmc':
                        article["pmc_id"] = article_id.text

                # Add URLs
                article["urls"] = self._generate_urls(pmid,
                                                      article.get("doi"),
                                                      article.get("pmc_id"))

                # Add resource URIs
                article["abstract_uri"] = f"pubmed://{pmid}/abstract"
                article["full_text_uri"] = f"pubmed://{pmid}/full_text"

                results.append(article)

            except Exception as e:
                # Skip articles that fail to fetch/parse; keep the rest.
                logger.error(f"Error fetching details for PMID {pmid}: {str(e)}")
                continue

        return results

    except Exception as e:
        logger.exception(f"Error in search_articles: {str(e)}")
        return []
def _get_xml_text(self, elem: Optional[ET.Element], xpath: str) -> Optional[str]:
"""Helper method to safely get text from XML element."""
if elem is None:
return None
found = elem.find(xpath)
return found.text if found is not None else None
def _generate_urls(self, pmid: str, doi: Optional[str] = None, pmc_id: Optional[str] = None) -> Dict[str, str]:
    """Generate URLs for human access.

    Args:
        pmid: PubMed ID
        doi: Optional DOI
        pmc_id: Optional PMC ID

    Returns:
        Dictionary with URLs; always contains "pubmed" and "pubmed_mobile",
        plus "doi" / "pmc" when the corresponding identifier was supplied.
    """
    # The canonical PubMed page and its mobile mirror are always present.
    urls = {
        "pubmed": f"https://pubmed.ncbi.nlm.nih.gov/{pmid}/",
        "pubmed_mobile": f"https://m.pubmed.ncbi.nlm.nih.gov/{pmid}/"
    }
    # Optional identifiers map to the publisher page (via DOI) and the
    # PubMed Central article page.
    if doi:
        urls["doi"] = f"https://doi.org/{doi}"
    if pmc_id:
        urls["pmc"] = f"https://www.ncbi.nlm.nih.gov/pmc/articles/{pmc_id}/"
    return urls | python | MIT | de245d350c456df353363a50051ed5547dedafc0 | 2026-01-05T07:12:06.493732Z | false
andybrandt/mcp-simple-pubmed | https://github.com/andybrandt/mcp-simple-pubmed/blob/de245d350c456df353363a50051ed5547dedafc0/mcp_simple_pubmed/__init__.py | mcp_simple_pubmed/__init__.py | """
MCP server providing access to PubMed articles through Entrez API.
"""
import asyncio
import os
from . import server
def main():
    """Main entry point for the package: run the server's async main."""
    entry_coroutine = server.main()
    asyncio.run(entry_coroutine)
__version__ = "0.1.0" | python | MIT | de245d350c456df353363a50051ed5547dedafc0 | 2026-01-05T07:12:06.493732Z | false |
andybrandt/mcp-simple-pubmed | https://github.com/andybrandt/mcp-simple-pubmed/blob/de245d350c456df353363a50051ed5547dedafc0/mcp_simple_pubmed/server.py | mcp_simple_pubmed/server.py | """
MCP server implementation for PubMed integration using FastMCP SDK.
"""
import os
import json
import logging
from typing import Optional, Dict, Any, Tuple, List
from fastmcp import FastMCP
from mcp.types import TextContent
from mcp_simple_pubmed.pubmed_client import PubMedClient
from mcp_simple_pubmed.fulltext_client import FullTextClient
# Configure logging
# NOTE: basicConfig at import time configures the process-wide root logger.
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("pubmed-server")

# Initialize FastMCP app; tools/resources/prompts below register on it.
app = FastMCP("pubmed-server")
def configure_clients() -> Tuple[PubMedClient, FullTextClient]:
    """Build the PubMed metadata client and the PMC full-text client.

    Reads PUBMED_EMAIL (required), PUBMED_TOOL (defaults to
    "mcp-simple-pubmed") and PUBMED_API_KEY (optional) from the environment.

    Raises:
        ValueError: when PUBMED_EMAIL is missing or empty.
    """
    email = os.environ.get("PUBMED_EMAIL")
    if not email:
        raise ValueError("PUBMED_EMAIL environment variable is required")

    tool = os.environ.get("PUBMED_TOOL", "mcp-simple-pubmed")
    key = os.environ.get("PUBMED_API_KEY")

    return (
        PubMedClient(email=email, tool=tool, api_key=key),
        FullTextClient(email=email, tool=tool, api_key=key),
    )
# Initialize the clients once at import time; the tool/resource handlers
# below close over these shared module-level instances.
pubmed_client, fulltext_client = configure_clients()
@app.tool(
    annotations={
        "title": "Search articles about medical and life sciences research available on PubMed.",
        "readOnlyHint": True,
        "openWorldHint": True  # Calls external PubMed API
    }
)
async def search_pubmed(query: str, max_results: int = 10) -> str:
    """Search PubMed for medical and life sciences research articles.

    You can use these search features:
    - Simple keyword search: "covid vaccine"
    - Field-specific search:
    - Title search: [Title]
    - Author search: [Author]
    - MeSH terms: [MeSH Terms]
    - Journal: [Journal]
    - Date ranges: Add year or date range like "2020:2024[Date - Publication]"
    - Combine terms with AND, OR, NOT
    - Use quotation marks for exact phrases

    Examples:
    - "covid vaccine" - basic search
    - "breast cancer"[Title] AND "2023"[Date - Publication]
    - "Smith J"[Author] AND "diabetes"
    - "RNA"[MeSH Terms] AND "therapy"

    The search will return:
    - Paper titles
    - Authors
    - Publication details
    - Abstract preview (when available)
    - Links to full text (when available)
    - DOI when available
    - Keywords and MeSH terms

    Note: Use quotes around multi-word terms for best results.
    """
    try:
        # Validate and constrain max_results: clamp into [1, 50].
        max_results = min(max(1, max_results), 50)
        logger.info(f"Processing search with query: {query}, max_results: {max_results}")
        # Perform the search (delegates to the shared PubMedClient).
        results = await pubmed_client.search_articles(
            query=query,
            max_results=max_results
        )
        # Create resource URIs for articles. The article dicts are mutated
        # in place, then collected in order.
        articles_with_resources = []
        for article in results:
            pmid = article["pmid"]
            # Add original URIs (resolvable via the pubmed:// resource below)
            article["abstract_uri"] = f"pubmed://{pmid}/abstract"
            article["full_text_uri"] = f"pubmed://{pmid}/full_text"
            # Add DOI URL if DOI exists
            if "doi" in article:
                article["doi_url"] = f"https://doi.org/{article['doi']}"
            # Add PubMed URLs
            article["pubmed_url"] = f"https://pubmed.ncbi.nlm.nih.gov/{pmid}/"
            # Add PMC URL only if PMCID is available (key set by PubMedClient)
            if "pmcid" in article:
                article["pmc_url"] = f"https://www.ncbi.nlm.nih.gov/pmc/articles/{article['pmcid']}/"
            articles_with_resources.append(article)
        # Format the response as pretty-printed JSON text.
        formatted_results = json.dumps(articles_with_resources, indent=2)
        logger.info(f"Search completed successfully, found {len(results)} results")
        return formatted_results
    except Exception as e:
        # Surface failures to the MCP client as a ValueError with context.
        logger.exception(f"Error in search_pubmed")
        raise ValueError(f"Error processing search request: {str(e)}")
@app.tool(
    annotations={
        "title": "Get a paper's full text",
        "readOnlyHint": True,
        "openWorldHint": True  # Calls external PubMed API
    }
)
async def get_paper_fulltext(pmid: str) -> str:
    """Get full text of a PubMed article using its ID.

    This tool attempts to retrieve the complete text of the paper if available through PubMed Central.
    If the paper is not available in PMC, it will return a message explaining why and provide information
    about where the text might be available (e.g., through DOI).

    Example usage:
    get_paper_fulltext(pmid="39661433")

    Returns:
        - If successful: The complete text of the paper
        - If not available: A clear message explaining why (e.g., "not in PMC", "requires journal access")
    """
    try:
        logger.info(f"Attempting to get full text for PMID: {pmid}")

        # PMC is the only source we can pull actual text from, so try it first.
        is_in_pmc, _pmc_id = await fulltext_client.check_full_text_availability(pmid)
        if is_in_pmc:
            text = await fulltext_client.get_full_text(pmid)
            if text:
                logger.info(f"Successfully retrieved full text from PMC for PMID {pmid}")
                return text

        # Not retrievable from PMC: point the caller at alternative locations,
        # using article metadata to include a DOI link when one is known.
        details = await pubmed_client.get_article_details(pmid)
        parts = [
            "Full text is not available in PubMed Central.\n\n",
            "The article may be available at these locations:\n",
            f"- PubMed page: https://pubmed.ncbi.nlm.nih.gov/{pmid}/\n",
        ]
        if details and "doi" in details:
            parts.append(f"- Publisher's site (via DOI): https://doi.org/{details['doi']}\n")
        logger.info(f"Full text not available in PMC for PMID {pmid}, provided alternative locations")
        return "".join(parts)
    except Exception as e:
        logger.exception(f"Error in get_paper_fulltext")
        raise ValueError(f"Error retrieving full text: {str(e)}")
@app.resource("pubmed://{pmid}/{resource_type}")
async def read_pubmed_resource(pmid: str, resource_type: str) -> str:
    """
    Reads different types of content for a given PubMed ID (PMID).
    This can be the article's abstract or its full text.

    You can find PMIDs by searching for articles using the search_pubmed tool.

    Example usage:
    read_pubmed_resource(pmid="39661433", resource_type="abstract")
    read_pubmed_resource(pmid="39661433", resource_type="full_text")
    """
    logger.info(f"Reading resource for pmid={pmid}, type={resource_type}")
    try:
        if resource_type == "abstract":
            # Abstract view: serve the full metadata record as JSON.
            details = await pubmed_client.get_article_details(pmid)
            return json.dumps(details, indent=2)

        if resource_type == "full_text":
            is_in_pmc, _pmc_id = await fulltext_client.check_full_text_availability(pmid)
            if is_in_pmc:
                text = await fulltext_client.get_full_text(pmid)
                if text:
                    return text
            # Mirror the helpful fallback message used by the tool.
            details = await pubmed_client.get_article_details(pmid)
            parts = [
                "Full text is not available in PubMed Central.\n\n",
                "The article may be available at these locations:\n",
                f"- PubMed page: https://pubmed.ncbi.nlm.nih.gov/{pmid}/\n",
            ]
            if details and "doi" in details:
                parts.append(f"- Publisher's site (via DOI): https://doi.org/{details['doi']}\n")
            return "".join(parts)

        # Unknown resource type: raised inside try, so the except below
        # re-wraps it exactly as the original control flow did.
        raise ValueError(f"Invalid resource type requested: {resource_type}")
    except Exception as e:
        logger.exception(f"Error reading resource pmid={pmid}, type={resource_type}")
        raise ValueError(f"Error reading resource: {str(e)}")
# =============================================================================
# Prompts - Templates to help users construct effective PubMed searches
# =============================================================================
@app.prompt()
def systematic_review_search(topic: str, years: str = "5") -> List[TextContent]:
    """Generate a systematic review search strategy for a medical topic."""
    guidance = f"""Help me create a comprehensive PubMed search strategy for a systematic review on: "{topic}"
Please build a search query that includes:
1. MeSH terms (Medical Subject Headings) for the main concept
2. Free-text synonyms and related terms
3. Boolean operators (AND, OR) to combine terms
4. Date filter for the last {years} years using [PDAT]
Use PubMed field tags like:
- [MeSH Terms] for controlled vocabulary
- [Title/Abstract] for free text
- [PDAT] for publication date
Example format: (term1[MeSH Terms] OR term2[Title/Abstract]) AND ("2020"[PDAT] : "2025"[PDAT])
After constructing the query, use the search_pubmed tool to execute it."""
    # Single text message; the prompt framework expects a list of contents.
    return [TextContent(type="text", text=guidance)]
@app.prompt()
def pico_search(
    population: str,
    intervention: str,
    comparison: str = "",
    outcome: str = ""
) -> List[TextContent]:
    """Build a PICO-based search query for clinical questions."""
    # Population and Intervention are mandatory; Comparison and Outcome are
    # appended only when the caller supplied non-empty values.
    elements = [f"Population: {population}", f"Intervention: {intervention}"]
    for label, value in (("Comparison", comparison), ("Outcome", outcome)):
        if value:
            elements.append(f"{label}: {value}")
    bullet_block = "\n".join(f"- {item}" for item in elements)

    prompt_text = f"""Help me search PubMed for this clinical question using the PICO framework:
{bullet_block}
Please:
1. Identify MeSH terms and synonyms for each PICO element
2. Combine terms within each element using OR
3. Combine PICO elements using AND
4. Consider adding study type filters (e.g., Clinical Trial, Meta-Analysis)
Build the query and use search_pubmed to find relevant articles."""
    return [TextContent(type="text", text=prompt_text)]
@app.prompt()
def author_search(author_name: str, affiliation: str = "") -> List[TextContent]:
    """Find all publications by a specific author."""
    # Mention the affiliation in the prompt only when one was given.
    affiliation_note = f" affiliated with {affiliation}" if affiliation else ""
    prompt_text = f"""Help me find all PubMed publications by author: {author_name}{affiliation_note}
Please:
1. Format the author name correctly for PubMed (LastName FirstInitial, e.g., "Smith J")
2. Use the [Author] field tag
3. If affiliation is provided, combine with [Affiliation] field
4. Consider name variations (full name vs initials)
Build the query and use search_pubmed to retrieve the publications."""
    return [TextContent(type="text", text=prompt_text)]
def main():
    """Run the MCP server.

    Console entry point: delegates to the FastMCP app's run().
    """
    app.run()
if __name__ == "__main__":
main() | python | MIT | de245d350c456df353363a50051ed5547dedafc0 | 2026-01-05T07:12:06.493732Z | false |
andybrandt/mcp-simple-pubmed | https://github.com/andybrandt/mcp-simple-pubmed/blob/de245d350c456df353363a50051ed5547dedafc0/mcp_simple_pubmed/fulltext_client.py | mcp_simple_pubmed/fulltext_client.py | """
Client for retrieving full text content of PubMed articles.
Separate from main PubMed client to maintain code separation and stability.
"""
import logging
import time
import http.client
from typing import Optional, Tuple
from Bio import Entrez
import xml.etree.ElementTree as ET
# Configure logging
# NOTE: basicConfig at import time configures the process-wide root logger.
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("pubmed-fulltext")
class FullTextClient:
    """Client for retrieving full text content from PubMed Central."""

    def __init__(self, email: str, tool: str, api_key: Optional[str] = None):
        """Initialize full text client with required credentials.

        Args:
            email: Valid email address for API access
            tool: Unique identifier for the tool
            api_key: Optional API key for higher rate limits
        """
        self.email = email
        self.tool = tool
        self.api_key = api_key
        # Configure Entrez
        # NOTE(review): Bio.Entrez settings are module-global; if several
        # clients are constructed with different credentials the last one
        # wins — confirm this is acceptable.
        Entrez.email = email
        Entrez.tool = tool
        if api_key:
            Entrez.api_key = api_key

    async def check_full_text_availability(self, pmid: str) -> Tuple[bool, Optional[str]]:
        """Check if full text is available in PMC and get PMC ID if it exists.

        Args:
            pmid: PubMed ID of the article

        Returns:
            Tuple of (availability boolean, PMC ID if available)
        """
        try:
            logger.info(f"Checking PMC availability for PMID {pmid}")
            handle = Entrez.elink(dbfrom="pubmed", db="pmc", id=pmid)
            if not handle:
                logger.info(f"No PMC link found for PMID {pmid}")
                return False, None
            xml_content = handle.read()
            handle.close()
            # Parse XML to get PMC ID.
            root = ET.fromstring(xml_content)
            # Only the first LinkSetDb/Id is inspected — assumes the
            # pubmed->pmc link set lists the article's own record first.
            linksetdb = root.find(".//LinkSetDb")
            if linksetdb is None:
                logger.info(f"No PMC ID found for PMID {pmid}")
                return False, None
            id_elem = linksetdb.find(".//Id")
            if id_elem is None:
                logger.info(f"No PMC ID element found for PMID {pmid}")
                return False, None
            pmc_id = id_elem.text
            logger.info(f"Found PMC ID {pmc_id} for PMID {pmid}")
            return True, pmc_id
        except Exception as e:
            # Any Entrez/parse failure is reported as "not available" so
            # callers can fall back to metadata-only responses.
            logger.exception(f"Error checking PMC availability for PMID {pmid}: {str(e)}")
            return False, None

    async def get_full_text(self, pmid: str) -> Optional[str]:
        """Get full text of the article if available through PMC.

        Handles truncated responses by making additional requests.

        Args:
            pmid: PubMed ID of the article

        Returns:
            Full text content if available, None otherwise
        """
        try:
            # First check availability and get PMC ID.
            available, pmc_id = await self.check_full_text_availability(pmid)
            if not available or pmc_id is None:
                logger.info(f"Full text not available in PMC for PMID {pmid}")
                return None
            logger.info(f"Fetching full text for PMC ID {pmc_id}")
            content = ""
            retstart = 0
            while True:
                full_text_handle = Entrez.efetch(
                    db="pmc",
                    id=pmc_id,
                    rettype="xml",
                    retstart=retstart
                )
                if not full_text_handle:
                    break
                chunk = full_text_handle.read()
                full_text_handle.close()
                if isinstance(chunk, bytes):
                    chunk = chunk.decode('utf-8')
                content += chunk
                # Check if there might be more content; these marker strings
                # are a heuristic for a truncated Entrez response.
                if "[truncated]" not in chunk and "Result too long" not in chunk:
                    break
                # Increment retstart for next chunk.
                # NOTE(review): retstart advances by character count here,
                # but Entrez documents retstart as a record offset — verify
                # that this pagination resumes where intended.
                retstart += len(chunk)
                # Add small delay to respect API rate limits.
                time.sleep(0.5)
            return content
        except Exception as e:
            logger.exception(f"Error getting full text for PMID {pmid}: {str(e)}")
            return None | python | MIT | de245d350c456df353363a50051ed5547dedafc0 | 2026-01-05T07:12:06.493732Z | false
andybrandt/mcp-simple-pubmed | https://github.com/andybrandt/mcp-simple-pubmed/blob/de245d350c456df353363a50051ed5547dedafc0/mcp_simple_pubmed/pubmed_client.py | mcp_simple_pubmed/pubmed_client.py | """
Client for interacting with PubMed/Entrez API.
"""
import os
import time
import logging
import http.client
import xml.etree.ElementTree as ET
from typing import List, Dict, Optional, Any
from Bio import Entrez
# Configure logging
# NOTE: basicConfig at import time configures the process-wide root logger.
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("pubmed-client")
class PubMedClient:
    """Client for interacting with PubMed/Entrez API."""

    def __init__(self, email: str, tool: str, api_key: Optional[str] = None):
        """Initialize PubMed client with required credentials.

        Args:
            email: Valid email address for API access
            tool: Unique identifier for the tool
            api_key: Optional API key for higher rate limits
        """
        self.email = email
        self.tool = tool
        self.api_key = api_key
        # Configure Entrez
        # NOTE(review): Bio.Entrez settings are module-global; if several
        # clients are constructed with different credentials the last one
        # wins — confirm this is acceptable.
        Entrez.email = email
        Entrez.tool = tool
        if api_key:
            Entrez.api_key = api_key

    async def search_articles(self, query: str, max_results: int = 10) -> List[Dict[str, Any]]:
        """Search for articles matching the query.

        Args:
            query: Search query string
            max_results: Maximum number of results to return

        Returns:
            List of article metadata dictionaries
        """
        try:
            logger.info(f"Searching PubMed with query: {query}")
            results = []
            # Step 1: Search for article IDs
            handle = Entrez.esearch(db="pubmed", term=query, retmax=str(max_results))
            if not handle:
                logger.error("Got None handle from esearch")
                return []
            if isinstance(handle, http.client.HTTPResponse):
                logger.info("Got valid HTTP response from esearch")
                xml_content = handle.read()
                handle.close()
                # Parse XML to get IDs
                root = ET.fromstring(xml_content)
                id_list = root.findall('.//Id')
                if not id_list:
                    logger.info("No results found")
                    return []
                pmids = [id_elem.text for id_elem in id_list]
                logger.info(f"Found {len(pmids)} articles")
                # Step 2: Get details for each article; articles that fail
                # to fetch return None and are silently skipped.
                for pmid in pmids:
                    article = await self.get_article_details(pmid)
                    if article:
                        results.append(article)
            # NOTE(review): if the esearch handle is not an HTTPResponse the
            # branch above is skipped and an empty list is returned — verify
            # that Bio.Entrez always yields an HTTPResponse here.
            return results
        except Exception as e:
            logger.exception(f"Error in search_articles: {str(e)}")
            raise

    async def get_article_details(self, pmid: str) -> Optional[Dict[str, Any]]:
        """Get details for a specific article by PMID.

        Args:
            pmid: PubMed ID of the article

        Returns:
            Dictionary with article metadata or None if not found
        """
        try:
            logger.info(f"Fetching details for PMID {pmid}")
            detail_handle = Entrez.efetch(db="pubmed", id=pmid, rettype="xml")
            if detail_handle and isinstance(detail_handle, http.client.HTTPResponse):
                article_xml = detail_handle.read()
                detail_handle.close()
                # Parse article details
                article_root = ET.fromstring(article_xml)
                # Get basic article data
                article = {
                    "pmid": pmid,
                    "title": self._get_xml_text(article_root, './/ArticleTitle') or "No title",
                    "abstract": self._get_full_abstract(article_root) or "No abstract available",
                    "journal": self._get_xml_text(article_root, './/Journal/Title') or "",
                    "authors": [],
                    "keywords": [],
                    "mesh_terms": []
                }
                # Get authors ("LastName ForeName" strings)
                author_list = article_root.findall('.//Author')
                for author in author_list:
                    last_name = self._get_xml_text(author, 'LastName') or ""
                    fore_name = self._get_xml_text(author, 'ForeName') or ""
                    if last_name or fore_name:
                        article["authors"].append(f"{last_name} {fore_name}".strip())
                # Get publication date (components may be None if absent)
                pub_date = article_root.find('.//PubDate')
                if pub_date is not None:
                    year = self._get_xml_text(pub_date, 'Year')
                    month = self._get_xml_text(pub_date, 'Month')
                    day = self._get_xml_text(pub_date, 'Day')
                    article["publication_date"] = {
                        "year": year,
                        "month": month,
                        "day": day
                    }
                # Get DOI and PMCID if available
                # Important: Only get ArticleIds from the main ArticleIdList, not from references
                pubmed_data = article_root.find('.//PubmedData')
                if pubmed_data is not None:
                    # Use direct child path to avoid getting IDs from ReferenceList
                    article_id_list_elem = pubmed_data.find('ArticleIdList')
                    if article_id_list_elem is not None:
                        for article_id in article_id_list_elem:
                            id_type = article_id.get('IdType')
                            if id_type == 'doi':
                                article["doi"] = article_id.text
                            elif id_type == 'pmc':
                                article["pmcid"] = article_id.text
                # Get Keywords
                keyword_list = article_root.findall('.//Keyword')
                for keyword in keyword_list:
                    if keyword.text:
                        # Clean up keyword text (remove trailing periods, etc.)
                        clean_keyword = keyword.text.strip().rstrip('.')
                        if clean_keyword:
                            article["keywords"].append(clean_keyword)
                # Get MeSH terms (descriptor + UI + optional qualifiers)
                mesh_heading_list = article_root.findall('.//MeshHeading')
                for mesh_heading in mesh_heading_list:
                    descriptor = mesh_heading.find('DescriptorName')
                    if descriptor is not None and descriptor.text:
                        mesh_term = {
                            "descriptor": descriptor.text,
                            "ui": descriptor.get('UI', ''),
                            "qualifiers": []
                        }
                        # Get qualifiers if present
                        qualifiers = mesh_heading.findall('QualifierName')
                        for qualifier in qualifiers:
                            if qualifier.text:
                                mesh_term["qualifiers"].append({
                                    "name": qualifier.text,
                                    "ui": qualifier.get('UI', '')
                                })
                        article["mesh_terms"].append(mesh_term)
                return article
            return None
        except Exception as e:
            logger.exception(f"Error getting article details for PMID {pmid}: {str(e)}")
            return None

    def _get_xml_text(self, elem: Optional[ET.Element], xpath: str) -> Optional[str]:
        """Helper method to safely get text from XML element."""
        if elem is None:
            return None
        found = elem.find(xpath)
        return found.text if found is not None else None

    def _get_full_abstract(self, article_root: Optional[ET.Element]) -> Optional[str]:
        """Get complete abstract text, handling structured abstracts with multiple sections."""
        if article_root is None:
            return None
        abstract_texts = article_root.findall('.//Abstract/AbstractText')
        if not abstract_texts:
            return None
        # If there's only one AbstractText element, return it directly.
        # NOTE(review): .text drops text after inline child markup (e.g.
        # <i>...</i>) — confirm abstracts with inline tags render fully.
        if len(abstract_texts) == 1:
            return abstract_texts[0].text
        # For structured abstracts with multiple sections
        abstract_parts = []
        for text_elem in abstract_texts:
            label = text_elem.get('Label')
            text = text_elem.text or ""
            if label:
                # Format as "LABEL: text"
                abstract_parts.append(f"{label}: {text}")
            else:
                abstract_parts.append(text)
        # Join all parts with double newline for readability
        return "\n\n".join(abstract_parts) | python | MIT | de245d350c456df353363a50051ed5547dedafc0 | 2026-01-05T07:12:06.493732Z | false
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.