Initial commit (Clean history)

This commit is contained in:
anhduy-tech
2025-12-30 11:27:14 +07:00
commit ef48c93de0
19255 changed files with 3248867 additions and 0 deletions

View File

@@ -0,0 +1,590 @@
# Copyright 2014 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Shared helpers for Google Cloud packages.
This module is not part of the public API surface.
"""
from __future__ import absolute_import
import calendar
import datetime
import http.client
import os
import re
from threading import local as Local
from typing import Union
import google.auth
import google.auth.transport.requests
from google.protobuf import duration_pb2
from google.protobuf import timestamp_pb2
try:
import grpc
import google.auth.transport.grpc
except ImportError: # pragma: NO COVER
grpc = None
# `google.cloud._helpers._NOW` is deprecated
# (kept for backward compatibility with older callers only).
_NOW = datetime.datetime.utcnow
UTC = datetime.timezone.utc  # Singleton instance to be used throughout.
# The Unix epoch as an aware UTC datetime; basis for all integer-timestamp
# conversions in this module.
_EPOCH = datetime.datetime(1970, 1, 1, tzinfo=datetime.timezone.utc)
# strftime/strptime formats for RFC 3339 timestamps and bare clock times.
_RFC3339_MICROS = "%Y-%m-%dT%H:%M:%S.%fZ"
_RFC3339_NO_FRACTION = "%Y-%m-%dT%H:%M:%S"
_TIMEONLY_W_MICROS = "%H:%M:%S.%f"
_TIMEONLY_NO_FRACTION = "%H:%M:%S"
# datetime.strptime cannot handle nanosecond precision: parse w/ regex
_RFC3339_NANOS = re.compile(
    r"""
    (?P<no_fraction>
        \d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}  # YYYY-MM-DDTHH:MM:SS
    )
    (                                        # Optional decimal part
     \.                                      # decimal point
     (?P<nanos>\d{1,9})                      # nanoseconds, maybe truncated
    )?
    Z                                        # Zulu
""",
    re.VERBOSE,
)
# NOTE: Catching this ImportError is a workaround for GAE not supporting the
# "pwd" module which is imported lazily when "expanduser" is called.
_USER_ROOT: Union[str, None]
try:
    _USER_ROOT = os.path.expanduser("~")
except ImportError:  # pragma: NO COVER
    _USER_ROOT = None
# Location of the gcloud CLI's default configuration file, relative to the
# user's configuration directory; used to discover a default project id.
_GCLOUD_CONFIG_FILE = os.path.join("gcloud", "configurations", "config_default")
_GCLOUD_CONFIG_SECTION = "core"
_GCLOUD_CONFIG_KEY = "project"
class _LocalStack(Local):
    """Thread-local LIFO stack of resources.

    Intended for use in :class:`google.cloud.datastore.batch.Batch.__enter__`,
    :class:`google.cloud.storage.batch.Batch.__enter__`, etc.
    """

    def __init__(self):
        super(_LocalStack, self).__init__()
        self._stack = []

    def __iter__(self):
        """Iterate the stack in LIFO order (most recently pushed first)."""
        return iter(self._stack[::-1])

    def push(self, resource):
        """Place ``resource`` on top of the stack."""
        self._stack.append(resource)

    def pop(self):
        """Remove and return the top-most resource.

        :rtype: object
        :returns: the top-most resource, after removing it.
        :raises IndexError: if the stack is empty.
        """
        return self._stack.pop()

    @property
    def top(self):
        """The top-most resource without removing it.

        :rtype: object
        :returns: the top-most item, or None if the stack is empty.
        """
        return self._stack[-1] if self._stack else None
def _ensure_tuple_or_list(arg_name, tuple_or_list):
    """Validate that an input is a tuple or list, returning it as a list.

    This effectively reduces the iterable types allowed to a very short
    allowlist: list and tuple.

    :type arg_name: str
    :param arg_name: Name of argument to use in error message.

    :type tuple_or_list: sequence of str
    :param tuple_or_list: Sequence to be verified.

    :rtype: list of str
    :returns: The ``tuple_or_list`` passed in cast to a ``list``.
    :raises TypeError: if the ``tuple_or_list`` is not a tuple or list.
    """
    if isinstance(tuple_or_list, (tuple, list)):
        return list(tuple_or_list)
    raise TypeError(
        "Expected %s to be a tuple or list. "
        "Received %r" % (arg_name, tuple_or_list)
    )
def _determine_default_project(project=None):
    """Determine default project ID explicitly or implicitly as fall-back.

    See :func:`google.auth.default` for details on how the default project
    is determined.

    :type project: str
    :param project: Optional. The project name to use as default.

    :rtype: str or ``NoneType``
    :returns: Default project if it can be determined.
    """
    if project is not None:
        return project
    # Fall back to the ambient credentials' project (may still be None).
    _, default_project = google.auth.default()
    return default_project
def _millis(when):
    """Convert a zone-aware datetime to integer milliseconds.

    :type when: :class:`datetime.datetime`
    :param when: the datetime to convert

    :rtype: int
    :returns: milliseconds since epoch for ``when``
    """
    # Floor-divide the microsecond timestamp down to millisecond precision.
    return _microseconds_from_datetime(when) // 1000
def _datetime_from_microseconds(value):
    """Convert a microsecond timestamp to a datetime, assuming UTC.

    :type value: float
    :param value: The timestamp to convert

    :rtype: :class:`datetime.datetime`
    :returns: The datetime object created from the value.
    """
    epoch = datetime.datetime(1970, 1, 1, tzinfo=datetime.timezone.utc)
    return epoch + datetime.timedelta(microseconds=value)
def _microseconds_from_datetime(value):
    """Convert a non-None datetime to a microsecond timestamp.

    Naive datetimes are treated as UTC; aware datetimes are converted
    to UTC before the conversion.

    :type value: :class:`datetime.datetime`
    :param value: The timestamp to convert.

    :rtype: int
    :returns: The timestamp, in microseconds since the epoch.
    """
    utc = datetime.timezone.utc
    if not value.tzinfo:
        value = value.replace(tzinfo=utc)
    # Normalize any other timezone to UTC before computing the timestamp.
    value = value.astimezone(utc)
    # timegm() works at second granularity; re-attach the microseconds.
    whole_seconds = calendar.timegm(value.timetuple())
    return int(whole_seconds * 1e6) + value.microsecond
def _millis_from_datetime(value):
    """Convert an optional datetime to a millisecond timestamp, assuming UTC.

    :type value: :class:`datetime.datetime`
    :param value: (Optional) the timestamp

    :rtype: int, or ``NoneType``
    :returns: the timestamp, in milliseconds, or None when ``value`` is None
    """
    if value is None:
        return None
    return _millis(value)
def _date_from_iso8601_date(value):
    """Convert an ISO8601 date string to a native datetime date.

    :type value: str
    :param value: The date string to convert (``YYYY-MM-DD``).

    :rtype: :class:`datetime.date`
    :returns: A datetime date object created from the string
    """
    parsed = datetime.datetime.strptime(value, "%Y-%m-%d")
    return parsed.date()
def _time_from_iso8601_time_naive(value):
    """Convert a zoneless ISO8601 time string to a naive datetime time.

    :type value: str
    :param value: The time string to convert.

    :rtype: :class:`datetime.time`
    :returns: A datetime time object created from the string
    :raises ValueError: if the value does not match a known format.
    """
    # Only two shapes are accepted, distinguished by exact length:
    # "HH:MM:SS" (8 chars) and "HH:MM:SS.ffffff" (15 chars).
    formats = {8: "%H:%M:%S", 15: "%H:%M:%S.%f"}
    fmt = formats.get(len(value))
    if fmt is None:
        raise ValueError("Unknown time format: {}".format(value))
    return datetime.datetime.strptime(value, fmt).time()
def _rfc3339_to_datetime(dt_str):
    """Convert a microsecond-precision RFC 3339 timestamp to a datetime.

    :type dt_str: str
    :param dt_str: The string to convert.

    :rtype: :class:`datetime.datetime`
    :returns: The UTC-aware datetime object created from the string.
    """
    parsed = datetime.datetime.strptime(dt_str, "%Y-%m-%dT%H:%M:%S.%fZ")
    return parsed.replace(tzinfo=datetime.timezone.utc)
def _rfc3339_nanos_to_datetime(dt_str):
    """Convert a nanosecond-precision RFC 3339 timestamp to a datetime.

    .. note::
        Python datetimes do not support nanosecond precision; this function
        therefore truncates such values to microseconds.

    :type dt_str: str
    :param dt_str: The string to convert.

    :rtype: :class:`datetime.datetime`
    :returns: The datetime object created from the string.
    :raises ValueError: If the timestamp does not match the RFC 3339
                        regular expression.
    """
    match = _RFC3339_NANOS.match(dt_str)
    if match is None:
        raise ValueError(
            "Timestamp: %r, does not match pattern: %r"
            % (dt_str, _RFC3339_NANOS.pattern)
        )
    # Parse the whole-second part; the fraction is handled separately below.
    bare_seconds = datetime.datetime.strptime(
        match.group("no_fraction"), _RFC3339_NO_FRACTION
    )
    fraction = match.group("nanos")
    if fraction is None:
        micros = 0
    else:
        # Right-pad the (possibly truncated) fraction to 9 digits of
        # nanoseconds, then truncate to microseconds.
        nanos = int(fraction) * 10 ** (9 - len(fraction))
        micros = nanos // 1000
    return bare_seconds.replace(microsecond=micros, tzinfo=UTC)
def _datetime_to_rfc3339(value, ignore_zone=True):
    """Convert a timestamp to a microsecond-precision RFC 3339 string.

    :type value: :class:`datetime.datetime`
    :param value: The datetime object to be converted to a string.

    :type ignore_zone: bool
    :param ignore_zone: If True, then the timezone (if any) of the datetime
                        object is ignored.

    :rtype: str
    :returns: The string representing the datetime stamp.
    """
    if value.tzinfo is not None and not ignore_zone:
        # Shift the wall-clock time by the UTC offset, then drop the zone.
        value = value.replace(tzinfo=None) - value.utcoffset()
    return value.strftime("%Y-%m-%dT%H:%M:%S.%fZ")
def _to_bytes(value, encoding="ascii"):
    """Convert a string value to bytes, if necessary.

    :type value: str / bytes or unicode
    :param value: The string/bytes value to be converted.

    :type encoding: str
    :param encoding: The encoding used to convert unicode to bytes.
                     Defaults to "ascii", which rejects characters with
                     ordinals above 127.  Other useful values are
                     "latin-1", which allows only single-byte ordinals
                     (up to 255), and "utf-8", which can encode any
                     unicode text.

    :rtype: str / bytes
    :returns: The original value converted to bytes (if unicode) or as passed
              in if it started out as bytes.
    :raises TypeError: if the value could not be converted to bytes.
    """
    if isinstance(value, str):
        return value.encode(encoding)
    if isinstance(value, bytes):
        return value
    raise TypeError("%r could not be converted to bytes" % (value,))
def _bytes_to_unicode(value):
    """Convert bytes to a unicode value, if necessary.

    :type value: bytes
    :param value: bytes value to attempt string conversion on.

    :rtype: str
    :returns: The original value converted to unicode (if bytes) or as passed
              in if it started out as unicode.
    :raises ValueError: if the value could not be converted to unicode.
    """
    if isinstance(value, bytes):
        return value.decode("utf-8")
    if isinstance(value, str):
        return value
    raise ValueError("%r could not be converted to unicode" % (value,))
def _from_any_pb(pb_type, any_pb):
    """Convert an Any protobuf to the specified message type.

    Args:
        pb_type (type): the type of the message that any_pb stores an instance
            of.
        any_pb (google.protobuf.any_pb2.Any): the object to be converted.

    Returns:
        pb_type: An instance of the pb_type message.

    Raises:
        TypeError: if the message could not be converted.
    """
    msg = pb_type()
    # Unpack() returns False when the Any's type URL does not match pb_type.
    if any_pb.Unpack(msg):
        return msg
    raise TypeError(
        "Could not convert {} to {}".format(
            any_pb.__class__.__name__, pb_type.__name__
        )
    )
def _pb_timestamp_to_datetime(timestamp_pb):
    """Convert a Timestamp protobuf to a datetime object.

    :type timestamp_pb: :class:`google.protobuf.timestamp_pb2.Timestamp`
    :param timestamp_pb: A Google returned timestamp protobuf.

    :rtype: :class:`datetime.datetime`
    :returns: A UTC datetime object converted from a protobuf timestamp.
    """
    epoch = datetime.datetime(1970, 1, 1, tzinfo=datetime.timezone.utc)
    # Nanoseconds are folded into timedelta's microsecond resolution.
    delta = datetime.timedelta(
        seconds=timestamp_pb.seconds, microseconds=(timestamp_pb.nanos / 1000.0)
    )
    return epoch + delta
def _pb_timestamp_to_rfc3339(timestamp_pb):
    """Convert a Timestamp protobuf to an RFC 3339 string.

    :type timestamp_pb: :class:`google.protobuf.timestamp_pb2.Timestamp`
    :param timestamp_pb: A Google returned timestamp protobuf.

    :rtype: str
    :returns: An RFC 3339 formatted timestamp string.
    """
    # Compose the two sibling helpers: protobuf -> datetime -> string.
    return _datetime_to_rfc3339(_pb_timestamp_to_datetime(timestamp_pb))
def _datetime_to_pb_timestamp(when):
    """Convert a datetime object to a Timestamp protobuf.

    :type when: :class:`datetime.datetime`
    :param when: the datetime to convert

    :rtype: :class:`google.protobuf.timestamp_pb2.Timestamp`
    :returns: A timestamp protobuf corresponding to the object.
    """
    total_micros = _microseconds_from_datetime(when)
    # Split into whole seconds plus a sub-second remainder, then widen the
    # remainder from microseconds to the protobuf's nanosecond field.
    seconds, remainder_micros = divmod(total_micros, 10**6)
    return timestamp_pb2.Timestamp(seconds=seconds, nanos=remainder_micros * 10**3)
def _timedelta_to_duration_pb(timedelta_val):
    """Convert a Python timedelta object to a duration protobuf.

    .. note::
        The Python timedelta has a granularity of microseconds while
        the protobuf duration type has a granularity of nanoseconds.

    :type timedelta_val: :class:`datetime.timedelta`
    :param timedelta_val: A timedelta object.

    :rtype: :class:`google.protobuf.duration_pb2.Duration`
    :returns: A duration object equivalent to the time delta.
    """
    result = duration_pb2.Duration()
    result.FromTimedelta(timedelta_val)
    return result
def _duration_pb_to_timedelta(duration_pb):
    """Convert a duration protobuf to a Python timedelta object.

    .. note::
        The Python timedelta has a granularity of microseconds while
        the protobuf duration type has a granularity of nanoseconds.

    :type duration_pb: :class:`google.protobuf.duration_pb2.Duration`
    :param duration_pb: A protobuf duration object.

    :rtype: :class:`datetime.timedelta`
    :returns: The converted timedelta object.
    """
    # Nanoseconds collapse into timedelta's microsecond resolution.
    micros = duration_pb.nanos / 1000.0
    return datetime.timedelta(seconds=duration_pb.seconds, microseconds=micros)
def _name_from_project_path(path, project, template):
    """Validate a URI path and get the leaf object's name.

    :type path: str
    :param path: URI path containing the name.

    :type project: str
    :param project: (Optional) The project associated with the request. It is
                    included for validation purposes. If passed as None,
                    disables validation.

    :type template: str
    :param template: Template regex describing the expected form of the path.
                     The regex must have two named groups, 'project' and
                     'name'.

    :rtype: str
    :returns: Name parsed from ``path``.
    :raises ValueError: if the ``path`` is ill-formed or if the project from
                        the ``path`` does not agree with the ``project``
                        passed in.
    """
    # Accept either a pre-compiled pattern or a raw pattern string.
    if isinstance(template, str):
        template = re.compile(template)

    match = template.match(path)
    if match is None:
        raise ValueError(
            'path "%s" did not match expected pattern "%s"' % (path, template.pattern)
        )

    if project is not None:
        path_project = match.group("project")
        if path_project != project:
            raise ValueError(
                "Project from client (%s) should agree with "
                "project from resource(%s)." % (project, path_project)
            )

    return match.group("name")
def make_secure_channel(credentials, user_agent, host, extra_options=()):
    """Make a secure channel for an RPC service.

    Uses / depends on gRPC.

    :type credentials: :class:`google.auth.credentials.Credentials`
    :param credentials: The OAuth2 Credentials to use for creating
                        access tokens.

    :type user_agent: str
    :param user_agent: The user agent to be used with API requests.

    :type host: str
    :param host: The host for the service.

    :type extra_options: tuple
    :param extra_options: (Optional) Extra gRPC options used when creating the
                          channel.

    :rtype: :class:`grpc._channel.Channel`
    :returns: gRPC secure channel with credentials attached.
    """
    # Always connect on the standard HTTPS port.
    target = "%s:%d" % (host, http.client.HTTPS_PORT)
    http_request = google.auth.transport.requests.Request()
    options = (("grpc.primary_user_agent", user_agent),) + extra_options
    return google.auth.transport.grpc.secure_authorized_channel(
        credentials, http_request, target, options=options
    )
def make_secure_stub(credentials, user_agent, stub_class, host, extra_options=()):
    """Make a secure stub for an RPC service.

    Uses / depends on gRPC.

    :type credentials: :class:`google.auth.credentials.Credentials`
    :param credentials: The OAuth2 Credentials to use for creating
                        access tokens.

    :type user_agent: str
    :param user_agent: The user agent to be used with API requests.

    :type stub_class: type
    :param stub_class: A gRPC stub type for a given service.

    :type host: str
    :param host: The host for the service.

    :type extra_options: tuple
    :param extra_options: (Optional) Extra gRPC options passed when creating
                          the channel.

    :rtype: object, instance of ``stub_class``
    :returns: The stub object used to make gRPC requests to a given API.
    """
    # Build the authorized channel, then wrap it in the service stub.
    return stub_class(
        make_secure_channel(credentials, user_agent, host, extra_options=extra_options)
    )
def make_insecure_stub(stub_class, host, port=None):
    """Make an insecure stub for an RPC service.

    Uses / depends on gRPC.

    :type stub_class: type
    :param stub_class: A gRPC stub type for a given service.

    :type host: str
    :param host: The host for the service. May also include the port
                 if ``port`` is unspecified.

    :type port: int
    :param port: (Optional) The port for the service.

    :rtype: object, instance of ``stub_class``
    :returns: The stub object used to make gRPC requests to a given API.
    """
    if port is not None:
        # NOTE: This assumes port != http.client.HTTPS_PORT:
        target = "%s:%d" % (host, port)
    else:
        target = host
    return stub_class(grpc.insecure_channel(target))

View File

@@ -0,0 +1,2 @@
# Marker file for PEP 561.
# This package uses inline types.

View File

@@ -0,0 +1,499 @@
# Copyright 2014 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Shared implementation of connections to API servers."""
import collections
import collections.abc
import json
import os
import platform
from typing import Optional
from urllib.parse import urlencode
import warnings
from google.api_core.client_info import ClientInfo
from google.cloud import exceptions
from google.cloud import version
# Default endpoint for Google's discovery-based JSON APIs.
API_BASE_URL = "https://www.googleapis.com"
"""The base of the API call URL."""
DEFAULT_USER_AGENT = "gcloud-python/{0}".format(version.__version__)
"""The user agent for google-cloud-python requests."""
# Header used to report client library metrics to Google APIs.
CLIENT_INFO_HEADER = "X-Goog-API-Client"
CLIENT_INFO_TEMPLATE = "gl-python/" + platform.python_version() + " gccl/{}"
# Deprecation messages emitted by the legacy ALL-CAPS aliases on Connection.
_USER_AGENT_ALL_CAPS_DEPRECATED = """\
The 'USER_AGENT' class-level attribute is deprecated. Please use
'user_agent' instead.
"""
_EXTRA_HEADERS_ALL_CAPS_DEPRECATED = """\
The '_EXTRA_HEADERS' class-level attribute is deprecated. Please use
'extra_headers' instead.
"""
_DEFAULT_TIMEOUT = 60  # in seconds
class Connection(object):
    """A generic connection to Google Cloud Platform.

    :type client: :class:`~google.cloud.client.Client`
    :param client: The client that owns the current connection.

    :type client_info: :class:`~google.api_core.client_info.ClientInfo`
    :param client_info: (Optional) instance used to generate user agent.
    """

    _user_agent = DEFAULT_USER_AGENT

    def __init__(self, client, client_info=None):
        self._client = client
        self._client_info = ClientInfo() if client_info is None else client_info
        self._extra_headers = {}

    @property
    def USER_AGENT(self):
        """Deprecated alias: get / set user agent sent by connection.

        :rtype: str
        :returns: user agent
        """
        warnings.warn(_USER_AGENT_ALL_CAPS_DEPRECATED, DeprecationWarning, stacklevel=2)
        return self.user_agent

    @USER_AGENT.setter
    def USER_AGENT(self, value):
        warnings.warn(_USER_AGENT_ALL_CAPS_DEPRECATED, DeprecationWarning, stacklevel=2)
        self.user_agent = value

    @property
    def user_agent(self):
        """Get / set user agent sent by connection (delegates to client info).

        :rtype: str
        :returns: user agent
        """
        return self._client_info.to_user_agent()

    @user_agent.setter
    def user_agent(self, value):
        self._client_info.user_agent = value

    @property
    def _EXTRA_HEADERS(self):
        """Deprecated alias: get / set extra headers sent by connection.

        :rtype: dict
        :returns: header keys / values
        """
        warnings.warn(
            _EXTRA_HEADERS_ALL_CAPS_DEPRECATED, DeprecationWarning, stacklevel=2
        )
        return self.extra_headers

    @_EXTRA_HEADERS.setter
    def _EXTRA_HEADERS(self, value):
        warnings.warn(
            _EXTRA_HEADERS_ALL_CAPS_DEPRECATED, DeprecationWarning, stacklevel=2
        )
        self.extra_headers = value

    @property
    def extra_headers(self):
        """Get / set extra headers merged into every request.

        :rtype: dict
        :returns: header keys / values
        """
        return self._extra_headers

    @extra_headers.setter
    def extra_headers(self, value):
        self._extra_headers = value

    @property
    def credentials(self):
        """Credentials of the owning client.

        :rtype: :class:`google.auth.credentials.Credentials` or
                :class:`NoneType`
        :returns: The credentials object associated with this connection.
        """
        return self._client._credentials

    @property
    def http(self):
        """HTTP transport of the owning client.

        Returns:
            google.auth.transport.requests.AuthorizedSession:
                A :class:`requests.Session` instance.
        """
        return self._client._http
class JSONConnection(Connection):
    """A connection to a Google JSON-based API.

    These APIs are discovery based. For reference:
    https://developers.google.com/discovery/

    This defines :meth:`api_request` for making a generic JSON
    API request and API requests are created elsewhere.

    * :attr:`API_BASE_URL`
    * :attr:`API_VERSION`
    * :attr:`API_URL_TEMPLATE`

    must be updated by subclasses.
    """

    # Class-level configuration; subclasses override these to point at a
    # concrete service endpoint.
    API_BASE_URL: Optional[str] = None
    """The base of the API call URL."""

    API_BASE_MTLS_URL: Optional[str] = None
    """The base of the API call URL for mutual TLS."""

    ALLOW_AUTO_SWITCH_TO_MTLS_URL = False
    """Indicates if auto switch to mTLS url is allowed."""

    API_VERSION: Optional[str] = None
    """The version of the API, used in building the API call's URL."""

    API_URL_TEMPLATE: Optional[str] = None
    """A template for the URL of a particular API call."""

    def get_api_base_url_for_mtls(self, api_base_url=None):
        """Return the api base url for mutual TLS.

        Typically, you shouldn't need to use this method.

        The logic is as follows:

        If `api_base_url` is provided, just return this value; otherwise, the
        return value depends `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable
        value.

        If the environment variable value is "always", return `API_BASE_MTLS_URL`.
        If the environment variable value is "never", return `API_BASE_URL`.
        Otherwise, if `ALLOW_AUTO_SWITCH_TO_MTLS_URL` is True and the underlying
        http is mTLS, then return `API_BASE_MTLS_URL`; otherwise return
        `API_BASE_URL`.

        :type api_base_url: str
        :param api_base_url: User provided api base url. It takes precedence over
                             `API_BASE_URL` and `API_BASE_MTLS_URL`.

        :rtype: str
        :returns: The api base url used for mTLS.
        """
        # An explicit caller-provided base URL always wins.
        if api_base_url:
            return api_base_url
        env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
        if env == "always":
            url_to_use = self.API_BASE_MTLS_URL
        elif env == "never":
            url_to_use = self.API_BASE_URL
        else:
            # "auto" (or any other value): switch to the mTLS endpoint only
            # when the class opts in AND the transport reports it is mTLS.
            if self.ALLOW_AUTO_SWITCH_TO_MTLS_URL:
                url_to_use = (
                    self.API_BASE_MTLS_URL if self.http.is_mtls else self.API_BASE_URL
                )
            else:
                url_to_use = self.API_BASE_URL
        return url_to_use

    def build_api_url(
        self, path, query_params=None, api_base_url=None, api_version=None
    ):
        """Construct an API url given a few components, some optional.

        Typically, you shouldn't need to use this method.

        :type path: str
        :param path: The path to the resource (ie, ``'/b/bucket-name'``).

        :type query_params: dict or list
        :param query_params: A dictionary of keys and values (or list of
                             key-value pairs) to insert into the query
                             string of the URL.

        :type api_base_url: str
        :param api_base_url: The base URL for the API endpoint.
                             Typically you won't have to provide this.

        :type api_version: str
        :param api_version: The version of the API to call.
                            Typically you shouldn't provide this and instead
                            use the default for the library.

        :rtype: str
        :returns: The URL assembled from the pieces provided.
        """
        url = self.API_URL_TEMPLATE.format(
            api_base_url=self.get_api_base_url_for_mtls(api_base_url),
            api_version=(api_version or self.API_VERSION),
            path=path,
        )
        query_params = query_params or {}
        if isinstance(query_params, collections.abc.Mapping):
            # Copy so the caller's mapping is not mutated by setdefault below.
            query_params = query_params.copy()
        else:
            # A list of (key, value) pairs: group repeated keys into lists so
            # urlencode(..., doseq=True) emits one query arg per value.
            query_params_dict = collections.defaultdict(list)
            for key, value in query_params:
                query_params_dict[key].append(value)
            query_params = query_params_dict
        query_params.setdefault("prettyPrint", "false")
        url += "?" + urlencode(query_params, doseq=True)
        return url

    def _make_request(
        self,
        method,
        url,
        data=None,
        content_type=None,
        headers=None,
        target_object=None,
        timeout=_DEFAULT_TIMEOUT,
        extra_api_info=None,
    ):
        """A low level method to send a request to the API.

        Typically, you shouldn't need to use this method.

        :type method: str
        :param method: The HTTP method to use in the request.

        :type url: str
        :param url: The URL to send the request to.

        :type data: str
        :param data: The data to send as the body of the request.

        :type content_type: str
        :param content_type: The proper MIME type of the data provided.

        :type headers: dict
        :param headers: (Optional) A dictionary of HTTP headers to send with
                        the request. If passed, will be modified directly
                        here with added headers.

        :type target_object: object
        :param target_object:
            (Optional) Argument to be used by library callers. This can allow
            custom behavior, for example, to defer an HTTP request and complete
            initialization of the object at a later time.

        :type timeout: float or tuple
        :param timeout: (optional) The amount of time, in seconds, to wait
            for the server response.

            Can also be passed as a tuple (connect_timeout, read_timeout).
            See :meth:`requests.Session.request` documentation for details.

        :type extra_api_info: string
        :param extra_api_info: (optional) Extra api info to be appended to
            the X-Goog-API-Client header

        :rtype: :class:`requests.Response`
        :returns: The HTTP response.
        """
        # NOTE: the caller-supplied ``headers`` dict is intentionally mutated
        # in place, per the docstring above.
        headers = headers or {}
        headers.update(self.extra_headers)
        headers["Accept-Encoding"] = "gzip"
        if content_type:
            headers["Content-Type"] = content_type
        if extra_api_info:
            headers[CLIENT_INFO_HEADER] = f"{self.user_agent} {extra_api_info}"
        else:
            headers[CLIENT_INFO_HEADER] = self.user_agent
        headers["User-Agent"] = self.user_agent
        return self._do_request(
            method, url, headers, data, target_object, timeout=timeout
        )

    def _do_request(
        self, method, url, headers, data, target_object, timeout=_DEFAULT_TIMEOUT
    ):  # pylint: disable=unused-argument
        """Low-level helper: perform the actual API request over HTTP.

        Allows batch context managers to override and defer a request.

        :type method: str
        :param method: The HTTP method to use in the request.

        :type url: str
        :param url: The URL to send the request to.

        :type headers: dict
        :param headers: A dictionary of HTTP headers to send with the request.

        :type data: str
        :param data: The data to send as the body of the request.

        :type target_object: object
        :param target_object:
            (Optional) Unused ``target_object`` here but may be used by a
            superclass.

        :type timeout: float or tuple
        :param timeout: (optional) The amount of time, in seconds, to wait
            for the server response.

            Can also be passed as a tuple (connect_timeout, read_timeout).
            See :meth:`requests.Session.request` documentation for details.

        :rtype: :class:`requests.Response`
        :returns: The HTTP response.
        """
        return self.http.request(
            url=url, method=method, headers=headers, data=data, timeout=timeout
        )

    def api_request(
        self,
        method,
        path,
        query_params=None,
        data=None,
        content_type=None,
        headers=None,
        api_base_url=None,
        api_version=None,
        expect_json=True,
        _target_object=None,
        timeout=_DEFAULT_TIMEOUT,
        extra_api_info=None,
    ):
        """Make a request over the HTTP transport to the API.

        You shouldn't need to use this method, but if you plan to
        interact with the API using these primitives, this is the
        correct one to use.

        :type method: str
        :param method: The HTTP method name (ie, ``GET``, ``POST``, etc).
                       Required.

        :type path: str
        :param path: The path to the resource (ie, ``'/b/bucket-name'``).
                     Required.

        :type query_params: dict or list
        :param query_params: A dictionary of keys and values (or list of
                             key-value pairs) to insert into the query
                             string of the URL.

        :type data: str
        :param data: The data to send as the body of the request. Default is
                     the empty string.

        :type content_type: str
        :param content_type: The proper MIME type of the data provided. Default
                             is None.

        :type headers: dict
        :param headers: extra HTTP headers to be sent with the request.

        :type api_base_url: str
        :param api_base_url: The base URL for the API endpoint.
                             Typically you won't have to provide this.
                             Default is the standard API base URL.

        :type api_version: str
        :param api_version: The version of the API to call. Typically
                            you shouldn't provide this and instead use
                            the default for the library. Default is the
                            latest API version supported by
                            google-cloud-python.

        :type expect_json: bool
        :param expect_json: If True, this method will try to parse the
                            response as JSON and raise an exception if
                            that cannot be done. Default is True.

        :type _target_object: :class:`object`
        :param _target_object:
            (Optional) Protected argument to be used by library callers. This
            can allow custom behavior, for example, to defer an HTTP request
            and complete initialization of the object at a later time.

        :type timeout: float or tuple
        :param timeout: (optional) The amount of time, in seconds, to wait
            for the server response.

            Can also be passed as a tuple (connect_timeout, read_timeout).
            See :meth:`requests.Session.request` documentation for details.

        :type extra_api_info: string
        :param extra_api_info: (optional) Extra api info to be appended to
            the X-Goog-API-Client header

        :raises ~google.cloud.exceptions.GoogleCloudError: if the response code
            is not 200 OK.
        :raises ValueError: if the response content type is not JSON.
        :rtype: dict or str
        :returns: The API response payload, either as a raw string or
                  a dictionary if the response is valid JSON.
        """
        url = self.build_api_url(
            path=path,
            query_params=query_params,
            api_base_url=api_base_url,
            api_version=api_version,
        )
        # Making the executive decision that any dictionary
        # data will be sent properly as JSON.
        if data and isinstance(data, dict):
            data = json.dumps(data)
            content_type = "application/json"
        response = self._make_request(
            method=method,
            url=url,
            data=data,
            content_type=content_type,
            headers=headers,
            target_object=_target_object,
            timeout=timeout,
            extra_api_info=extra_api_info,
        )
        # Any non-2xx status is surfaced as a GoogleCloudError subclass.
        if not 200 <= response.status_code < 300:
            raise exceptions.from_http_response(response)
        if expect_json and response.content:
            return response.json()
        else:
            # Either the caller asked for raw content, or the body is empty.
            return response.content

View File

@@ -0,0 +1,2 @@
# Marker file for PEP 561.
# This package uses inline types.

View File

@@ -0,0 +1,140 @@
# -*- coding: utf-8 -*-
# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from google.cloud._storage_v2 import gapic_version as package_version
__version__ = package_version.__version__
from .services.storage import StorageClient
from .services.storage import StorageAsyncClient
from .types.storage import AppendObjectSpec
from .types.storage import BidiReadHandle
from .types.storage import BidiReadObjectError
from .types.storage import BidiReadObjectRedirectedError
from .types.storage import BidiReadObjectRequest
from .types.storage import BidiReadObjectResponse
from .types.storage import BidiReadObjectSpec
from .types.storage import BidiWriteHandle
from .types.storage import BidiWriteObjectRedirectedError
from .types.storage import BidiWriteObjectRequest
from .types.storage import BidiWriteObjectResponse
from .types.storage import Bucket
from .types.storage import BucketAccessControl
from .types.storage import CancelResumableWriteRequest
from .types.storage import CancelResumableWriteResponse
from .types.storage import ChecksummedData
from .types.storage import CommonObjectRequestParams
from .types.storage import ComposeObjectRequest
from .types.storage import ContentRange
from .types.storage import CreateBucketRequest
from .types.storage import CustomerEncryption
from .types.storage import DeleteBucketRequest
from .types.storage import DeleteObjectRequest
from .types.storage import GetBucketRequest
from .types.storage import GetObjectRequest
from .types.storage import ListBucketsRequest
from .types.storage import ListBucketsResponse
from .types.storage import ListObjectsRequest
from .types.storage import ListObjectsResponse
from .types.storage import LockBucketRetentionPolicyRequest
from .types.storage import MoveObjectRequest
from .types.storage import Object
from .types.storage import ObjectAccessControl
from .types.storage import ObjectChecksums
from .types.storage import ObjectContexts
from .types.storage import ObjectCustomContextPayload
from .types.storage import ObjectRangeData
from .types.storage import Owner
from .types.storage import ProjectTeam
from .types.storage import QueryWriteStatusRequest
from .types.storage import QueryWriteStatusResponse
from .types.storage import ReadObjectRequest
from .types.storage import ReadObjectResponse
from .types.storage import ReadRange
from .types.storage import ReadRangeError
from .types.storage import RestoreObjectRequest
from .types.storage import RewriteObjectRequest
from .types.storage import RewriteResponse
from .types.storage import ServiceConstants
from .types.storage import StartResumableWriteRequest
from .types.storage import StartResumableWriteResponse
from .types.storage import UpdateBucketRequest
from .types.storage import UpdateObjectRequest
from .types.storage import WriteObjectRequest
from .types.storage import WriteObjectResponse
from .types.storage import WriteObjectSpec
# Public API surface of this package: the generated clients plus every
# request/response/resource message re-exported from ``types.storage``.
__all__ = (
    "StorageAsyncClient",
    "AppendObjectSpec",
    "BidiReadHandle",
    "BidiReadObjectError",
    "BidiReadObjectRedirectedError",
    "BidiReadObjectRequest",
    "BidiReadObjectResponse",
    "BidiReadObjectSpec",
    "BidiWriteHandle",
    "BidiWriteObjectRedirectedError",
    "BidiWriteObjectRequest",
    "BidiWriteObjectResponse",
    "Bucket",
    "BucketAccessControl",
    "CancelResumableWriteRequest",
    "CancelResumableWriteResponse",
    "ChecksummedData",
    "CommonObjectRequestParams",
    "ComposeObjectRequest",
    "ContentRange",
    "CreateBucketRequest",
    "CustomerEncryption",
    "DeleteBucketRequest",
    "DeleteObjectRequest",
    "GetBucketRequest",
    "GetObjectRequest",
    "ListBucketsRequest",
    "ListBucketsResponse",
    "ListObjectsRequest",
    "ListObjectsResponse",
    "LockBucketRetentionPolicyRequest",
    "MoveObjectRequest",
    "Object",
    "ObjectAccessControl",
    "ObjectChecksums",
    "ObjectContexts",
    "ObjectCustomContextPayload",
    "ObjectRangeData",
    "Owner",
    "ProjectTeam",
    "QueryWriteStatusRequest",
    "QueryWriteStatusResponse",
    "ReadObjectRequest",
    "ReadObjectResponse",
    "ReadRange",
    "ReadRangeError",
    "RestoreObjectRequest",
    "RewriteObjectRequest",
    "RewriteResponse",
    "ServiceConstants",
    "StartResumableWriteRequest",
    "StartResumableWriteResponse",
    "StorageClient",
    "UpdateBucketRequest",
    "UpdateObjectRequest",
    "WriteObjectRequest",
    "WriteObjectResponse",
    "WriteObjectSpec",
)

View File

@@ -0,0 +1,263 @@
{
"comment": "This file maps proto services/RPCs to the corresponding library clients/methods",
"language": "python",
"libraryPackage": "google.cloud.storage_v2",
"protoPackage": "google.storage.v2",
"schema": "1.0",
"services": {
"Storage": {
"clients": {
"grpc": {
"libraryClient": "StorageClient",
"rpcs": {
"BidiReadObject": {
"methods": [
"bidi_read_object"
]
},
"BidiWriteObject": {
"methods": [
"bidi_write_object"
]
},
"CancelResumableWrite": {
"methods": [
"cancel_resumable_write"
]
},
"ComposeObject": {
"methods": [
"compose_object"
]
},
"CreateBucket": {
"methods": [
"create_bucket"
]
},
"DeleteBucket": {
"methods": [
"delete_bucket"
]
},
"DeleteObject": {
"methods": [
"delete_object"
]
},
"GetBucket": {
"methods": [
"get_bucket"
]
},
"GetIamPolicy": {
"methods": [
"get_iam_policy"
]
},
"GetObject": {
"methods": [
"get_object"
]
},
"ListBuckets": {
"methods": [
"list_buckets"
]
},
"ListObjects": {
"methods": [
"list_objects"
]
},
"LockBucketRetentionPolicy": {
"methods": [
"lock_bucket_retention_policy"
]
},
"MoveObject": {
"methods": [
"move_object"
]
},
"QueryWriteStatus": {
"methods": [
"query_write_status"
]
},
"ReadObject": {
"methods": [
"read_object"
]
},
"RestoreObject": {
"methods": [
"restore_object"
]
},
"RewriteObject": {
"methods": [
"rewrite_object"
]
},
"SetIamPolicy": {
"methods": [
"set_iam_policy"
]
},
"StartResumableWrite": {
"methods": [
"start_resumable_write"
]
},
"TestIamPermissions": {
"methods": [
"test_iam_permissions"
]
},
"UpdateBucket": {
"methods": [
"update_bucket"
]
},
"UpdateObject": {
"methods": [
"update_object"
]
},
"WriteObject": {
"methods": [
"write_object"
]
}
}
},
"grpc-async": {
"libraryClient": "StorageAsyncClient",
"rpcs": {
"BidiReadObject": {
"methods": [
"bidi_read_object"
]
},
"BidiWriteObject": {
"methods": [
"bidi_write_object"
]
},
"CancelResumableWrite": {
"methods": [
"cancel_resumable_write"
]
},
"ComposeObject": {
"methods": [
"compose_object"
]
},
"CreateBucket": {
"methods": [
"create_bucket"
]
},
"DeleteBucket": {
"methods": [
"delete_bucket"
]
},
"DeleteObject": {
"methods": [
"delete_object"
]
},
"GetBucket": {
"methods": [
"get_bucket"
]
},
"GetIamPolicy": {
"methods": [
"get_iam_policy"
]
},
"GetObject": {
"methods": [
"get_object"
]
},
"ListBuckets": {
"methods": [
"list_buckets"
]
},
"ListObjects": {
"methods": [
"list_objects"
]
},
"LockBucketRetentionPolicy": {
"methods": [
"lock_bucket_retention_policy"
]
},
"MoveObject": {
"methods": [
"move_object"
]
},
"QueryWriteStatus": {
"methods": [
"query_write_status"
]
},
"ReadObject": {
"methods": [
"read_object"
]
},
"RestoreObject": {
"methods": [
"restore_object"
]
},
"RewriteObject": {
"methods": [
"rewrite_object"
]
},
"SetIamPolicy": {
"methods": [
"set_iam_policy"
]
},
"StartResumableWrite": {
"methods": [
"start_resumable_write"
]
},
"TestIamPermissions": {
"methods": [
"test_iam_permissions"
]
},
"UpdateBucket": {
"methods": [
"update_bucket"
]
},
"UpdateObject": {
"methods": [
"update_object"
]
},
"WriteObject": {
"methods": [
"write_object"
]
}
}
}
}
}
}
}

View File

@@ -0,0 +1,16 @@
# -*- coding: utf-8 -*-
# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Placeholder version string; the release tooling substitutes the real
# version at release time (see the x-release-please-version marker).
__version__ = "0.0.0"  # {x-release-please-version}

View File

@@ -0,0 +1,2 @@
# Marker file for PEP 561.
# The google-cloud-storage package uses inline types.

View File

@@ -0,0 +1,15 @@
# -*- coding: utf-8 -*-
# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

View File

@@ -0,0 +1,22 @@
# -*- coding: utf-8 -*-
# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from .client import StorageClient
from .async_client import StorageAsyncClient
# Public re-exports: the synchronous and asyncio Storage service clients.
__all__ = (
    "StorageClient",
    "StorageAsyncClient",
)

View File

@@ -0,0 +1,352 @@
# -*- coding: utf-8 -*-
# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from google.api_core import gapic_v1
from google.api_core import retry as retries
from google.api_core import retry_async as retries_async
from typing import (
Any,
AsyncIterator,
Awaitable,
Callable,
Sequence,
Tuple,
Optional,
Iterator,
Union,
)
try:
OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None]
OptionalAsyncRetry = Union[
retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None
]
except AttributeError: # pragma: NO COVER
OptionalRetry = Union[retries.Retry, object, None] # type: ignore
OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore
from google.cloud._storage_v2.types import storage
class ListBucketsPager:
    """Pager that flattens ``list_buckets`` results into a bucket iterator.

    Wraps an initial
    :class:`google.cloud._storage_v2.types.ListBucketsResponse` and exposes
    ``__iter__`` over its ``buckets`` field, transparently issuing further
    ``ListBuckets`` requests whenever the current response carries a
    ``next_page_token``.

    Attribute lookups are proxied to the most recently fetched response,
    so only the latest page is retained.
    """

    def __init__(
        self,
        method: Callable[..., storage.ListBucketsResponse],
        request: storage.ListBucketsRequest,
        response: storage.ListBucketsResponse,
        *,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()
    ):
        """Instantiate the pager.

        Args:
            method (Callable): The bound client method that produced the
                initial response; reused to fetch subsequent pages.
            request (google.cloud._storage_v2.types.ListBucketsRequest):
                The initial request object.
            response (google.cloud._storage_v2.types.ListBucketsResponse):
                The initial response object.
            retry (google.api_core.retry.Retry): Designation of what errors,
                if any, should be retried.
            timeout (float): The timeout for each page request.
            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value
                pairs sent with each request as metadata. Each value must be
                ``str``, except for keys ending in ``-bin`` whose values
                must be ``bytes``.
        """
        self._method = method
        self._request = storage.ListBucketsRequest(request)
        self._response = response
        self._retry = retry
        self._timeout = timeout
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Delegate unknown attributes to the latest response object.
        return getattr(self._response, name)

    @property
    def pages(self) -> Iterator[storage.ListBucketsResponse]:
        page = self._response
        yield page
        while page.next_page_token:
            # Advance the cursor and fetch the next page.
            self._request.page_token = page.next_page_token
            page = self._method(
                self._request,
                retry=self._retry,
                timeout=self._timeout,
                metadata=self._metadata,
            )
            self._response = page
            yield page

    def __iter__(self) -> Iterator[storage.Bucket]:
        return (bucket for page in self.pages for bucket in page.buckets)

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}<{self._response!r}>"
class ListBucketsAsyncPager:
    """Async pager that flattens ``list_buckets`` results.

    Wraps an initial
    :class:`google.cloud._storage_v2.types.ListBucketsResponse` and exposes
    ``__aiter__`` over its ``buckets`` field, transparently awaiting further
    ``ListBuckets`` requests whenever the current response carries a
    ``next_page_token``.

    Attribute lookups are proxied to the most recently fetched response,
    so only the latest page is retained.
    """

    def __init__(
        self,
        method: Callable[..., Awaitable[storage.ListBucketsResponse]],
        request: storage.ListBucketsRequest,
        response: storage.ListBucketsResponse,
        *,
        retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()
    ):
        """Instantiate the pager.

        Args:
            method (Callable): The bound async client method that produced
                the initial response; reused to fetch subsequent pages.
            request (google.cloud._storage_v2.types.ListBucketsRequest):
                The initial request object.
            response (google.cloud._storage_v2.types.ListBucketsResponse):
                The initial response object.
            retry (google.api_core.retry.AsyncRetry): Designation of what
                errors, if any, should be retried.
            timeout (float): The timeout for each page request.
            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value
                pairs sent with each request as metadata. Each value must be
                ``str``, except for keys ending in ``-bin`` whose values
                must be ``bytes``.
        """
        self._method = method
        self._request = storage.ListBucketsRequest(request)
        self._response = response
        self._retry = retry
        self._timeout = timeout
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Delegate unknown attributes to the latest response object.
        return getattr(self._response, name)

    @property
    async def pages(self) -> AsyncIterator[storage.ListBucketsResponse]:
        page = self._response
        yield page
        while page.next_page_token:
            # Advance the cursor and await the next page.
            self._request.page_token = page.next_page_token
            page = await self._method(
                self._request,
                retry=self._retry,
                timeout=self._timeout,
                metadata=self._metadata,
            )
            self._response = page
            yield page

    def __aiter__(self) -> AsyncIterator[storage.Bucket]:
        async def _flatten():
            async for page in self.pages:
                for bucket in page.buckets:
                    yield bucket

        return _flatten()

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}<{self._response!r}>"
class ListObjectsPager:
    """Pager that flattens ``list_objects`` results into an object iterator.

    Wraps an initial
    :class:`google.cloud._storage_v2.types.ListObjectsResponse` and exposes
    ``__iter__`` over its ``objects`` field, transparently issuing further
    ``ListObjects`` requests whenever the current response carries a
    ``next_page_token``.

    Attribute lookups are proxied to the most recently fetched response,
    so only the latest page is retained.
    """

    def __init__(
        self,
        method: Callable[..., storage.ListObjectsResponse],
        request: storage.ListObjectsRequest,
        response: storage.ListObjectsResponse,
        *,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()
    ):
        """Instantiate the pager.

        Args:
            method (Callable): The bound client method that produced the
                initial response; reused to fetch subsequent pages.
            request (google.cloud._storage_v2.types.ListObjectsRequest):
                The initial request object.
            response (google.cloud._storage_v2.types.ListObjectsResponse):
                The initial response object.
            retry (google.api_core.retry.Retry): Designation of what errors,
                if any, should be retried.
            timeout (float): The timeout for each page request.
            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value
                pairs sent with each request as metadata. Each value must be
                ``str``, except for keys ending in ``-bin`` whose values
                must be ``bytes``.
        """
        self._method = method
        self._request = storage.ListObjectsRequest(request)
        self._response = response
        self._retry = retry
        self._timeout = timeout
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Delegate unknown attributes to the latest response object.
        return getattr(self._response, name)

    @property
    def pages(self) -> Iterator[storage.ListObjectsResponse]:
        page = self._response
        yield page
        while page.next_page_token:
            # Advance the cursor and fetch the next page.
            self._request.page_token = page.next_page_token
            page = self._method(
                self._request,
                retry=self._retry,
                timeout=self._timeout,
                metadata=self._metadata,
            )
            self._response = page
            yield page

    def __iter__(self) -> Iterator[storage.Object]:
        return (obj for page in self.pages for obj in page.objects)

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}<{self._response!r}>"
class ListObjectsAsyncPager:
    """Async pager that flattens ``list_objects`` results.

    Wraps an initial
    :class:`google.cloud._storage_v2.types.ListObjectsResponse` and exposes
    ``__aiter__`` over its ``objects`` field, transparently awaiting further
    ``ListObjects`` requests whenever the current response carries a
    ``next_page_token``.

    Attribute lookups are proxied to the most recently fetched response,
    so only the latest page is retained.
    """

    def __init__(
        self,
        method: Callable[..., Awaitable[storage.ListObjectsResponse]],
        request: storage.ListObjectsRequest,
        response: storage.ListObjectsResponse,
        *,
        retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()
    ):
        """Instantiate the pager.

        Args:
            method (Callable): The bound async client method that produced
                the initial response; reused to fetch subsequent pages.
            request (google.cloud._storage_v2.types.ListObjectsRequest):
                The initial request object.
            response (google.cloud._storage_v2.types.ListObjectsResponse):
                The initial response object.
            retry (google.api_core.retry.AsyncRetry): Designation of what
                errors, if any, should be retried.
            timeout (float): The timeout for each page request.
            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value
                pairs sent with each request as metadata. Each value must be
                ``str``, except for keys ending in ``-bin`` whose values
                must be ``bytes``.
        """
        self._method = method
        self._request = storage.ListObjectsRequest(request)
        self._response = response
        self._retry = retry
        self._timeout = timeout
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Delegate unknown attributes to the latest response object.
        return getattr(self._response, name)

    @property
    async def pages(self) -> AsyncIterator[storage.ListObjectsResponse]:
        page = self._response
        yield page
        while page.next_page_token:
            # Advance the cursor and await the next page.
            self._request.page_token = page.next_page_token
            page = await self._method(
                self._request,
                retry=self._retry,
                timeout=self._timeout,
                metadata=self._metadata,
            )
            self._response = page
            yield page

    def __aiter__(self) -> AsyncIterator[storage.Object]:
        async def _flatten():
            async for page in self.pages:
                for obj in page.objects:
                    yield obj

        return _flatten()

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}<{self._response!r}>"

View File

@@ -0,0 +1,33 @@
# -*- coding: utf-8 -*-
# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
from typing import Dict, Type
from .base import StorageTransport
from .grpc import StorageGrpcTransport
from .grpc_asyncio import StorageGrpcAsyncIOTransport
# Compile a registry of transports.
# Maps the transport name accepted by the client factory ("grpc" /
# "grpc_asyncio") to the class implementing that transport.
_transport_registry = OrderedDict()  # type: Dict[str, Type[StorageTransport]]
_transport_registry["grpc"] = StorageGrpcTransport
_transport_registry["grpc_asyncio"] = StorageGrpcAsyncIOTransport
__all__ = (
    "StorageTransport",
    "StorageGrpcTransport",
    "StorageGrpcAsyncIOTransport",
)

View File

@@ -0,0 +1,507 @@
# -*- coding: utf-8 -*-
# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import abc
from typing import Awaitable, Callable, Dict, Optional, Sequence, Union
from google.cloud._storage_v2 import gapic_version as package_version
import google.auth # type: ignore
import google.api_core
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import retry as retries
from google.auth import credentials as ga_credentials # type: ignore
from google.oauth2 import service_account # type: ignore
import google.protobuf
from google.cloud._storage_v2.types import storage
from google.iam.v1 import iam_policy_pb2 # type: ignore
from google.iam.v1 import policy_pb2 # type: ignore
from google.longrunning import operations_pb2 # type: ignore
from google.protobuf import empty_pb2 # type: ignore
# Default client metadata attached to outgoing requests; reports this
# library's version (and, when supported, the protobuf runtime version)
# in the user-agent.
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
    gapic_version=package_version.__version__
)
if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"):  # pragma: NO COVER
    # Older api-core releases lack this attribute; set it only when present.
    DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__
class StorageTransport(abc.ABC):
"""Abstract transport class for Storage."""
AUTH_SCOPES = (
"https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/cloud-platform.read-only",
"https://www.googleapis.com/auth/devstorage.full_control",
"https://www.googleapis.com/auth/devstorage.read_only",
"https://www.googleapis.com/auth/devstorage.read_write",
)
DEFAULT_HOST: str = "storage.googleapis.com"
def __init__(
self,
*,
host: str = DEFAULT_HOST,
credentials: Optional[ga_credentials.Credentials] = None,
credentials_file: Optional[str] = None,
scopes: Optional[Sequence[str]] = None,
quota_project_id: Optional[str] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
always_use_jwt_access: Optional[bool] = False,
api_audience: Optional[str] = None,
**kwargs,
) -> None:
"""Instantiate the transport.
Args:
host (Optional[str]):
The hostname to connect to (default: 'storage.googleapis.com').
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
credentials_file (Optional[str]): Deprecated. A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is mutually exclusive with credentials. This argument will be
removed in the next major version of this library.
scopes (Optional[Sequence[str]]): A list of scopes.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
always_use_jwt_access (Optional[bool]): Whether self signed JWT should
be used for service account credentials.
"""
scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}
# Save the scopes.
self._scopes = scopes
if not hasattr(self, "_ignore_credentials"):
self._ignore_credentials: bool = False
# If no credentials are provided, then determine the appropriate
# defaults.
if credentials and credentials_file:
raise core_exceptions.DuplicateCredentialArgs(
"'credentials_file' and 'credentials' are mutually exclusive"
)
if credentials_file is not None:
credentials, _ = google.auth.load_credentials_from_file(
credentials_file, **scopes_kwargs, quota_project_id=quota_project_id
)
elif credentials is None and not self._ignore_credentials:
credentials, _ = google.auth.default(
**scopes_kwargs, quota_project_id=quota_project_id
)
# Don't apply audience if the credentials file passed from user.
if hasattr(credentials, "with_gdch_audience"):
credentials = credentials.with_gdch_audience(
api_audience if api_audience else host
)
# If the credentials are service account credentials, then always try to use self signed JWT.
if (
always_use_jwt_access
and isinstance(credentials, service_account.Credentials)
and hasattr(service_account.Credentials, "with_always_use_jwt_access")
):
credentials = credentials.with_always_use_jwt_access(True)
# Save the credentials.
self._credentials = credentials
# Save the hostname. Default to port 443 (HTTPS) if none is specified.
if ":" not in host:
host += ":443"
self._host = host
@property
def host(self):
return self._host
def _prep_wrapped_messages(self, client_info):
# Precompute the wrapped methods.
self._wrapped_methods = {
self.delete_bucket: gapic_v1.method.wrap_method(
self.delete_bucket,
default_timeout=None,
client_info=client_info,
),
self.get_bucket: gapic_v1.method.wrap_method(
self.get_bucket,
default_timeout=None,
client_info=client_info,
),
self.create_bucket: gapic_v1.method.wrap_method(
self.create_bucket,
default_timeout=None,
client_info=client_info,
),
self.list_buckets: gapic_v1.method.wrap_method(
self.list_buckets,
default_timeout=None,
client_info=client_info,
),
self.lock_bucket_retention_policy: gapic_v1.method.wrap_method(
self.lock_bucket_retention_policy,
default_timeout=None,
client_info=client_info,
),
self.get_iam_policy: gapic_v1.method.wrap_method(
self.get_iam_policy,
default_timeout=None,
client_info=client_info,
),
self.set_iam_policy: gapic_v1.method.wrap_method(
self.set_iam_policy,
default_timeout=None,
client_info=client_info,
),
self.test_iam_permissions: gapic_v1.method.wrap_method(
self.test_iam_permissions,
default_timeout=None,
client_info=client_info,
),
self.update_bucket: gapic_v1.method.wrap_method(
self.update_bucket,
default_timeout=None,
client_info=client_info,
),
self.compose_object: gapic_v1.method.wrap_method(
self.compose_object,
default_timeout=None,
client_info=client_info,
),
self.delete_object: gapic_v1.method.wrap_method(
self.delete_object,
default_timeout=None,
client_info=client_info,
),
self.restore_object: gapic_v1.method.wrap_method(
self.restore_object,
default_timeout=None,
client_info=client_info,
),
self.cancel_resumable_write: gapic_v1.method.wrap_method(
self.cancel_resumable_write,
default_timeout=None,
client_info=client_info,
),
self.get_object: gapic_v1.method.wrap_method(
self.get_object,
default_timeout=None,
client_info=client_info,
),
self.read_object: gapic_v1.method.wrap_method(
self.read_object,
default_timeout=None,
client_info=client_info,
),
self.bidi_read_object: gapic_v1.method.wrap_method(
self.bidi_read_object,
default_timeout=None,
client_info=client_info,
),
self.update_object: gapic_v1.method.wrap_method(
self.update_object,
default_timeout=None,
client_info=client_info,
),
self.write_object: gapic_v1.method.wrap_method(
self.write_object,
default_timeout=None,
client_info=client_info,
),
self.bidi_write_object: gapic_v1.method.wrap_method(
self.bidi_write_object,
default_timeout=None,
client_info=client_info,
),
self.list_objects: gapic_v1.method.wrap_method(
self.list_objects,
default_timeout=None,
client_info=client_info,
),
self.rewrite_object: gapic_v1.method.wrap_method(
self.rewrite_object,
default_timeout=None,
client_info=client_info,
),
self.start_resumable_write: gapic_v1.method.wrap_method(
self.start_resumable_write,
default_timeout=None,
client_info=client_info,
),
self.query_write_status: gapic_v1.method.wrap_method(
self.query_write_status,
default_timeout=None,
client_info=client_info,
),
self.move_object: gapic_v1.method.wrap_method(
self.move_object,
default_timeout=None,
client_info=client_info,
),
}
def close(self):
"""Closes resources associated with the transport.
.. warning::
Only call this method if the transport is NOT shared
with other clients - this may cause errors in other clients!
"""
raise NotImplementedError()
@property
def delete_bucket(
self,
) -> Callable[
[storage.DeleteBucketRequest],
Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]],
]:
raise NotImplementedError()
@property
def get_bucket(
self,
) -> Callable[
[storage.GetBucketRequest], Union[storage.Bucket, Awaitable[storage.Bucket]]
]:
raise NotImplementedError()
@property
def create_bucket(
self,
) -> Callable[
[storage.CreateBucketRequest], Union[storage.Bucket, Awaitable[storage.Bucket]]
]:
raise NotImplementedError()
@property
def list_buckets(
self,
) -> Callable[
[storage.ListBucketsRequest],
Union[storage.ListBucketsResponse, Awaitable[storage.ListBucketsResponse]],
]:
raise NotImplementedError()
@property
def lock_bucket_retention_policy(
self,
) -> Callable[
[storage.LockBucketRetentionPolicyRequest],
Union[storage.Bucket, Awaitable[storage.Bucket]],
]:
raise NotImplementedError()
@property
def get_iam_policy(
self,
) -> Callable[
[iam_policy_pb2.GetIamPolicyRequest],
Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]],
]:
raise NotImplementedError()
@property
def set_iam_policy(
self,
) -> Callable[
[iam_policy_pb2.SetIamPolicyRequest],
Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]],
]:
raise NotImplementedError()
@property
def test_iam_permissions(
self,
) -> Callable[
[iam_policy_pb2.TestIamPermissionsRequest],
Union[
iam_policy_pb2.TestIamPermissionsResponse,
Awaitable[iam_policy_pb2.TestIamPermissionsResponse],
],
]:
raise NotImplementedError()
@property
def update_bucket(
self,
) -> Callable[
[storage.UpdateBucketRequest], Union[storage.Bucket, Awaitable[storage.Bucket]]
]:
raise NotImplementedError()
@property
def compose_object(
self,
) -> Callable[
[storage.ComposeObjectRequest], Union[storage.Object, Awaitable[storage.Object]]
]:
raise NotImplementedError()
@property
def delete_object(
self,
) -> Callable[
[storage.DeleteObjectRequest],
Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]],
]:
raise NotImplementedError()
@property
def restore_object(
self,
) -> Callable[
[storage.RestoreObjectRequest], Union[storage.Object, Awaitable[storage.Object]]
]:
raise NotImplementedError()
@property
def cancel_resumable_write(
self,
) -> Callable[
[storage.CancelResumableWriteRequest],
Union[
storage.CancelResumableWriteResponse,
Awaitable[storage.CancelResumableWriteResponse],
],
]:
raise NotImplementedError()
@property
def get_object(
self,
) -> Callable[
[storage.GetObjectRequest], Union[storage.Object, Awaitable[storage.Object]]
]:
raise NotImplementedError()
@property
def read_object(
self,
) -> Callable[
[storage.ReadObjectRequest],
Union[storage.ReadObjectResponse, Awaitable[storage.ReadObjectResponse]],
]:
raise NotImplementedError()
@property
def bidi_read_object(
self,
) -> Callable[
[storage.BidiReadObjectRequest],
Union[
storage.BidiReadObjectResponse, Awaitable[storage.BidiReadObjectResponse]
],
]:
raise NotImplementedError()
@property
def update_object(
self,
) -> Callable[
[storage.UpdateObjectRequest], Union[storage.Object, Awaitable[storage.Object]]
]:
raise NotImplementedError()
@property
def write_object(
self,
) -> Callable[
[storage.WriteObjectRequest],
Union[storage.WriteObjectResponse, Awaitable[storage.WriteObjectResponse]],
]:
raise NotImplementedError()
@property
def bidi_write_object(
    self,
) -> Callable[
    [storage.BidiWriteObjectRequest],
    Union[
        storage.BidiWriteObjectResponse, Awaitable[storage.BidiWriteObjectResponse]
    ],
]:
    """Return the callable for the BidiWriteObject RPC; abstract, overridden by concrete transports."""
    raise NotImplementedError()
@property
def list_objects(
    self,
) -> Callable[
    [storage.ListObjectsRequest],
    Union[storage.ListObjectsResponse, Awaitable[storage.ListObjectsResponse]],
]:
    """Return the callable for the ListObjects RPC; abstract, overridden by concrete transports."""
    raise NotImplementedError()
@property
def rewrite_object(
    self,
) -> Callable[
    [storage.RewriteObjectRequest],
    Union[storage.RewriteResponse, Awaitable[storage.RewriteResponse]],
]:
    """Return the callable for the RewriteObject RPC; abstract, overridden by concrete transports."""
    raise NotImplementedError()
@property
def start_resumable_write(
    self,
) -> Callable[
    [storage.StartResumableWriteRequest],
    Union[
        storage.StartResumableWriteResponse,
        Awaitable[storage.StartResumableWriteResponse],
    ],
]:
    """Return the callable for the StartResumableWrite RPC; abstract, overridden by concrete transports."""
    raise NotImplementedError()
@property
def query_write_status(
    self,
) -> Callable[
    [storage.QueryWriteStatusRequest],
    Union[
        storage.QueryWriteStatusResponse,
        Awaitable[storage.QueryWriteStatusResponse],
    ],
]:
    """Return the callable for the QueryWriteStatus RPC; abstract, overridden by concrete transports."""
    raise NotImplementedError()
@property
def move_object(
    self,
) -> Callable[
    [storage.MoveObjectRequest], Union[storage.Object, Awaitable[storage.Object]]
]:
    """Return the callable for the MoveObject RPC; abstract, overridden by concrete transports."""
    raise NotImplementedError()
@property
def kind(self) -> str:
    """Return a short string identifying this transport flavor; abstract, overridden by concrete transports."""
    raise NotImplementedError()
# The abstract base transport is the module's only public name.
__all__ = ("StorageTransport",)

View File

@@ -0,0 +1,132 @@
# -*- coding: utf-8 -*-
# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from .storage import (
AppendObjectSpec,
BidiReadHandle,
BidiReadObjectError,
BidiReadObjectRedirectedError,
BidiReadObjectRequest,
BidiReadObjectResponse,
BidiReadObjectSpec,
BidiWriteHandle,
BidiWriteObjectRedirectedError,
BidiWriteObjectRequest,
BidiWriteObjectResponse,
Bucket,
BucketAccessControl,
CancelResumableWriteRequest,
CancelResumableWriteResponse,
ChecksummedData,
CommonObjectRequestParams,
ComposeObjectRequest,
ContentRange,
CreateBucketRequest,
CustomerEncryption,
DeleteBucketRequest,
DeleteObjectRequest,
GetBucketRequest,
GetObjectRequest,
ListBucketsRequest,
ListBucketsResponse,
ListObjectsRequest,
ListObjectsResponse,
LockBucketRetentionPolicyRequest,
MoveObjectRequest,
Object,
ObjectAccessControl,
ObjectChecksums,
ObjectContexts,
ObjectCustomContextPayload,
ObjectRangeData,
Owner,
ProjectTeam,
QueryWriteStatusRequest,
QueryWriteStatusResponse,
ReadObjectRequest,
ReadObjectResponse,
ReadRange,
ReadRangeError,
RestoreObjectRequest,
RewriteObjectRequest,
RewriteResponse,
ServiceConstants,
StartResumableWriteRequest,
StartResumableWriteResponse,
UpdateBucketRequest,
UpdateObjectRequest,
WriteObjectRequest,
WriteObjectResponse,
WriteObjectSpec,
)
# Public surface of the types package: one entry per message re-exported from
# ``.storage`` above, kept in alphabetical order.
__all__ = (
    "AppendObjectSpec",
    "BidiReadHandle",
    "BidiReadObjectError",
    "BidiReadObjectRedirectedError",
    "BidiReadObjectRequest",
    "BidiReadObjectResponse",
    "BidiReadObjectSpec",
    "BidiWriteHandle",
    "BidiWriteObjectRedirectedError",
    "BidiWriteObjectRequest",
    "BidiWriteObjectResponse",
    "Bucket",
    "BucketAccessControl",
    "CancelResumableWriteRequest",
    "CancelResumableWriteResponse",
    "ChecksummedData",
    "CommonObjectRequestParams",
    "ComposeObjectRequest",
    "ContentRange",
    "CreateBucketRequest",
    "CustomerEncryption",
    "DeleteBucketRequest",
    "DeleteObjectRequest",
    "GetBucketRequest",
    "GetObjectRequest",
    "ListBucketsRequest",
    "ListBucketsResponse",
    "ListObjectsRequest",
    "ListObjectsResponse",
    "LockBucketRetentionPolicyRequest",
    "MoveObjectRequest",
    "Object",
    "ObjectAccessControl",
    "ObjectChecksums",
    "ObjectContexts",
    "ObjectCustomContextPayload",
    "ObjectRangeData",
    "Owner",
    "ProjectTeam",
    "QueryWriteStatusRequest",
    "QueryWriteStatusResponse",
    "ReadObjectRequest",
    "ReadObjectResponse",
    "ReadRange",
    "ReadRangeError",
    "RestoreObjectRequest",
    "RewriteObjectRequest",
    "RewriteResponse",
    "ServiceConstants",
    "StartResumableWriteRequest",
    "StartResumableWriteResponse",
    "UpdateBucketRequest",
    "UpdateObjectRequest",
    "WriteObjectRequest",
    "WriteObjectResponse",
    "WriteObjectSpec",
)

View File

@@ -0,0 +1,121 @@
# Copyright 2014 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Shared testing utilities."""
from __future__ import absolute_import
class _Monkey(object):
    """Context manager that temporarily replaces attributes on ``module``.

    On entry the attributes named by the keyword arguments are overwritten
    with the supplied values; on exit the original values are restored.
    """

    def __init__(self, module, **kw):
        if not kw:  # pragma: NO COVER
            raise ValueError("_Monkey was used with nothing to monkey-patch")
        self.module = module
        # Snapshot the current values before patching so __exit__ can undo.
        self.to_restore = {name: getattr(module, name) for name in kw}
        for name, replacement in kw.items():
            setattr(module, name, replacement)

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Restore unconditionally, even if the body raised.
        for name, original in self.to_restore.items():
            setattr(self.module, name, original)
class _NamedTemporaryFile(object):
    """Context manager around a closed temporary file.

    The path is exposed as ``self.name``; the file is created (and its
    descriptor closed) at construction time and deleted on exit.
    """

    def __init__(self, suffix=""):
        import os
        import tempfile

        descriptor, self.name = tempfile.mkstemp(suffix=suffix)
        # Close the OS-level handle immediately; callers reopen by path.
        os.close(descriptor)

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        import os

        os.remove(self.name)
def _tempdir_maker():
    """Build and return a context-manager factory for temporary directories."""
    import contextlib
    import shutil
    import tempfile

    @contextlib.contextmanager
    def _tempdir_mgr():
        # NOTE: the directory is removed only on a clean exit (no try/finally),
        # matching the long-standing behavior of this helper.
        scratch_dir = tempfile.mkdtemp()
        yield scratch_dir
        shutil.rmtree(scratch_dir)

    return _tempdir_mgr


# pylint: disable=invalid-name
# Retain _tempdir as a constant for backwards compatibility despite
# being an invalid name.
_tempdir = _tempdir_maker()
del _tempdir_maker
# pylint: enable=invalid-name
class _GAXBaseAPI(object):
    """Test double for a GAX API object; attributes come from keyword args."""

    # Flag consulted by tests to simulate an unexpected gRPC failure.
    _random_gax_error = False

    def __init__(self, **kw):
        self.__dict__.update(kw)

    @staticmethod
    def _make_grpc_error(status_code, trailing=None):
        """Build a ``GrpcRendezvous`` carrying ``status_code``.

        Relies on the private ``grpc._channel._RPCState`` constructor
        signature: (due, initial_metadata, trailing_metadata, code, details).
        """
        from grpc._channel import _RPCState
        from google.cloud.exceptions import GrpcRendezvous

        details = "Some error details."
        exc_state = _RPCState((), None, trailing, status_code, details)
        return GrpcRendezvous(exc_state, None, None, None)

    def _make_grpc_not_found(self):
        """Shorthand: error with ``StatusCode.NOT_FOUND``."""
        from grpc import StatusCode

        return self._make_grpc_error(StatusCode.NOT_FOUND)

    def _make_grpc_failed_precondition(self):
        """Shorthand: error with ``StatusCode.FAILED_PRECONDITION``."""
        from grpc import StatusCode

        return self._make_grpc_error(StatusCode.FAILED_PRECONDITION)

    def _make_grpc_already_exists(self):
        """Shorthand: error with ``StatusCode.ALREADY_EXISTS``."""
        from grpc import StatusCode

        return self._make_grpc_error(StatusCode.ALREADY_EXISTS)

    def _make_grpc_deadline_exceeded(self):
        """Shorthand: error with ``StatusCode.DEADLINE_EXCEEDED``."""
        from grpc import StatusCode

        return self._make_grpc_error(StatusCode.DEADLINE_EXCEEDED)
class _GAXPageIterator(object):
    """Test double for a GAX page iterator fed from pre-built pages."""

    def __init__(self, *pages, **kwargs):
        self.page_token = kwargs.get("page_token")
        self._pages = iter(pages)

    def __next__(self):
        """Iterate to the next page."""
        return next(self._pages)

View File

@@ -0,0 +1,2 @@
# Marker file for PEP 561.
# This package uses inline types.

View File

@@ -0,0 +1,342 @@
# Copyright 2015 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Base classes for client used to interact with Google Cloud APIs."""
import io
import json
import os
from pickle import PicklingError
from typing import Tuple
from typing import Union
import google.api_core.client_options
import google.api_core.exceptions
import google.auth
from google.auth import environment_vars
import google.auth.credentials
import google.auth.transport.requests
from google.cloud._helpers import _determine_default_project
from google.oauth2 import service_account
# ``google.auth.api_key`` is only available in newer google-auth releases;
# feature-gate API-key support on its importability.
try:
    import google.auth.api_key

    HAS_GOOGLE_AUTH_API_KEY = True
except ImportError:  # pragma: NO COVER
    HAS_GOOGLE_AUTH_API_KEY = False  # pragma: NO COVER
# TODO: Investigate adding a test for google.auth.api_key ImportError (https://github.com/googleapis/python-cloud-core/issues/334)

# Message raised when a credentials object is not a google-auth credential.
_GOOGLE_AUTH_CREDENTIALS_HELP = (
    "This library only supports credentials from google-auth-library-python. "
    "See https://google-auth.readthedocs.io/en/latest/ "
    "for help on authentication with this library."
)

# Default timeout for auth requests.
_CREDENTIALS_REFRESH_TIMEOUT = 300
class _ClientFactoryMixin(object):
    """Mixin to allow factories that create credentials.

    .. note::

        This class is virtual.
    """

    _SET_PROJECT = False

    @classmethod
    def from_service_account_info(cls, info, *args, **kwargs):
        """Factory to retrieve JSON credentials while creating client.

        :type info: dict
        :param info:
            The JSON object with a private key and other credentials
            information (downloaded from the Google APIs console).

        :type args: tuple
        :param args: Remaining positional arguments to pass to constructor.

        :param kwargs: Remaining keyword arguments to pass to constructor.

        :rtype: :class:`_ClientFactoryMixin`
        :returns: The client created with the retrieved JSON credentials.
        :raises TypeError: if there is a conflict with the kwargs
                 and the credentials created by the factory.
        """
        if "credentials" in kwargs:
            raise TypeError("credentials must not be in keyword arguments")
        credentials = service_account.Credentials.from_service_account_info(info)
        # Subclasses that carry a project may default it from the key file.
        if cls._SET_PROJECT and "project" not in kwargs:
            kwargs["project"] = info.get("project_id")
        kwargs["credentials"] = credentials
        return cls(*args, **kwargs)

    @classmethod
    def from_service_account_json(cls, json_credentials_path, *args, **kwargs):
        """Factory to retrieve JSON credentials while creating client.

        :type json_credentials_path: str
        :param json_credentials_path: The path to a private key file (this file
                                      was given to you when you created the
                                      service account). This file must contain
                                      a JSON object with a private key and
                                      other credentials information (downloaded
                                      from the Google APIs console).

        :type args: tuple
        :param args: Remaining positional arguments to pass to constructor.

        :param kwargs: Remaining keyword arguments to pass to constructor.

        :rtype: :class:`_ClientFactoryMixin`
        :returns: The client created with the retrieved JSON credentials.
        :raises TypeError: if there is a conflict with the kwargs
                 and the credentials created by the factory.
        """
        with open(json_credentials_path, "r", encoding="utf-8") as json_handle:
            parsed_info = json.load(json_handle)
        return cls.from_service_account_info(parsed_info, *args, **kwargs)
class Client(_ClientFactoryMixin):
    """Client to bundle configuration needed for API requests.

    Stores ``credentials`` and an HTTP object so that subclasses
    can pass them along to a connection class.

    If no value is passed in for ``_http``, a :class:`requests.Session` object
    will be created and authorized with the ``credentials``. If not, the
    ``credentials`` and ``_http`` need not be related.

    Callers and subclasses may seek to use the private key from
    ``credentials`` to sign data.

    Args:
        credentials (google.auth.credentials.Credentials):
            (Optional) The OAuth2 Credentials to use for this client. If not
            passed (and if no ``_http`` object is passed), falls back to the
            default inferred from the environment.
        client_options (google.api_core.client_options.ClientOptions):
            (Optional) Custom options for the client.
        _http (requests.Session):
            (Optional) HTTP object to make requests. Can be any object that
            defines ``request()`` with the same interface as
            :meth:`requests.Session.request`. If not passed, an ``_http``
            object is created that is bound to the ``credentials`` for the
            current object.
            This parameter should be considered private, and could change in
            the future.

    Raises:
        google.auth.exceptions.DefaultCredentialsError:
            Raised if ``credentials`` is not specified and the library fails
            to acquire default credentials.
    """

    SCOPE: Union[Tuple[str, ...], None] = None
    """The scopes required for authenticating with a service.

    Needs to be set by subclasses.
    """

    def __init__(self, credentials=None, _http=None, client_options=None):
        # Normalize ``client_options`` so later checks can assume a
        # ClientOptions instance.
        if isinstance(client_options, dict):
            client_options = google.api_core.client_options.from_dict(client_options)
        if client_options is None:
            client_options = google.api_core.client_options.ClientOptions()

        # Reject ambiguous credential combinations up front: explicit
        # credentials vs. a credentials file, and (when supported) an API key
        # vs. either of those.
        if credentials and client_options.credentials_file:
            raise google.api_core.exceptions.DuplicateCredentialArgs(
                "'credentials' and 'client_options.credentials_file' are mutually exclusive."
            )

        if (
            HAS_GOOGLE_AUTH_API_KEY
            and client_options.api_key
            and (credentials or client_options.credentials_file)
        ):
            raise google.api_core.exceptions.DuplicateCredentialArgs(
                "'client_options.api_key' is mutually exclusive with 'credentials' and 'client_options.credentials_file'."
            )

        if credentials and not isinstance(
            credentials, google.auth.credentials.Credentials
        ):
            raise ValueError(_GOOGLE_AUTH_CREDENTIALS_HELP)

        scopes = client_options.scopes or self.SCOPE

        # if no http is provided, credentials must exist
        if not _http and credentials is None:
            # Credential source precedence: credentials file, then API key,
            # then Application Default Credentials.
            if client_options.credentials_file:
                credentials, _ = google.auth.load_credentials_from_file(
                    client_options.credentials_file, scopes=scopes
                )
            elif HAS_GOOGLE_AUTH_API_KEY and client_options.api_key is not None:
                credentials = google.auth.api_key.Credentials(client_options.api_key)
            else:
                credentials, _ = google.auth.default(scopes=scopes)

        self._credentials = google.auth.credentials.with_scopes_if_required(
            credentials, scopes=scopes
        )

        if client_options.quota_project_id:
            self._credentials = self._credentials.with_quota_project(
                client_options.quota_project_id
            )

        self._http_internal = _http
        self._client_cert_source = client_options.client_cert_source

    def __getstate__(self):
        """Explicitly state that clients are not pickleable."""
        raise PicklingError(
            "\n".join(
                [
                    "Pickling client objects is explicitly not supported.",
                    "Clients have non-trivial state that is local and unpickleable.",
                ]
            )
        )

    @property
    def _http(self):
        """Getter for object used for HTTP transport.

        :rtype: :class:`~requests.Session`
        :returns: An HTTP object.
        """
        # Lazily build the authorized session on first access; reused after.
        if self._http_internal is None:
            self._http_internal = google.auth.transport.requests.AuthorizedSession(
                self._credentials,
                refresh_timeout=_CREDENTIALS_REFRESH_TIMEOUT,
            )
            self._http_internal.configure_mtls_channel(self._client_cert_source)
        return self._http_internal

    def close(self):
        """Clean up transport, if set.

        Suggested use:

        .. code-block:: python

            import contextlib

            with contextlib.closing(client):  # closes on exit
                do_something_with(client)
        """
        # Only close a session we actually created or were handed.
        if self._http_internal is not None:
            self._http_internal.close()
class _ClientProjectMixin(object):
    """Mixin to allow setting the project on the client.

    Project resolution order: explicit argument, then environment variables,
    then the credentials' ``project_id``, then default detection.

    :type project: str
    :param project:
        (Optional) the project which the client acts on behalf of. If not
        passed, falls back to the default inferred from the environment.

    :type credentials: :class:`google.auth.credentials.Credentials`
    :param credentials:
        (Optional) credentials used to discover a project, if not passed.

    :raises: :class:`EnvironmentError` if the project is neither passed in nor
             set on the credentials or in the environment. :class:`ValueError`
             if the project value is invalid.
    """

    def __init__(self, project=None, credentials=None):
        # This test duplicates the one from `google.auth.default`, but earlier,
        # for backward compatibility: we want the environment variable to
        # override any project set on the credentials. See:
        # https://github.com/googleapis/python-cloud-core/issues/27
        if project is None:
            project = os.getenv(
                environment_vars.PROJECT,
                os.getenv(environment_vars.LEGACY_PROJECT),
            )

        # Project set on explicit credentials overrides discovery from
        # SDK / GAE / GCE.
        if project is None and credentials is not None:
            project = getattr(credentials, "project_id", None)

        if project is None:
            project = self._determine_default(project)

        if project is None:
            raise EnvironmentError(
                "Project was not passed and could not be "
                "determined from the environment."
            )

        # Normalize bytes to text before validating the type.
        if isinstance(project, bytes):
            project = project.decode("utf-8")

        if not isinstance(project, str):
            raise ValueError("Project must be a string.")

        self.project = project

    @staticmethod
    def _determine_default(project):
        """Helper: use default project detection."""
        return _determine_default_project(project)
class ClientWithProject(Client, _ClientProjectMixin):
    """Client that also stores a project.

    :type project: str
    :param project: the project which the client acts on behalf of. If not
                    passed falls back to the default inferred from the
                    environment.

    :type credentials: :class:`~google.auth.credentials.Credentials`
    :param credentials: (Optional) The OAuth2 Credentials to use for this
                        client. If not passed (and if no ``_http`` object is
                        passed), falls back to the default inferred from the
                        environment.

    :type _http: :class:`~requests.Session`
    :param _http: (Optional) HTTP object to make requests. Can be any object
                  that defines ``request()`` with the same interface as
                  :meth:`~requests.Session.request`. If not passed, an
                  ``_http`` object is created that is bound to the
                  ``credentials`` for the current object.
                  This parameter should be considered private, and could
                  change in the future.

    :raises: :class:`ValueError` if the project is neither passed in nor
             set in the environment.
    """

    _SET_PROJECT = True  # Used by from_service_account_json()

    def __init__(self, project=None, credentials=None, client_options=None, _http=None):
        # Resolve the project first so it is available before credential
        # acquisition in Client.__init__ runs.
        _ClientProjectMixin.__init__(self, project=project, credentials=credentials)
        Client.__init__(
            self, credentials=credentials, client_options=client_options, _http=_http
        )

View File

@@ -0,0 +1,2 @@
# Marker file for PEP 561.
# This package uses inline types.

View File

@@ -0,0 +1,52 @@
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// This file contains stub messages for common resources in GCP.
// It is not intended to be directly generated, and is instead used by
// other tooling to be able to match common resource patterns.
syntax = "proto3";

package google.cloud;

import "google/api/resource.proto";

// Shared resource-name patterns, made available for
// (google.api.resource_reference) annotations in other APIs.
option (google.api.resource_definition) = {
  type: "cloudresourcemanager.googleapis.com/Project"
  pattern: "projects/{project}"
};

option (google.api.resource_definition) = {
  type: "cloudresourcemanager.googleapis.com/Organization"
  pattern: "organizations/{organization}"
};

option (google.api.resource_definition) = {
  type: "cloudresourcemanager.googleapis.com/Folder"
  pattern: "folders/{folder}"
};

option (google.api.resource_definition) = {
  type: "cloudbilling.googleapis.com/BillingAccount"
  pattern: "billingAccounts/{billing_account}"
};

option (google.api.resource_definition) = {
  type: "locations.googleapis.com/Location"
  pattern: "projects/{project}/locations/{location}"
};

View File

@@ -0,0 +1,47 @@
# -*- coding: utf-8 -*-
# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/cloud/common_resources.proto
# Protobuf Python Version: 4.25.3
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import symbol_database as _symbol_database
from google.protobuf.internal import builder as _builder
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()


from google.api import resource_pb2 as google_dot_api_dot_resource__pb2

# Register the serialized FileDescriptorProto for
# google/cloud/common_resources.proto in the default descriptor pool.
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
    b"\n#google/cloud/common_resources.proto\x12\x0cgoogle.cloud\x1a\x19google/api/resource.protoB\xf9\x02\xea\x41\x41\n+cloudresourcemanager.googleapis.com/Project\x12\x12projects/{project}\xea\x41P\n0cloudresourcemanager.googleapis.com/Organization\x12\x1corganizations/{organization}\xea\x41>\n*cloudresourcemanager.googleapis.com/Folder\x12\x10\x66olders/{folder}\xea\x41O\n*cloudbilling.googleapis.com/BillingAccount\x12!billingAccounts/{billing_account}\xea\x41L\n!locations.googleapis.com/Location\x12'projects/{project}/locations/{location}b\x06proto3"
)

_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
_builder.BuildTopDescriptorsAndMessages(
    DESCRIPTOR, "google.cloud.common_resources_pb2", _globals
)
if _descriptor._USE_C_DESCRIPTORS == False:
    # Pure-Python descriptors: attach serialized options by hand.
    _globals["DESCRIPTOR"]._options = None
    _globals[
        "DESCRIPTOR"
    ]._serialized_options = b"\352AA\n+cloudresourcemanager.googleapis.com/Project\022\022projects/{project}\352AP\n0cloudresourcemanager.googleapis.com/Organization\022\034organizations/{organization}\352A>\n*cloudresourcemanager.googleapis.com/Folder\022\020folders/{folder}\352AO\n*cloudbilling.googleapis.com/BillingAccount\022!billingAccounts/{billing_account}\352AL\n!locations.googleapis.com/Location\022'projects/{project}/locations/{location}"
# @@protoc_insertion_point(module_scope)

View File

@@ -0,0 +1,20 @@
# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import ClassVar as _ClassVar
from google.api import resource_pb2 as _resource_pb2
from google.protobuf import descriptor as _descriptor
# File descriptor for google/cloud/common_resources.proto (options only;
# the file declares no messages).
DESCRIPTOR: _descriptor.FileDescriptor

View File

@@ -0,0 +1,38 @@
# Copyright 2015 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Comprehensive list of environment variables used in google-cloud.
These enable many types of implicit behavior in both production
and tests.
"""
GCD_DATASET = "DATASTORE_DATASET"
"""Environment variable defining default dataset ID under GCD."""
GCD_HOST = "DATASTORE_EMULATOR_HOST"
"""Environment variable defining host for GCD dataset server."""
PUBSUB_EMULATOR = "PUBSUB_EMULATOR_HOST"
"""Environment variable defining host for Pub/Sub emulator."""
BIGTABLE_EMULATOR = "BIGTABLE_EMULATOR_HOST"
"""Environment variable defining host for Bigtable emulator."""
DISABLE_GRPC = "GOOGLE_CLOUD_DISABLE_GRPC"
"""Environment variable acting as flag to disable gRPC.
To be used for APIs where both an HTTP and gRPC implementation
exist.
"""

View File

@@ -0,0 +1,2 @@
# Marker file for PEP 561.
# This package uses inline types.

View File

@@ -0,0 +1,59 @@
# Copyright 2014 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=invalid-name
# pylint recognizes all of these aliases as constants and thinks they have
# invalid names.
"""Custom exceptions for :mod:`google.cloud` package."""
# Avoid the grpc and google.cloud.grpc collision.
from __future__ import absolute_import
from google.api_core import exceptions
# ``grpc`` is optional: expose its Rendezvous type when present, else None,
# so callers can feature-test ``GrpcRendezvous``.
try:
    from grpc._channel import _Rendezvous
except ImportError:  # pragma: NO COVER
    _Rendezvous = None

GrpcRendezvous = _Rendezvous
"""Exception class raised by gRPC stable."""

# Aliases to moved classes.
GoogleCloudError = exceptions.GoogleAPICallError

# 3xx redirection aliases.
Redirection = exceptions.Redirection
MovedPermanently = exceptions.MovedPermanently
NotModified = exceptions.NotModified
TemporaryRedirect = exceptions.TemporaryRedirect
ResumeIncomplete = exceptions.ResumeIncomplete

# 4xx client-error aliases.
ClientError = exceptions.ClientError
BadRequest = exceptions.BadRequest
Unauthorized = exceptions.Unauthorized
Forbidden = exceptions.Forbidden
NotFound = exceptions.NotFound
MethodNotAllowed = exceptions.MethodNotAllowed
Conflict = exceptions.Conflict
LengthRequired = exceptions.LengthRequired
PreconditionFailed = exceptions.PreconditionFailed
RequestRangeNotSatisfiable = exceptions.RequestRangeNotSatisfiable
TooManyRequests = exceptions.TooManyRequests

# 5xx server-error aliases.
ServerError = exceptions.ServerError
InternalServerError = exceptions.InternalServerError
MethodNotImplemented = exceptions.MethodNotImplemented
BadGateway = exceptions.BadGateway
ServiceUnavailable = exceptions.ServiceUnavailable
GatewayTimeout = exceptions.GatewayTimeout

# Factory helpers re-exported from google.api_core.exceptions.
from_http_status = exceptions.from_http_status
from_http_response = exceptions.from_http_response

View File

@@ -0,0 +1,2 @@
# Marker file for PEP 561.
# This package uses inline types.

View File

@@ -0,0 +1,150 @@
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// This file contains custom annotations that are used by GAPIC generators to
// handle Long Running Operation methods (LRO) that are NOT compliant with
// https://google.aip.dev/151. These annotations are public for technical
// reasons only. Please DO NOT USE them in your protos.
syntax = "proto3";
package google.cloud;
import "google/protobuf/descriptor.proto";
option go_package = "google.golang.org/genproto/googleapis/cloud/extendedops;extendedops";
option java_multiple_files = true;
option java_outer_classname = "ExtendedOperationsProto";
option java_package = "com.google.cloud";
option objc_class_prefix = "GAPI";
// FieldOptions to match corresponding fields in the initial request,
// polling request and operation response messages.
//
// Example:
//
// In an API-specific operation message:
//
// message MyOperation {
// string http_error_message = 1 [(operation_field) = ERROR_MESSAGE];
// int32 http_error_status_code = 2 [(operation_field) = ERROR_CODE];
// string id = 3 [(operation_field) = NAME];
// Status status = 4 [(operation_field) = STATUS];
// }
//
// In a polling request message (the one which is used to poll for an LRO
// status):
//
// message MyPollingRequest {
// string operation = 1 [(operation_response_field) = "id"];
// string project = 2;
// string region = 3;
// }
//
// In an initial request message (the one which starts an LRO):
//
// message MyInitialRequest {
// string my_project = 2 [(operation_request_field) = "project"];
// string my_region = 3 [(operation_request_field) = "region"];
// }
//
// Custom field options; extension numbers 1149-1151 are reserved for these
// annotations on google.protobuf.FieldOptions.
extend google.protobuf.FieldOptions {
  // A field annotation that maps fields in an API-specific Operation object to
  // their standard counterparts in google.longrunning.Operation. See
  // OperationResponseMapping enum definition.
  OperationResponseMapping operation_field = 1149;

  // A field annotation that maps fields in the initial request message
  // (the one which started the LRO) to their counterparts in the polling
  // request message. For non-standard LRO, the polling response may be missing
  // some of the information needed to make a subsequent polling request. The
  // missing information (for example, project or region ID) is contained in the
  // fields of the initial request message that this annotation must be applied
  // to. The string value of the annotation corresponds to the name of the
  // counterpart field in the polling request message that the annotated field's
  // value will be copied to.
  string operation_request_field = 1150;

  // A field annotation that maps fields in the polling request message to their
  // counterparts in the initial and/or polling response message. The initial
  // and the polling methods return an API-specific Operation object. Some of
  // the fields from that response object must be reused in the subsequent
  // request (like operation name/ID) to fully identify the polled operation.
  // This annotation must be applied to the fields in the polling request
  // message, the string value of the annotation must correspond to the name of
  // the counterpart field in the Operation response object whose value will be
  // copied to the annotated field.
  string operation_response_field = 1151;
}
// MethodOptions to identify the actual service and method used for operation
// status polling.
//
// Example:
//
// In a method, which starts an LRO:
//
// service MyService {
// rpc Foo(MyInitialRequest) returns (MyOperation) {
// option (operation_service) = "MyPollingService";
// }
// }
//
// In a polling method:
//
// service MyPollingService {
// rpc Get(MyPollingRequest) returns (MyOperation) {
// option (operation_polling_method) = true;
// }
// }
// Custom method options; extension numbers 1249-1250 are reserved for these
// annotations on google.protobuf.MethodOptions.
extend google.protobuf.MethodOptions {
  // A method annotation that maps an LRO method (the one which starts an LRO)
  // to the service, which will be used to poll for the operation status. The
  // annotation must be applied to the method which starts an LRO, the string
  // value of the annotation must correspond to the name of the service used to
  // poll for the operation status.
  string operation_service = 1249;

  // A method annotation that marks methods that can be used for polling
  // operation status (e.g. the MyPollingService.Get(MyPollingRequest) method).
  bool operation_polling_method = 1250;
}
// An enum to be used to mark the essential (for polling) fields in an
// API-specific Operation object. A custom Operation object may contain many
// different fields, but only few of them are essential to conduct a successful
// polling process.
// An enum to be used to mark the essential (for polling) fields in an
// API-specific Operation object. A custom Operation object may contain many
// different fields, but only few of them are essential to conduct a successful
// polling process.
enum OperationResponseMapping {
  // Do not use. (Required zero value for proto3 enums.)
  UNDEFINED = 0;

  // A field in an API-specific (custom) Operation object which carries the same
  // meaning as google.longrunning.Operation.name.
  NAME = 1;

  // A field in an API-specific (custom) Operation object which carries the same
  // meaning as google.longrunning.Operation.done. If the annotated field is of
  // an enum type, `annotated_field_name == EnumType.DONE` semantics should be
  // equivalent to `Operation.done == true`. If the annotated field is of type
  // boolean, then it should follow the same semantics as Operation.done.
  // Otherwise, a non-empty value should be treated as `Operation.done == true`.
  STATUS = 2;

  // A field in an API-specific (custom) Operation object which carries the same
  // meaning as google.longrunning.Operation.error.code.
  ERROR_CODE = 3;

  // A field in an API-specific (custom) Operation object which carries the same
  // meaning as google.longrunning.Operation.error.message.
  ERROR_MESSAGE = 4;
}

View File

@@ -0,0 +1,49 @@
# -*- coding: utf-8 -*-
# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/cloud/extended_operations.proto
# Protobuf Python Version: 4.25.3
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import symbol_database as _symbol_database
from google.protobuf.internal import builder as _builder
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from google.protobuf import descriptor_pb2 as google_dot_protobuf_dot_descriptor__pb2
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
b"\n&google/cloud/extended_operations.proto\x12\x0cgoogle.cloud\x1a google/protobuf/descriptor.proto*b\n\x18OperationResponseMapping\x12\r\n\tUNDEFINED\x10\x00\x12\x08\n\x04NAME\x10\x01\x12\n\n\x06STATUS\x10\x02\x12\x0e\n\nERROR_CODE\x10\x03\x12\x11\n\rERROR_MESSAGE\x10\x04:_\n\x0foperation_field\x12\x1d.google.protobuf.FieldOptions\x18\xfd\x08 \x01(\x0e\x32&.google.cloud.OperationResponseMapping:?\n\x17operation_request_field\x12\x1d.google.protobuf.FieldOptions\x18\xfe\x08 \x01(\t:@\n\x18operation_response_field\x12\x1d.google.protobuf.FieldOptions\x18\xff\x08 \x01(\t::\n\x11operation_service\x12\x1e.google.protobuf.MethodOptions\x18\xe1\t \x01(\t:A\n\x18operation_polling_method\x12\x1e.google.protobuf.MethodOptions\x18\xe2\t \x01(\x08\x42y\n\x10\x63om.google.cloudB\x17\x45xtendedOperationsProtoP\x01ZCgoogle.golang.org/genproto/googleapis/cloud/extendedops;extendedops\xa2\x02\x04GAPIb\x06proto3"
)
_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
_builder.BuildTopDescriptorsAndMessages(
DESCRIPTOR, "google.cloud.extended_operations_pb2", _globals
)
if _descriptor._USE_C_DESCRIPTORS == False:
_globals["DESCRIPTOR"]._options = None
_globals[
"DESCRIPTOR"
]._serialized_options = b"\n\020com.google.cloudB\027ExtendedOperationsProtoP\001ZCgoogle.golang.org/genproto/googleapis/cloud/extendedops;extendedops\242\002\004GAPI"
_globals["_OPERATIONRESPONSEMAPPING"]._serialized_start = 90
_globals["_OPERATIONRESPONSEMAPPING"]._serialized_end = 188
# @@protoc_insertion_point(module_scope)

View File

@@ -0,0 +1,45 @@
# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import ClassVar as _ClassVar
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pb2 as _descriptor_pb2
from google.protobuf.internal import enum_type_wrapper as _enum_type_wrapper
# Descriptor for google/cloud/extended_operations.proto.
DESCRIPTOR: _descriptor.FileDescriptor
class OperationResponseMapping(int, metaclass=_enum_type_wrapper.EnumTypeWrapper):
    # Enum marking which fields of a custom Operation message carry
    # google.longrunning.Operation semantics (name / status / error).
    __slots__ = ()
    UNDEFINED: _ClassVar[OperationResponseMapping]
    NAME: _ClassVar[OperationResponseMapping]
    STATUS: _ClassVar[OperationResponseMapping]
    ERROR_CODE: _ClassVar[OperationResponseMapping]
    ERROR_MESSAGE: _ClassVar[OperationResponseMapping]
# Module-level enum value aliases, mirroring the generated _pb2 module.
UNDEFINED: OperationResponseMapping
NAME: OperationResponseMapping
STATUS: OperationResponseMapping
ERROR_CODE: OperationResponseMapping
ERROR_MESSAGE: OperationResponseMapping
# Extension field numbers and their descriptor objects, as registered by
# the generated _pb2 module.
OPERATION_FIELD_FIELD_NUMBER: _ClassVar[int]
operation_field: _descriptor.FieldDescriptor
OPERATION_REQUEST_FIELD_FIELD_NUMBER: _ClassVar[int]
operation_request_field: _descriptor.FieldDescriptor
OPERATION_RESPONSE_FIELD_FIELD_NUMBER: _ClassVar[int]
operation_response_field: _descriptor.FieldDescriptor
OPERATION_SERVICE_FIELD_NUMBER: _ClassVar[int]
operation_service: _descriptor.FieldDescriptor
OPERATION_POLLING_METHOD_FIELD_NUMBER: _ClassVar[int]
operation_polling_method: _descriptor.FieldDescriptor

View File

@@ -0,0 +1,99 @@
# Copyright 2017 Google LLC All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Python idiomatic client for Google Cloud Firestore."""
from google.cloud.firestore_v1 import gapic_version as package_version
__version__ = package_version.__version__
from google.cloud.firestore_v1 import And
from google.cloud.firestore_v1 import ArrayRemove
from google.cloud.firestore_v1 import ArrayUnion
from google.cloud.firestore_v1 import AsyncClient
from google.cloud.firestore_v1 import AsyncCollectionReference
from google.cloud.firestore_v1 import AsyncDocumentReference
from google.cloud.firestore_v1 import AsyncQuery
from google.cloud.firestore_v1 import async_transactional
from google.cloud.firestore_v1 import AsyncTransaction
from google.cloud.firestore_v1 import AsyncWriteBatch
from google.cloud.firestore_v1 import Client
from google.cloud.firestore_v1 import CountAggregation
from google.cloud.firestore_v1 import CollectionGroup
from google.cloud.firestore_v1 import CollectionReference
from google.cloud.firestore_v1 import DELETE_FIELD
from google.cloud.firestore_v1 import DocumentReference
from google.cloud.firestore_v1 import DocumentSnapshot
from google.cloud.firestore_v1 import DocumentTransform
from google.cloud.firestore_v1 import ExistsOption
from google.cloud.firestore_v1 import ExplainOptions
from google.cloud.firestore_v1 import FieldFilter
from google.cloud.firestore_v1 import GeoPoint
from google.cloud.firestore_v1 import Increment
from google.cloud.firestore_v1 import LastUpdateOption
from google.cloud.firestore_v1 import Maximum
from google.cloud.firestore_v1 import Minimum
from google.cloud.firestore_v1 import Or
from google.cloud.firestore_v1 import Query
from google.cloud.firestore_v1 import ReadAfterWriteError
from google.cloud.firestore_v1 import SERVER_TIMESTAMP
from google.cloud.firestore_v1 import Transaction
from google.cloud.firestore_v1 import transactional
from google.cloud.firestore_v1 import types
from google.cloud.firestore_v1 import Watch
from google.cloud.firestore_v1 import WriteBatch
from google.cloud.firestore_v1 import WriteOption
from typing import List
# Explicit public API of the ``google.cloud.firestore`` package; star-imports
# and documentation tooling rely on this list matching the imports above.
__all__: List[str] = [
    "__version__",
    "And",
    "ArrayRemove",
    "ArrayUnion",
    "AsyncClient",
    "AsyncCollectionReference",
    "AsyncDocumentReference",
    "AsyncQuery",
    "async_transactional",
    "AsyncTransaction",
    "AsyncWriteBatch",
    "Client",
    "CountAggregation",
    "CollectionGroup",
    "CollectionReference",
    "DELETE_FIELD",
    "DocumentReference",
    "DocumentSnapshot",
    "DocumentTransform",
    "ExistsOption",
    "ExplainOptions",
    "FieldFilter",
    "GeoPoint",
    "Increment",
    "LastUpdateOption",
    "Maximum",
    "Minimum",
    "Or",
    "Query",
    "ReadAfterWriteError",
    "SERVER_TIMESTAMP",
    "Transaction",
    "transactional",
    "types",
    "Watch",
    "WriteBatch",
    "WriteOption",
]

View File

@@ -0,0 +1,16 @@
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
__version__ = "2.21.0" # {x-release-please-version}

View File

@@ -0,0 +1,65 @@
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from .services.firestore_admin import FirestoreAdminClient
from .types.field import Field
from .types.firestore_admin import CreateIndexRequest
from .types.firestore_admin import DeleteIndexRequest
from .types.firestore_admin import ExportDocumentsRequest
from .types.firestore_admin import GetFieldRequest
from .types.firestore_admin import GetIndexRequest
from .types.firestore_admin import ImportDocumentsRequest
from .types.firestore_admin import ListFieldsRequest
from .types.firestore_admin import ListFieldsResponse
from .types.firestore_admin import ListIndexesRequest
from .types.firestore_admin import ListIndexesResponse
from .types.firestore_admin import UpdateFieldRequest
from .types.index import Index
from .types.location import LocationMetadata
from .types.operation import ExportDocumentsMetadata
from .types.operation import ExportDocumentsResponse
from .types.operation import FieldOperationMetadata
from .types.operation import ImportDocumentsMetadata
from .types.operation import IndexOperationMetadata
from .types.operation import OperationState
from .types.operation import Progress
# Public API surface of google.cloud.firestore_admin_v1; mirrors the request,
# response, and operation types imported above plus the service client.
__all__ = (
    "CreateIndexRequest",
    "DeleteIndexRequest",
    "ExportDocumentsMetadata",
    "ExportDocumentsRequest",
    "ExportDocumentsResponse",
    "Field",
    "FieldOperationMetadata",
    "GetFieldRequest",
    "GetIndexRequest",
    "ImportDocumentsMetadata",
    "ImportDocumentsRequest",
    "Index",
    "IndexOperationMetadata",
    "ListFieldsRequest",
    "ListFieldsResponse",
    "ListIndexesRequest",
    "ListIndexesResponse",
    "LocationMetadata",
    "OperationState",
    "Progress",
    "UpdateFieldRequest",
    "FirestoreAdminClient",
)

View File

@@ -0,0 +1,493 @@
{
"comment": "This file maps proto services/RPCs to the corresponding library clients/methods",
"language": "python",
"libraryPackage": "google.cloud.firestore_admin_v1",
"protoPackage": "google.firestore.admin.v1",
"schema": "1.0",
"services": {
"FirestoreAdmin": {
"clients": {
"grpc": {
"libraryClient": "FirestoreAdminClient",
"rpcs": {
"BulkDeleteDocuments": {
"methods": [
"bulk_delete_documents"
]
},
"CreateBackupSchedule": {
"methods": [
"create_backup_schedule"
]
},
"CreateDatabase": {
"methods": [
"create_database"
]
},
"CreateIndex": {
"methods": [
"create_index"
]
},
"CreateUserCreds": {
"methods": [
"create_user_creds"
]
},
"DeleteBackup": {
"methods": [
"delete_backup"
]
},
"DeleteBackupSchedule": {
"methods": [
"delete_backup_schedule"
]
},
"DeleteDatabase": {
"methods": [
"delete_database"
]
},
"DeleteIndex": {
"methods": [
"delete_index"
]
},
"DeleteUserCreds": {
"methods": [
"delete_user_creds"
]
},
"DisableUserCreds": {
"methods": [
"disable_user_creds"
]
},
"EnableUserCreds": {
"methods": [
"enable_user_creds"
]
},
"ExportDocuments": {
"methods": [
"export_documents"
]
},
"GetBackup": {
"methods": [
"get_backup"
]
},
"GetBackupSchedule": {
"methods": [
"get_backup_schedule"
]
},
"GetDatabase": {
"methods": [
"get_database"
]
},
"GetField": {
"methods": [
"get_field"
]
},
"GetIndex": {
"methods": [
"get_index"
]
},
"GetUserCreds": {
"methods": [
"get_user_creds"
]
},
"ImportDocuments": {
"methods": [
"import_documents"
]
},
"ListBackupSchedules": {
"methods": [
"list_backup_schedules"
]
},
"ListBackups": {
"methods": [
"list_backups"
]
},
"ListDatabases": {
"methods": [
"list_databases"
]
},
"ListFields": {
"methods": [
"list_fields"
]
},
"ListIndexes": {
"methods": [
"list_indexes"
]
},
"ListUserCreds": {
"methods": [
"list_user_creds"
]
},
"ResetUserPassword": {
"methods": [
"reset_user_password"
]
},
"RestoreDatabase": {
"methods": [
"restore_database"
]
},
"UpdateBackupSchedule": {
"methods": [
"update_backup_schedule"
]
},
"UpdateDatabase": {
"methods": [
"update_database"
]
},
"UpdateField": {
"methods": [
"update_field"
]
}
}
},
"grpc-async": {
"libraryClient": "FirestoreAdminAsyncClient",
"rpcs": {
"BulkDeleteDocuments": {
"methods": [
"bulk_delete_documents"
]
},
"CreateBackupSchedule": {
"methods": [
"create_backup_schedule"
]
},
"CreateDatabase": {
"methods": [
"create_database"
]
},
"CreateIndex": {
"methods": [
"create_index"
]
},
"CreateUserCreds": {
"methods": [
"create_user_creds"
]
},
"DeleteBackup": {
"methods": [
"delete_backup"
]
},
"DeleteBackupSchedule": {
"methods": [
"delete_backup_schedule"
]
},
"DeleteDatabase": {
"methods": [
"delete_database"
]
},
"DeleteIndex": {
"methods": [
"delete_index"
]
},
"DeleteUserCreds": {
"methods": [
"delete_user_creds"
]
},
"DisableUserCreds": {
"methods": [
"disable_user_creds"
]
},
"EnableUserCreds": {
"methods": [
"enable_user_creds"
]
},
"ExportDocuments": {
"methods": [
"export_documents"
]
},
"GetBackup": {
"methods": [
"get_backup"
]
},
"GetBackupSchedule": {
"methods": [
"get_backup_schedule"
]
},
"GetDatabase": {
"methods": [
"get_database"
]
},
"GetField": {
"methods": [
"get_field"
]
},
"GetIndex": {
"methods": [
"get_index"
]
},
"GetUserCreds": {
"methods": [
"get_user_creds"
]
},
"ImportDocuments": {
"methods": [
"import_documents"
]
},
"ListBackupSchedules": {
"methods": [
"list_backup_schedules"
]
},
"ListBackups": {
"methods": [
"list_backups"
]
},
"ListDatabases": {
"methods": [
"list_databases"
]
},
"ListFields": {
"methods": [
"list_fields"
]
},
"ListIndexes": {
"methods": [
"list_indexes"
]
},
"ListUserCreds": {
"methods": [
"list_user_creds"
]
},
"ResetUserPassword": {
"methods": [
"reset_user_password"
]
},
"RestoreDatabase": {
"methods": [
"restore_database"
]
},
"UpdateBackupSchedule": {
"methods": [
"update_backup_schedule"
]
},
"UpdateDatabase": {
"methods": [
"update_database"
]
},
"UpdateField": {
"methods": [
"update_field"
]
}
}
},
"rest": {
"libraryClient": "FirestoreAdminClient",
"rpcs": {
"BulkDeleteDocuments": {
"methods": [
"bulk_delete_documents"
]
},
"CreateBackupSchedule": {
"methods": [
"create_backup_schedule"
]
},
"CreateDatabase": {
"methods": [
"create_database"
]
},
"CreateIndex": {
"methods": [
"create_index"
]
},
"CreateUserCreds": {
"methods": [
"create_user_creds"
]
},
"DeleteBackup": {
"methods": [
"delete_backup"
]
},
"DeleteBackupSchedule": {
"methods": [
"delete_backup_schedule"
]
},
"DeleteDatabase": {
"methods": [
"delete_database"
]
},
"DeleteIndex": {
"methods": [
"delete_index"
]
},
"DeleteUserCreds": {
"methods": [
"delete_user_creds"
]
},
"DisableUserCreds": {
"methods": [
"disable_user_creds"
]
},
"EnableUserCreds": {
"methods": [
"enable_user_creds"
]
},
"ExportDocuments": {
"methods": [
"export_documents"
]
},
"GetBackup": {
"methods": [
"get_backup"
]
},
"GetBackupSchedule": {
"methods": [
"get_backup_schedule"
]
},
"GetDatabase": {
"methods": [
"get_database"
]
},
"GetField": {
"methods": [
"get_field"
]
},
"GetIndex": {
"methods": [
"get_index"
]
},
"GetUserCreds": {
"methods": [
"get_user_creds"
]
},
"ImportDocuments": {
"methods": [
"import_documents"
]
},
"ListBackupSchedules": {
"methods": [
"list_backup_schedules"
]
},
"ListBackups": {
"methods": [
"list_backups"
]
},
"ListDatabases": {
"methods": [
"list_databases"
]
},
"ListFields": {
"methods": [
"list_fields"
]
},
"ListIndexes": {
"methods": [
"list_indexes"
]
},
"ListUserCreds": {
"methods": [
"list_user_creds"
]
},
"ResetUserPassword": {
"methods": [
"reset_user_password"
]
},
"RestoreDatabase": {
"methods": [
"restore_database"
]
},
"UpdateBackupSchedule": {
"methods": [
"update_backup_schedule"
]
},
"UpdateDatabase": {
"methods": [
"update_database"
]
},
"UpdateField": {
"methods": [
"update_field"
]
}
}
}
}
}
}
}

View File

@@ -0,0 +1,16 @@
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
__version__ = "2.21.0" # {x-release-please-version}

View File

@@ -0,0 +1,2 @@
# Marker file for PEP 561.
# The google-cloud-firestore-admin package uses inline types.

View File

@@ -0,0 +1,15 @@
# -*- coding: utf-8 -*-
# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

View File

@@ -0,0 +1,22 @@
# -*- coding: utf-8 -*-
# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from .client import FirestoreAdminClient
from .async_client import FirestoreAdminAsyncClient
# Public clients exported by the firestore_admin services subpackage.
__all__ = (
    "FirestoreAdminClient",
    "FirestoreAdminAsyncClient",
)

View File

@@ -0,0 +1,354 @@
# -*- coding: utf-8 -*-
# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from google.api_core import gapic_v1
from google.api_core import retry as retries
from google.api_core import retry_async as retries_async
from typing import (
Any,
AsyncIterator,
Awaitable,
Callable,
Sequence,
Tuple,
Optional,
Iterator,
Union,
)
try:
OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None]
OptionalAsyncRetry = Union[
retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None
]
except AttributeError: # pragma: NO COVER
OptionalRetry = Union[retries.Retry, object, None] # type: ignore
OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore
from google.cloud.firestore_admin_v1.types import field
from google.cloud.firestore_admin_v1.types import firestore_admin
from google.cloud.firestore_admin_v1.types import index
class ListIndexesPager:
    """Synchronous pager for ``list_indexes`` results.

    Wraps an initial
    :class:`google.cloud.firestore_admin_v1.types.ListIndexesResponse` and
    exposes ``__iter__`` over its ``indexes`` field, transparently issuing
    further ``ListIndexes`` requests whenever a ``next_page_token`` is
    present on the current page.

    Unknown attribute lookups fall through to the most recently fetched
    response, so its fields remain available directly on the pager.
    """

    def __init__(
        self,
        method: Callable[..., firestore_admin.ListIndexesResponse],
        request: firestore_admin.ListIndexesRequest,
        response: firestore_admin.ListIndexesResponse,
        *,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()
    ):
        """Instantiate the pager.

        Args:
            method (Callable): The method that was originally called, and
                which instantiated this pager.
            request (google.cloud.firestore_admin_v1.types.ListIndexesRequest):
                The initial request object.
            response (google.cloud.firestore_admin_v1.types.ListIndexesResponse):
                The initial response object.
            retry (google.api_core.retry.Retry): Designation of what errors,
                if any, should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
                sent along with the request as metadata. Normally, each value must be of type `str`,
                but for metadata keys ending with the suffix `-bin`, the corresponding values must
                be of type `bytes`.
        """
        self._method = method
        # Copy the request so the page token can advance without mutating
        # the caller's object.
        self._request = firestore_admin.ListIndexesRequest(request)
        self._response = response
        self._retry = retry
        self._timeout = timeout
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Delegate unknown attributes to the latest response.
        return getattr(self._response, name)

    @property
    def pages(self) -> Iterator[firestore_admin.ListIndexesResponse]:
        current = self._response
        yield current
        while current.next_page_token:
            self._request.page_token = current.next_page_token
            current = self._method(
                self._request,
                retry=self._retry,
                timeout=self._timeout,
                metadata=self._metadata,
            )
            self._response = current
            yield current

    def __iter__(self) -> Iterator[index.Index]:
        return (item for page in self.pages for item in page.indexes)

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}<{self._response!r}>"
class ListIndexesAsyncPager:
    """Asynchronous pager for ``list_indexes`` results.

    Wraps an initial
    :class:`google.cloud.firestore_admin_v1.types.ListIndexesResponse` and
    exposes ``__aiter__`` over its ``indexes`` field, transparently issuing
    further ``ListIndexes`` requests whenever a ``next_page_token`` is
    present on the current page.

    Unknown attribute lookups fall through to the most recently fetched
    response, so its fields remain available directly on the pager.
    """

    def __init__(
        self,
        method: Callable[..., Awaitable[firestore_admin.ListIndexesResponse]],
        request: firestore_admin.ListIndexesRequest,
        response: firestore_admin.ListIndexesResponse,
        *,
        retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()
    ):
        """Instantiates the pager.

        Args:
            method (Callable): The method that was originally called, and
                which instantiated this pager.
            request (google.cloud.firestore_admin_v1.types.ListIndexesRequest):
                The initial request object.
            response (google.cloud.firestore_admin_v1.types.ListIndexesResponse):
                The initial response object.
            retry (google.api_core.retry.AsyncRetry): Designation of what errors,
                if any, should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
                sent along with the request as metadata. Normally, each value must be of type `str`,
                but for metadata keys ending with the suffix `-bin`, the corresponding values must
                be of type `bytes`.
        """
        self._method = method
        # Copy the request so the page token can advance without mutating
        # the caller's object.
        self._request = firestore_admin.ListIndexesRequest(request)
        self._response = response
        self._retry = retry
        self._timeout = timeout
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Delegate unknown attributes to the latest response.
        return getattr(self._response, name)

    @property
    async def pages(self) -> AsyncIterator[firestore_admin.ListIndexesResponse]:
        current = self._response
        yield current
        while current.next_page_token:
            self._request.page_token = current.next_page_token
            current = await self._method(
                self._request,
                retry=self._retry,
                timeout=self._timeout,
                metadata=self._metadata,
            )
            self._response = current
            yield current

    def __aiter__(self) -> AsyncIterator[index.Index]:
        async def _flatten():
            async for page in self.pages:
                for item in page.indexes:
                    yield item

        return _flatten()

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}<{self._response!r}>"
class ListFieldsPager:
    """Synchronous pager for ``list_fields`` results.

    Wraps an initial
    :class:`google.cloud.firestore_admin_v1.types.ListFieldsResponse` and
    exposes ``__iter__`` over its ``fields`` field, transparently issuing
    further ``ListFields`` requests whenever a ``next_page_token`` is
    present on the current page.

    Unknown attribute lookups fall through to the most recently fetched
    response, so its fields remain available directly on the pager.
    """

    def __init__(
        self,
        method: Callable[..., firestore_admin.ListFieldsResponse],
        request: firestore_admin.ListFieldsRequest,
        response: firestore_admin.ListFieldsResponse,
        *,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()
    ):
        """Instantiate the pager.

        Args:
            method (Callable): The method that was originally called, and
                which instantiated this pager.
            request (google.cloud.firestore_admin_v1.types.ListFieldsRequest):
                The initial request object.
            response (google.cloud.firestore_admin_v1.types.ListFieldsResponse):
                The initial response object.
            retry (google.api_core.retry.Retry): Designation of what errors,
                if any, should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
                sent along with the request as metadata. Normally, each value must be of type `str`,
                but for metadata keys ending with the suffix `-bin`, the corresponding values must
                be of type `bytes`.
        """
        self._method = method
        # Copy the request so the page token can advance without mutating
        # the caller's object.
        self._request = firestore_admin.ListFieldsRequest(request)
        self._response = response
        self._retry = retry
        self._timeout = timeout
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Delegate unknown attributes to the latest response.
        return getattr(self._response, name)

    @property
    def pages(self) -> Iterator[firestore_admin.ListFieldsResponse]:
        current = self._response
        yield current
        while current.next_page_token:
            self._request.page_token = current.next_page_token
            current = self._method(
                self._request,
                retry=self._retry,
                timeout=self._timeout,
                metadata=self._metadata,
            )
            self._response = current
            yield current

    def __iter__(self) -> Iterator[field.Field]:
        return (item for page in self.pages for item in page.fields)

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}<{self._response!r}>"
class ListFieldsAsyncPager:
    """Asynchronous pager for ``list_fields`` results.

    Wraps an initial
    :class:`google.cloud.firestore_admin_v1.types.ListFieldsResponse` and
    exposes ``__aiter__`` over its ``fields`` field, transparently issuing
    further ``ListFields`` requests whenever a ``next_page_token`` is
    present on the current page.

    Unknown attribute lookups fall through to the most recently fetched
    response, so its fields remain available directly on the pager.
    """

    def __init__(
        self,
        method: Callable[..., Awaitable[firestore_admin.ListFieldsResponse]],
        request: firestore_admin.ListFieldsRequest,
        response: firestore_admin.ListFieldsResponse,
        *,
        retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()
    ):
        """Instantiates the pager.

        Args:
            method (Callable): The method that was originally called, and
                which instantiated this pager.
            request (google.cloud.firestore_admin_v1.types.ListFieldsRequest):
                The initial request object.
            response (google.cloud.firestore_admin_v1.types.ListFieldsResponse):
                The initial response object.
            retry (google.api_core.retry.AsyncRetry): Designation of what errors,
                if any, should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
                sent along with the request as metadata. Normally, each value must be of type `str`,
                but for metadata keys ending with the suffix `-bin`, the corresponding values must
                be of type `bytes`.
        """
        self._method = method
        # Copy the request so the page token can advance without mutating
        # the caller's object.
        self._request = firestore_admin.ListFieldsRequest(request)
        self._response = response
        self._retry = retry
        self._timeout = timeout
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Delegate unknown attributes to the latest response.
        return getattr(self._response, name)

    @property
    async def pages(self) -> AsyncIterator[firestore_admin.ListFieldsResponse]:
        current = self._response
        yield current
        while current.next_page_token:
            self._request.page_token = current.next_page_token
            current = await self._method(
                self._request,
                retry=self._retry,
                timeout=self._timeout,
                metadata=self._metadata,
            )
            self._response = current
            yield current

    def __aiter__(self) -> AsyncIterator[field.Field]:
        async def _flatten():
            async for page in self.pages:
                for item in page.fields:
                    yield item

        return _flatten()

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}<{self._response!r}>"

View File

@@ -0,0 +1,38 @@
# -*- coding: utf-8 -*-
# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
from typing import Dict, Type
from .base import FirestoreAdminTransport
from .grpc import FirestoreAdminGrpcTransport
from .grpc_asyncio import FirestoreAdminGrpcAsyncIOTransport
from .rest import FirestoreAdminRestTransport
from .rest import FirestoreAdminRestInterceptor
# Registry mapping a transport name to its concrete class. Insertion order is
# preserved (OrderedDict), so "grpc" stays the first/default entry.
_transport_registry = OrderedDict(
    [
        ("grpc", FirestoreAdminGrpcTransport),
        ("grpc_asyncio", FirestoreAdminGrpcAsyncIOTransport),
        ("rest", FirestoreAdminRestTransport),
    ]
)  # type: Dict[str, Type[FirestoreAdminTransport]]
__all__ = (
    "FirestoreAdminTransport",
    "FirestoreAdminGrpcTransport",
    "FirestoreAdminGrpcAsyncIOTransport",
    "FirestoreAdminRestTransport",
    "FirestoreAdminRestInterceptor",
)

View File

@@ -0,0 +1,729 @@
# -*- coding: utf-8 -*-
# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import abc
from typing import Awaitable, Callable, Dict, Optional, Sequence, Union
from google.cloud.firestore_admin_v1 import gapic_version as package_version
import google.auth # type: ignore
import google.api_core
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import retry as retries
from google.api_core import operations_v1
from google.auth import credentials as ga_credentials # type: ignore
from google.oauth2 import service_account # type: ignore
import google.protobuf
from google.cloud.firestore_admin_v1.types import backup
from google.cloud.firestore_admin_v1.types import database
from google.cloud.firestore_admin_v1.types import field
from google.cloud.firestore_admin_v1.types import firestore_admin
from google.cloud.firestore_admin_v1.types import index
from google.cloud.firestore_admin_v1.types import schedule
from google.cloud.firestore_admin_v1.types import user_creds
from google.cloud.firestore_admin_v1.types import user_creds as gfa_user_creds
from google.cloud.location import locations_pb2 # type: ignore
from google.longrunning import operations_pb2 # type: ignore
from google.protobuf import empty_pb2 # type: ignore
# Default client metadata attached to outgoing requests; reports the generated
# GAPIC version so the backend can attribute traffic to this library.
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
    gapic_version=package_version.__version__
)
# Older google-api-core releases lack this attribute, so only report the
# protobuf runtime version when the field exists.
if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"):  # pragma: NO COVER
    DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__
class FirestoreAdminTransport(abc.ABC):
    """Abstract transport class for FirestoreAdmin.

    Concrete transports (gRPC, gRPC-asyncio, REST) implement the RPC
    properties declared below; this base class only handles credential
    resolution and retry/timeout wrapping shared by all of them.
    """
    # OAuth scopes requested when resolving default credentials.
    AUTH_SCOPES = (
        "https://www.googleapis.com/auth/cloud-platform",
        "https://www.googleapis.com/auth/datastore",
    )
    # Service endpoint used when the caller does not supply a host.
    DEFAULT_HOST: str = "firestore.googleapis.com"
    def __init__(
        self,
        *,
        host: str = DEFAULT_HOST,
        credentials: Optional[ga_credentials.Credentials] = None,
        credentials_file: Optional[str] = None,
        scopes: Optional[Sequence[str]] = None,
        quota_project_id: Optional[str] = None,
        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
        always_use_jwt_access: Optional[bool] = False,
        api_audience: Optional[str] = None,
        **kwargs,
    ) -> None:
        """Instantiate the transport.
        Args:
            host (Optional[str]):
                 The hostname to connect to (default: 'firestore.googleapis.com').
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is mutually exclusive with credentials.
            scopes (Optional[Sequence[str]]): A list of scopes.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.
            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
                be used for service account credentials.
            api_audience (Optional[str]): Audience applied to GDC-H
                credentials; defaults to ``host`` when not provided.
        """
        # Explicit scopes win; AUTH_SCOPES is the fallback used by google.auth.
        scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}
        # Save the scopes.
        self._scopes = scopes
        if not hasattr(self, "_ignore_credentials"):
            self._ignore_credentials: bool = False
        # If no credentials are provided, then determine the appropriate
        # defaults.
        if credentials and credentials_file:
            raise core_exceptions.DuplicateCredentialArgs(
                "'credentials_file' and 'credentials' are mutually exclusive"
            )
        if credentials_file is not None:
            credentials, _ = google.auth.load_credentials_from_file(
                credentials_file, **scopes_kwargs, quota_project_id=quota_project_id
            )
        elif credentials is None and not self._ignore_credentials:
            credentials, _ = google.auth.default(
                **scopes_kwargs, quota_project_id=quota_project_id
            )
            # Don't apply audience if the credentials file passed from user.
            if hasattr(credentials, "with_gdch_audience"):
                credentials = credentials.with_gdch_audience(
                    api_audience if api_audience else host
                )
        # If the credentials are service account credentials, then always try to use self signed JWT.
        if (
            always_use_jwt_access
            and isinstance(credentials, service_account.Credentials)
            and hasattr(service_account.Credentials, "with_always_use_jwt_access")
        ):
            credentials = credentials.with_always_use_jwt_access(True)
        # Save the credentials.
        self._credentials = credentials
        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
        if ":" not in host:
            host += ":443"
        self._host = host
    @property
    def host(self):
        # Resolved "host:port" endpoint string.
        return self._host
    def _prep_wrapped_messages(self, client_info):
        """Precompute retry/timeout-wrapped callables for every RPC method."""
        # Precompute the wrapped methods.
        self._wrapped_methods = {
            self.create_index: gapic_v1.method.wrap_method(
                self.create_index,
                default_timeout=60.0,
                client_info=client_info,
            ),
            self.list_indexes: gapic_v1.method.wrap_method(
                self.list_indexes,
                default_retry=retries.Retry(
                    initial=0.1,
                    maximum=60.0,
                    multiplier=1.3,
                    predicate=retries.if_exception_type(
                        core_exceptions.DeadlineExceeded,
                        core_exceptions.InternalServerError,
                        core_exceptions.ServiceUnavailable,
                    ),
                    deadline=60.0,
                ),
                default_timeout=60.0,
                client_info=client_info,
            ),
            self.get_index: gapic_v1.method.wrap_method(
                self.get_index,
                default_retry=retries.Retry(
                    initial=0.1,
                    maximum=60.0,
                    multiplier=1.3,
                    predicate=retries.if_exception_type(
                        core_exceptions.DeadlineExceeded,
                        core_exceptions.InternalServerError,
                        core_exceptions.ServiceUnavailable,
                    ),
                    deadline=60.0,
                ),
                default_timeout=60.0,
                client_info=client_info,
            ),
            self.delete_index: gapic_v1.method.wrap_method(
                self.delete_index,
                default_retry=retries.Retry(
                    initial=0.1,
                    maximum=60.0,
                    multiplier=1.3,
                    predicate=retries.if_exception_type(
                        core_exceptions.DeadlineExceeded,
                        core_exceptions.InternalServerError,
                        core_exceptions.ServiceUnavailable,
                    ),
                    deadline=60.0,
                ),
                default_timeout=60.0,
                client_info=client_info,
            ),
            self.get_field: gapic_v1.method.wrap_method(
                self.get_field,
                default_retry=retries.Retry(
                    initial=0.1,
                    maximum=60.0,
                    multiplier=1.3,
                    predicate=retries.if_exception_type(
                        core_exceptions.DeadlineExceeded,
                        core_exceptions.InternalServerError,
                        core_exceptions.ServiceUnavailable,
                    ),
                    deadline=60.0,
                ),
                default_timeout=60.0,
                client_info=client_info,
            ),
            self.update_field: gapic_v1.method.wrap_method(
                self.update_field,
                default_timeout=60.0,
                client_info=client_info,
            ),
            self.list_fields: gapic_v1.method.wrap_method(
                self.list_fields,
                default_retry=retries.Retry(
                    initial=0.1,
                    maximum=60.0,
                    multiplier=1.3,
                    predicate=retries.if_exception_type(
                        core_exceptions.DeadlineExceeded,
                        core_exceptions.InternalServerError,
                        core_exceptions.ServiceUnavailable,
                    ),
                    deadline=60.0,
                ),
                default_timeout=60.0,
                client_info=client_info,
            ),
            self.export_documents: gapic_v1.method.wrap_method(
                self.export_documents,
                default_timeout=60.0,
                client_info=client_info,
            ),
            self.import_documents: gapic_v1.method.wrap_method(
                self.import_documents,
                default_timeout=60.0,
                client_info=client_info,
            ),
            self.bulk_delete_documents: gapic_v1.method.wrap_method(
                self.bulk_delete_documents,
                default_timeout=60.0,
                client_info=client_info,
            ),
            self.create_database: gapic_v1.method.wrap_method(
                self.create_database,
                default_timeout=120.0,
                client_info=client_info,
            ),
            self.get_database: gapic_v1.method.wrap_method(
                self.get_database,
                default_timeout=None,
                client_info=client_info,
            ),
            self.list_databases: gapic_v1.method.wrap_method(
                self.list_databases,
                default_timeout=None,
                client_info=client_info,
            ),
            self.update_database: gapic_v1.method.wrap_method(
                self.update_database,
                default_timeout=None,
                client_info=client_info,
            ),
            self.delete_database: gapic_v1.method.wrap_method(
                self.delete_database,
                default_timeout=None,
                client_info=client_info,
            ),
            self.create_user_creds: gapic_v1.method.wrap_method(
                self.create_user_creds,
                default_timeout=None,
                client_info=client_info,
            ),
            self.get_user_creds: gapic_v1.method.wrap_method(
                self.get_user_creds,
                default_timeout=None,
                client_info=client_info,
            ),
            self.list_user_creds: gapic_v1.method.wrap_method(
                self.list_user_creds,
                default_timeout=None,
                client_info=client_info,
            ),
            self.enable_user_creds: gapic_v1.method.wrap_method(
                self.enable_user_creds,
                default_timeout=None,
                client_info=client_info,
            ),
            self.disable_user_creds: gapic_v1.method.wrap_method(
                self.disable_user_creds,
                default_timeout=None,
                client_info=client_info,
            ),
            self.reset_user_password: gapic_v1.method.wrap_method(
                self.reset_user_password,
                default_timeout=None,
                client_info=client_info,
            ),
            self.delete_user_creds: gapic_v1.method.wrap_method(
                self.delete_user_creds,
                default_timeout=None,
                client_info=client_info,
            ),
            self.get_backup: gapic_v1.method.wrap_method(
                self.get_backup,
                default_timeout=None,
                client_info=client_info,
            ),
            self.list_backups: gapic_v1.method.wrap_method(
                self.list_backups,
                default_timeout=None,
                client_info=client_info,
            ),
            self.delete_backup: gapic_v1.method.wrap_method(
                self.delete_backup,
                default_timeout=None,
                client_info=client_info,
            ),
            self.restore_database: gapic_v1.method.wrap_method(
                self.restore_database,
                default_timeout=120.0,
                client_info=client_info,
            ),
            self.create_backup_schedule: gapic_v1.method.wrap_method(
                self.create_backup_schedule,
                default_timeout=None,
                client_info=client_info,
            ),
            self.get_backup_schedule: gapic_v1.method.wrap_method(
                self.get_backup_schedule,
                default_timeout=None,
                client_info=client_info,
            ),
            self.list_backup_schedules: gapic_v1.method.wrap_method(
                self.list_backup_schedules,
                default_timeout=None,
                client_info=client_info,
            ),
            self.update_backup_schedule: gapic_v1.method.wrap_method(
                self.update_backup_schedule,
                default_timeout=None,
                client_info=client_info,
            ),
            self.delete_backup_schedule: gapic_v1.method.wrap_method(
                self.delete_backup_schedule,
                default_timeout=None,
                client_info=client_info,
            ),
            self.cancel_operation: gapic_v1.method.wrap_method(
                self.cancel_operation,
                default_timeout=None,
                client_info=client_info,
            ),
            self.delete_operation: gapic_v1.method.wrap_method(
                self.delete_operation,
                default_timeout=None,
                client_info=client_info,
            ),
            self.get_operation: gapic_v1.method.wrap_method(
                self.get_operation,
                default_timeout=None,
                client_info=client_info,
            ),
            self.list_operations: gapic_v1.method.wrap_method(
                self.list_operations,
                default_timeout=None,
                client_info=client_info,
            ),
        }
    def close(self):
        """Closes resources associated with the transport.
        .. warning::
             Only call this method if the transport is NOT shared
             with other clients - this may cause errors in other clients!
        """
        raise NotImplementedError()
    @property
    def operations_client(self):
        """Return the client designed to process long-running operations."""
        raise NotImplementedError()
    # Each property below exposes the transport-specific callable for one RPC.
    # Subclasses return either a sync result or an Awaitable, per the Union
    # return annotations.
    @property
    def create_index(
        self,
    ) -> Callable[
        [firestore_admin.CreateIndexRequest],
        Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]],
    ]:
        raise NotImplementedError()
    @property
    def list_indexes(
        self,
    ) -> Callable[
        [firestore_admin.ListIndexesRequest],
        Union[
            firestore_admin.ListIndexesResponse,
            Awaitable[firestore_admin.ListIndexesResponse],
        ],
    ]:
        raise NotImplementedError()
    @property
    def get_index(
        self,
    ) -> Callable[
        [firestore_admin.GetIndexRequest], Union[index.Index, Awaitable[index.Index]]
    ]:
        raise NotImplementedError()
    @property
    def delete_index(
        self,
    ) -> Callable[
        [firestore_admin.DeleteIndexRequest],
        Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]],
    ]:
        raise NotImplementedError()
    @property
    def get_field(
        self,
    ) -> Callable[
        [firestore_admin.GetFieldRequest], Union[field.Field, Awaitable[field.Field]]
    ]:
        raise NotImplementedError()
    @property
    def update_field(
        self,
    ) -> Callable[
        [firestore_admin.UpdateFieldRequest],
        Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]],
    ]:
        raise NotImplementedError()
    @property
    def list_fields(
        self,
    ) -> Callable[
        [firestore_admin.ListFieldsRequest],
        Union[
            firestore_admin.ListFieldsResponse,
            Awaitable[firestore_admin.ListFieldsResponse],
        ],
    ]:
        raise NotImplementedError()
    @property
    def export_documents(
        self,
    ) -> Callable[
        [firestore_admin.ExportDocumentsRequest],
        Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]],
    ]:
        raise NotImplementedError()
    @property
    def import_documents(
        self,
    ) -> Callable[
        [firestore_admin.ImportDocumentsRequest],
        Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]],
    ]:
        raise NotImplementedError()
    @property
    def bulk_delete_documents(
        self,
    ) -> Callable[
        [firestore_admin.BulkDeleteDocumentsRequest],
        Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]],
    ]:
        raise NotImplementedError()
    @property
    def create_database(
        self,
    ) -> Callable[
        [firestore_admin.CreateDatabaseRequest],
        Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]],
    ]:
        raise NotImplementedError()
    @property
    def get_database(
        self,
    ) -> Callable[
        [firestore_admin.GetDatabaseRequest],
        Union[database.Database, Awaitable[database.Database]],
    ]:
        raise NotImplementedError()
    @property
    def list_databases(
        self,
    ) -> Callable[
        [firestore_admin.ListDatabasesRequest],
        Union[
            firestore_admin.ListDatabasesResponse,
            Awaitable[firestore_admin.ListDatabasesResponse],
        ],
    ]:
        raise NotImplementedError()
    @property
    def update_database(
        self,
    ) -> Callable[
        [firestore_admin.UpdateDatabaseRequest],
        Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]],
    ]:
        raise NotImplementedError()
    @property
    def delete_database(
        self,
    ) -> Callable[
        [firestore_admin.DeleteDatabaseRequest],
        Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]],
    ]:
        raise NotImplementedError()
    @property
    def create_user_creds(
        self,
    ) -> Callable[
        [firestore_admin.CreateUserCredsRequest],
        Union[gfa_user_creds.UserCreds, Awaitable[gfa_user_creds.UserCreds]],
    ]:
        raise NotImplementedError()
    @property
    def get_user_creds(
        self,
    ) -> Callable[
        [firestore_admin.GetUserCredsRequest],
        Union[user_creds.UserCreds, Awaitable[user_creds.UserCreds]],
    ]:
        raise NotImplementedError()
    @property
    def list_user_creds(
        self,
    ) -> Callable[
        [firestore_admin.ListUserCredsRequest],
        Union[
            firestore_admin.ListUserCredsResponse,
            Awaitable[firestore_admin.ListUserCredsResponse],
        ],
    ]:
        raise NotImplementedError()
    @property
    def enable_user_creds(
        self,
    ) -> Callable[
        [firestore_admin.EnableUserCredsRequest],
        Union[user_creds.UserCreds, Awaitable[user_creds.UserCreds]],
    ]:
        raise NotImplementedError()
    @property
    def disable_user_creds(
        self,
    ) -> Callable[
        [firestore_admin.DisableUserCredsRequest],
        Union[user_creds.UserCreds, Awaitable[user_creds.UserCreds]],
    ]:
        raise NotImplementedError()
    @property
    def reset_user_password(
        self,
    ) -> Callable[
        [firestore_admin.ResetUserPasswordRequest],
        Union[user_creds.UserCreds, Awaitable[user_creds.UserCreds]],
    ]:
        raise NotImplementedError()
    @property
    def delete_user_creds(
        self,
    ) -> Callable[
        [firestore_admin.DeleteUserCredsRequest],
        Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]],
    ]:
        raise NotImplementedError()
    @property
    def get_backup(
        self,
    ) -> Callable[
        [firestore_admin.GetBackupRequest],
        Union[backup.Backup, Awaitable[backup.Backup]],
    ]:
        raise NotImplementedError()
    @property
    def list_backups(
        self,
    ) -> Callable[
        [firestore_admin.ListBackupsRequest],
        Union[
            firestore_admin.ListBackupsResponse,
            Awaitable[firestore_admin.ListBackupsResponse],
        ],
    ]:
        raise NotImplementedError()
    @property
    def delete_backup(
        self,
    ) -> Callable[
        [firestore_admin.DeleteBackupRequest],
        Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]],
    ]:
        raise NotImplementedError()
    @property
    def restore_database(
        self,
    ) -> Callable[
        [firestore_admin.RestoreDatabaseRequest],
        Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]],
    ]:
        raise NotImplementedError()
    @property
    def create_backup_schedule(
        self,
    ) -> Callable[
        [firestore_admin.CreateBackupScheduleRequest],
        Union[schedule.BackupSchedule, Awaitable[schedule.BackupSchedule]],
    ]:
        raise NotImplementedError()
    @property
    def get_backup_schedule(
        self,
    ) -> Callable[
        [firestore_admin.GetBackupScheduleRequest],
        Union[schedule.BackupSchedule, Awaitable[schedule.BackupSchedule]],
    ]:
        raise NotImplementedError()
    @property
    def list_backup_schedules(
        self,
    ) -> Callable[
        [firestore_admin.ListBackupSchedulesRequest],
        Union[
            firestore_admin.ListBackupSchedulesResponse,
            Awaitable[firestore_admin.ListBackupSchedulesResponse],
        ],
    ]:
        raise NotImplementedError()
    @property
    def update_backup_schedule(
        self,
    ) -> Callable[
        [firestore_admin.UpdateBackupScheduleRequest],
        Union[schedule.BackupSchedule, Awaitable[schedule.BackupSchedule]],
    ]:
        raise NotImplementedError()
    @property
    def delete_backup_schedule(
        self,
    ) -> Callable[
        [firestore_admin.DeleteBackupScheduleRequest],
        Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]],
    ]:
        raise NotImplementedError()
    @property
    def list_operations(
        self,
    ) -> Callable[
        [operations_pb2.ListOperationsRequest],
        Union[
            operations_pb2.ListOperationsResponse,
            Awaitable[operations_pb2.ListOperationsResponse],
        ],
    ]:
        raise NotImplementedError()
    @property
    def get_operation(
        self,
    ) -> Callable[
        [operations_pb2.GetOperationRequest],
        Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]],
    ]:
        raise NotImplementedError()
    @property
    def cancel_operation(
        self,
    ) -> Callable[[operations_pb2.CancelOperationRequest], None,]:
        raise NotImplementedError()
    @property
    def delete_operation(
        self,
    ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]:
        raise NotImplementedError()
    @property
    def kind(self) -> str:
        # Short transport identifier; concrete subclasses return e.g.
        # "grpc", "grpc_asyncio", or "rest" (see the transport registry).
        raise NotImplementedError()
# Only the abstract base is part of this module's public surface.
__all__ = ("FirestoreAdminTransport",)

View File

@@ -0,0 +1,154 @@
# -*- coding: utf-8 -*-
# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from .backup import (
Backup,
)
from .database import (
Database,
)
from .field import (
Field,
)
from .firestore_admin import (
BulkDeleteDocumentsRequest,
BulkDeleteDocumentsResponse,
CreateBackupScheduleRequest,
CreateDatabaseMetadata,
CreateDatabaseRequest,
CreateIndexRequest,
CreateUserCredsRequest,
DeleteBackupRequest,
DeleteBackupScheduleRequest,
DeleteDatabaseMetadata,
DeleteDatabaseRequest,
DeleteIndexRequest,
DeleteUserCredsRequest,
DisableUserCredsRequest,
EnableUserCredsRequest,
ExportDocumentsRequest,
GetBackupRequest,
GetBackupScheduleRequest,
GetDatabaseRequest,
GetFieldRequest,
GetIndexRequest,
GetUserCredsRequest,
ImportDocumentsRequest,
ListBackupSchedulesRequest,
ListBackupSchedulesResponse,
ListBackupsRequest,
ListBackupsResponse,
ListDatabasesRequest,
ListDatabasesResponse,
ListFieldsRequest,
ListFieldsResponse,
ListIndexesRequest,
ListIndexesResponse,
ListUserCredsRequest,
ListUserCredsResponse,
ResetUserPasswordRequest,
RestoreDatabaseRequest,
UpdateBackupScheduleRequest,
UpdateDatabaseMetadata,
UpdateDatabaseRequest,
UpdateFieldRequest,
)
from .index import (
Index,
)
from .location import (
LocationMetadata,
)
from .operation import (
BulkDeleteDocumentsMetadata,
ExportDocumentsMetadata,
ExportDocumentsResponse,
FieldOperationMetadata,
ImportDocumentsMetadata,
IndexOperationMetadata,
Progress,
RestoreDatabaseMetadata,
OperationState,
)
from .schedule import (
BackupSchedule,
DailyRecurrence,
WeeklyRecurrence,
)
from .user_creds import (
UserCreds,
)
# Public names re-exported by ``google.cloud.firestore_admin_v1.types``;
# mirrors the imports above, grouped by source module.
__all__ = (
    "Backup",
    "Database",
    "Field",
    "BulkDeleteDocumentsRequest",
    "BulkDeleteDocumentsResponse",
    "CreateBackupScheduleRequest",
    "CreateDatabaseMetadata",
    "CreateDatabaseRequest",
    "CreateIndexRequest",
    "CreateUserCredsRequest",
    "DeleteBackupRequest",
    "DeleteBackupScheduleRequest",
    "DeleteDatabaseMetadata",
    "DeleteDatabaseRequest",
    "DeleteIndexRequest",
    "DeleteUserCredsRequest",
    "DisableUserCredsRequest",
    "EnableUserCredsRequest",
    "ExportDocumentsRequest",
    "GetBackupRequest",
    "GetBackupScheduleRequest",
    "GetDatabaseRequest",
    "GetFieldRequest",
    "GetIndexRequest",
    "GetUserCredsRequest",
    "ImportDocumentsRequest",
    "ListBackupSchedulesRequest",
    "ListBackupSchedulesResponse",
    "ListBackupsRequest",
    "ListBackupsResponse",
    "ListDatabasesRequest",
    "ListDatabasesResponse",
    "ListFieldsRequest",
    "ListFieldsResponse",
    "ListIndexesRequest",
    "ListIndexesResponse",
    "ListUserCredsRequest",
    "ListUserCredsResponse",
    "ResetUserPasswordRequest",
    "RestoreDatabaseRequest",
    "UpdateBackupScheduleRequest",
    "UpdateDatabaseMetadata",
    "UpdateDatabaseRequest",
    "UpdateFieldRequest",
    "Index",
    "LocationMetadata",
    "BulkDeleteDocumentsMetadata",
    "ExportDocumentsMetadata",
    "ExportDocumentsResponse",
    "FieldOperationMetadata",
    "ImportDocumentsMetadata",
    "IndexOperationMetadata",
    "Progress",
    "RestoreDatabaseMetadata",
    "OperationState",
    "BackupSchedule",
    "DailyRecurrence",
    "WeeklyRecurrence",
    "UserCreds",
)

View File

@@ -0,0 +1,153 @@
# -*- coding: utf-8 -*-
# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import annotations
from typing import MutableMapping, MutableSequence
import proto # type: ignore
from google.protobuf import timestamp_pb2 # type: ignore
# proto-plus module descriptor: registers the messages listed in ``manifest``
# under the ``google.firestore.admin.v1`` proto package.
__protobuf__ = proto.module(
    package="google.firestore.admin.v1",
    manifest={
        "Backup",
    },
)
class Backup(proto.Message):
    r"""A Backup of a Cloud Firestore Database.
    The backup contains all documents and index configurations for
    the given database at a specific point in time.
    Attributes:
        name (str):
            Output only. The unique resource name of the Backup.
            Format is
            ``projects/{project}/locations/{location}/backups/{backup}``.
        database (str):
            Output only. Name of the Firestore database that the backup
            is from.
            Format is ``projects/{project}/databases/{database}``.
        database_uid (str):
            Output only. The system-generated UUID4 for
            the Firestore database that the backup is from.
        snapshot_time (google.protobuf.timestamp_pb2.Timestamp):
            Output only. The backup contains an
            externally consistent copy of the database at
            this time.
        expire_time (google.protobuf.timestamp_pb2.Timestamp):
            Output only. The timestamp at which this
            backup expires.
        stats (google.cloud.firestore_admin_v1.types.Backup.Stats):
            Output only. Statistics about the backup.
            This data only becomes available after the
            backup is fully materialized to secondary
            storage. This field will be empty till then.
        state (google.cloud.firestore_admin_v1.types.Backup.State):
            Output only. The current state of the backup.
    """
    class State(proto.Enum):
        r"""Indicate the current state of the backup.
        Values:
            STATE_UNSPECIFIED (0):
                The state is unspecified.
            CREATING (1):
                The pending backup is still being created.
                Operations on the backup will be rejected in
                this state.
            READY (2):
                The backup is complete and ready to use.
            NOT_AVAILABLE (3):
                The backup is not available at this moment.
        """
        STATE_UNSPECIFIED = 0
        CREATING = 1
        READY = 2
        NOT_AVAILABLE = 3
    class Stats(proto.Message):
        r"""Backup specific statistics.
        Attributes:
            size_bytes (int):
                Output only. Summation of the size of all
                documents and index entries in the backup,
                measured in bytes.
            document_count (int):
                Output only. The total number of documents
                contained in the backup.
            index_count (int):
                Output only. The total number of index
                entries contained in the backup.
        """
        size_bytes: int = proto.Field(
            proto.INT64,
            number=1,
        )
        document_count: int = proto.Field(
            proto.INT64,
            number=2,
        )
        index_count: int = proto.Field(
            proto.INT64,
            number=3,
        )
    # Wire-format field numbers below are fixed by the service proto and are
    # intentionally non-sequential (5 is unused here — presumably reserved
    # upstream; do not reuse it).
    name: str = proto.Field(
        proto.STRING,
        number=1,
    )
    database: str = proto.Field(
        proto.STRING,
        number=2,
    )
    database_uid: str = proto.Field(
        proto.STRING,
        number=7,
    )
    snapshot_time: timestamp_pb2.Timestamp = proto.Field(
        proto.MESSAGE,
        number=3,
        message=timestamp_pb2.Timestamp,
    )
    expire_time: timestamp_pb2.Timestamp = proto.Field(
        proto.MESSAGE,
        number=4,
        message=timestamp_pb2.Timestamp,
    )
    stats: Stats = proto.Field(
        proto.MESSAGE,
        number=6,
        message=Stats,
    )
    state: State = proto.Field(
        proto.ENUM,
        number=8,
        enum=State,
    )
# Export every message registered in this module's proto manifest.
__all__ = tuple(sorted(__protobuf__.manifest))

View File

@@ -0,0 +1,543 @@
# -*- coding: utf-8 -*-
# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import annotations
from typing import MutableMapping, MutableSequence
import proto # type: ignore
from google.protobuf import duration_pb2 # type: ignore
from google.protobuf import timestamp_pb2 # type: ignore
# proto-plus module descriptor: registers the messages listed in ``manifest``
# under the ``google.firestore.admin.v1`` proto package.
__protobuf__ = proto.module(
    package="google.firestore.admin.v1",
    manifest={
        "Database",
    },
)
class Database(proto.Message):
r"""A Cloud Firestore Database.
.. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
Attributes:
name (str):
The resource name of the Database. Format:
``projects/{project}/databases/{database}``
uid (str):
Output only. The system-generated UUID4 for
this Database.
create_time (google.protobuf.timestamp_pb2.Timestamp):
Output only. The timestamp at which this database was
created. Databases created before 2016 do not populate
create_time.
update_time (google.protobuf.timestamp_pb2.Timestamp):
Output only. The timestamp at which this
database was most recently updated. Note this
only includes updates to the database resource
and not data contained by the database.
delete_time (google.protobuf.timestamp_pb2.Timestamp):
Output only. The timestamp at which this
database was deleted. Only set if the database
has been deleted.
location_id (str):
The location of the database. Available
locations are listed at
https://cloud.google.com/firestore/docs/locations.
type_ (google.cloud.firestore_admin_v1.types.Database.DatabaseType):
The type of the database.
See
https://cloud.google.com/datastore/docs/firestore-or-datastore
for information about how to choose.
concurrency_mode (google.cloud.firestore_admin_v1.types.Database.ConcurrencyMode):
The concurrency control mode to use for this
database.
version_retention_period (google.protobuf.duration_pb2.Duration):
Output only. The period during which past versions of data
are retained in the database.
Any [read][google.firestore.v1.GetDocumentRequest.read_time]
or
[query][google.firestore.v1.ListDocumentsRequest.read_time]
can specify a ``read_time`` within this window, and will
read the state of the database at that time.
If the PITR feature is enabled, the retention period is 7
days. Otherwise, the retention period is 1 hour.
earliest_version_time (google.protobuf.timestamp_pb2.Timestamp):
Output only. The earliest timestamp at which older versions
of the data can be read from the database. See
[version_retention_period] above; this field is populated
with ``now - version_retention_period``.
This value is continuously updated, and becomes stale the
moment it is queried. If you are using this value to recover
data, make sure to account for the time from the moment when
the value is queried to the moment when you initiate the
recovery.
point_in_time_recovery_enablement (google.cloud.firestore_admin_v1.types.Database.PointInTimeRecoveryEnablement):
Whether to enable the PITR feature on this
database.
app_engine_integration_mode (google.cloud.firestore_admin_v1.types.Database.AppEngineIntegrationMode):
The App Engine integration mode to use for
this database.
key_prefix (str):
Output only. The key_prefix for this database. This
key_prefix is used, in combination with the project ID ("~")
to construct the application ID that is returned from the
Cloud Datastore APIs in Google App Engine first generation
runtimes.
This value may be empty in which case the appid to use for
URL-encoded keys is the project_id (eg: foo instead of
v~foo).
delete_protection_state (google.cloud.firestore_admin_v1.types.Database.DeleteProtectionState):
State of delete protection for the database.
cmek_config (google.cloud.firestore_admin_v1.types.Database.CmekConfig):
Optional. Presence indicates CMEK is enabled
for this database.
previous_id (str):
Output only. The database resource's prior
database ID. This field is only populated for
deleted databases.
source_info (google.cloud.firestore_admin_v1.types.Database.SourceInfo):
Output only. Information about the provenance
of this database.
free_tier (bool):
Output only. Background: Free tier is the
ability of a Firestore database to use a small
amount of resources every day without being
charged. Once usage exceeds the free tier limit
further usage is charged.
Whether this database can make use of the free
tier. Only one database per project can be
eligible for the free tier.
The first (or next) database that is created in
a project without a free tier database will be
marked as eligible for the free tier. Databases
that are created while there is a free tier
database will not be eligible for the free tier.
This field is a member of `oneof`_ ``_free_tier``.
etag (str):
This checksum is computed by the server based
on the value of other fields, and may be sent on
update and delete requests to ensure the client
has an up-to-date value before proceeding.
database_edition (google.cloud.firestore_admin_v1.types.Database.DatabaseEdition):
Immutable. The edition of the database.
"""
class DatabaseType(proto.Enum):
r"""The type of the database.
See
https://cloud.google.com/datastore/docs/firestore-or-datastore
for information about how to choose.
Mode changes are only allowed if the database is empty.
Values:
DATABASE_TYPE_UNSPECIFIED (0):
Not used.
FIRESTORE_NATIVE (1):
Firestore Native Mode
DATASTORE_MODE (2):
Firestore in Datastore Mode.
"""
DATABASE_TYPE_UNSPECIFIED = 0
FIRESTORE_NATIVE = 1
DATASTORE_MODE = 2
class ConcurrencyMode(proto.Enum):
    r"""The type of concurrency control mode for transactions.

    Values:
        CONCURRENCY_MODE_UNSPECIFIED (0):
            Not used.
        OPTIMISTIC (1):
            Use optimistic concurrency control by
            default. This mode is available for Cloud
            Firestore databases.
        PESSIMISTIC (2):
            Use pessimistic concurrency control by
            default. This mode is available for Cloud
            Firestore databases.

            This is the default setting for Cloud Firestore.
        OPTIMISTIC_WITH_ENTITY_GROUPS (3):
            Use optimistic concurrency control with
            entity groups by default.

            This is the only available mode for Cloud
            Datastore.

            This mode is also available for Cloud Firestore
            with Datastore Mode but is not recommended.
    """
    # NOTE: generated proto enum — values mirror the .proto definition.
    CONCURRENCY_MODE_UNSPECIFIED = 0
    OPTIMISTIC = 1
    PESSIMISTIC = 2
    OPTIMISTIC_WITH_ENTITY_GROUPS = 3
class PointInTimeRecoveryEnablement(proto.Enum):
    r"""Point In Time Recovery feature enablement.

    Values:
        POINT_IN_TIME_RECOVERY_ENABLEMENT_UNSPECIFIED (0):
            Not used.
        POINT_IN_TIME_RECOVERY_ENABLED (1):
            Reads are supported on selected versions of the data from
            within the past 7 days:

            - Reads against any timestamp within the past hour
            - Reads against 1-minute snapshots beyond 1 hour and within
              7 days

            ``version_retention_period`` and ``earliest_version_time``
            can be used to determine the supported versions.
        POINT_IN_TIME_RECOVERY_DISABLED (2):
            Reads are supported on any version of the
            data from within the past 1 hour.
    """
    # NOTE: generated proto enum — values mirror the .proto definition.
    POINT_IN_TIME_RECOVERY_ENABLEMENT_UNSPECIFIED = 0
    POINT_IN_TIME_RECOVERY_ENABLED = 1
    POINT_IN_TIME_RECOVERY_DISABLED = 2
class AppEngineIntegrationMode(proto.Enum):
    r"""The type of App Engine integration mode.

    Values:
        APP_ENGINE_INTEGRATION_MODE_UNSPECIFIED (0):
            Not used.
        ENABLED (1):
            If an App Engine application exists in the
            same region as this database, App Engine
            configuration will impact this database. This
            includes disabling of the application &
            database, as well as disabling writes to the
            database.
        DISABLED (2):
            App Engine has no effect on the ability of
            this database to serve requests.

            This is the default setting for databases
            created with the Firestore API.
    """
    # NOTE: generated proto enum — values mirror the .proto definition.
    APP_ENGINE_INTEGRATION_MODE_UNSPECIFIED = 0
    ENABLED = 1
    DISABLED = 2
class DeleteProtectionState(proto.Enum):
    r"""The delete protection state of the database.

    Values:
        DELETE_PROTECTION_STATE_UNSPECIFIED (0):
            The default value. Delete protection type is
            not specified
        DELETE_PROTECTION_DISABLED (1):
            Delete protection is disabled
        DELETE_PROTECTION_ENABLED (2):
            Delete protection is enabled
    """
    # NOTE: generated proto enum — values mirror the .proto definition.
    DELETE_PROTECTION_STATE_UNSPECIFIED = 0
    DELETE_PROTECTION_DISABLED = 1
    DELETE_PROTECTION_ENABLED = 2
class DatabaseEdition(proto.Enum):
    r"""The edition of the database.

    Values:
        DATABASE_EDITION_UNSPECIFIED (0):
            Not used.
        STANDARD (1):
            Standard edition.

            This is the default setting if not specified.
        ENTERPRISE (2):
            Enterprise edition.
    """
    # NOTE: generated proto enum — values mirror the .proto definition.
    DATABASE_EDITION_UNSPECIFIED = 0
    STANDARD = 1
    ENTERPRISE = 2
class CmekConfig(proto.Message):
    r"""The CMEK (Customer Managed Encryption Key) configuration for
    a Firestore database. If not present, the database is secured by
    the default Google encryption key.

    Attributes:
        kms_key_name (str):
            Required. Only keys in the same location as this database
            are allowed to be used for encryption.

            For Firestore's nam5 multi-region, this corresponds to Cloud
            KMS multi-region us. For Firestore's eur3 multi-region, this
            corresponds to Cloud KMS multi-region europe. See
            https://cloud.google.com/kms/docs/locations.

            The expected format is
            ``projects/{project_id}/locations/{kms_location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}``.
        active_key_version (MutableSequence[str]):
            Output only. Currently in-use `KMS key
            versions <https://cloud.google.com/kms/docs/resource-hierarchy#key_versions>`__.
            During `key
            rotation <https://cloud.google.com/kms/docs/key-rotation>`__,
            there can be multiple in-use key versions.

            The expected format is
            ``projects/{project_id}/locations/{kms_location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}/cryptoKeyVersions/{key_version}``.
    """

    # Field numbers mirror the .proto definition and are part of the wire
    # format; do not renumber.
    kms_key_name: str = proto.Field(
        proto.STRING,
        number=1,
    )
    active_key_version: MutableSequence[str] = proto.RepeatedField(
        proto.STRING,
        number=2,
    )
class SourceInfo(proto.Message):
    r"""Information about the provenance of this database.

    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields

    Attributes:
        backup (google.cloud.firestore_admin_v1.types.Database.SourceInfo.BackupSource):
            If set, this database was restored from the
            specified backup (or a snapshot thereof).

            This field is a member of `oneof`_ ``source``.
        operation (str):
            The associated long-running operation. This field may not be
            set after the operation has completed. Format:
            ``projects/{project}/databases/{database}/operations/{operation}``.
    """

    class BackupSource(proto.Message):
        r"""Information about a backup that was used to restore a
        database.

        Attributes:
            backup (str):
                The resource name of the backup that was used to restore
                this database. Format:
                ``projects/{project}/locations/{location}/backups/{backup}``.
        """

        # ``backup`` here is BackupSource's own string field (number 1 within
        # BackupSource); it is distinct from SourceInfo's ``backup`` message
        # field below — each message has its own field-number namespace.
        backup: str = proto.Field(
            proto.STRING,
            number=1,
        )

    backup: "Database.SourceInfo.BackupSource" = proto.Field(
        proto.MESSAGE,
        number=1,
        oneof="source",
        message="Database.SourceInfo.BackupSource",
    )
    operation: str = proto.Field(
        proto.STRING,
        number=3,
    )
class EncryptionConfig(proto.Message):
    r"""Encryption configuration for a new database being created from
    another source.

    The source could be a [Backup][google.firestore.admin.v1.Backup] .

    This message has `oneof`_ fields (mutually exclusive fields).
    For each oneof, at most one member field can be set at the same time.
    Setting any member of the oneof automatically clears all other
    members.

    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields

    Attributes:
        google_default_encryption (google.cloud.firestore_admin_v1.types.Database.EncryptionConfig.GoogleDefaultEncryptionOptions):
            Use Google default encryption.

            This field is a member of `oneof`_ ``encryption_type``.
        use_source_encryption (google.cloud.firestore_admin_v1.types.Database.EncryptionConfig.SourceEncryptionOptions):
            The database will use the same encryption
            configuration as the source.

            This field is a member of `oneof`_ ``encryption_type``.
        customer_managed_encryption (google.cloud.firestore_admin_v1.types.Database.EncryptionConfig.CustomerManagedEncryptionOptions):
            Use Customer Managed Encryption Keys (CMEK)
            for encryption.

            This field is a member of `oneof`_ ``encryption_type``.
    """

    # The three option messages below are intentionally empty (or
    # nearly so): their presence in the oneof is the signal; they exist
    # as distinct types so new options can be added without breaking
    # the wire format.
    class GoogleDefaultEncryptionOptions(proto.Message):
        r"""The configuration options for using Google default
        encryption.
        """

    class SourceEncryptionOptions(proto.Message):
        r"""The configuration options for using the same encryption
        method as the source.
        """

    class CustomerManagedEncryptionOptions(proto.Message):
        r"""The configuration options for using CMEK (Customer Managed
        Encryption Key) encryption.

        Attributes:
            kms_key_name (str):
                Required. Only keys in the same location as the database are
                allowed to be used for encryption.

                For Firestore's nam5 multi-region, this corresponds to Cloud
                KMS multi-region us. For Firestore's eur3 multi-region, this
                corresponds to Cloud KMS multi-region europe. See
                https://cloud.google.com/kms/docs/locations.

                The expected format is
                ``projects/{project_id}/locations/{kms_location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}``.
        """

        kms_key_name: str = proto.Field(
            proto.STRING,
            number=1,
        )

    # Members of the ``encryption_type`` oneof — at most one may be set.
    google_default_encryption: "Database.EncryptionConfig.GoogleDefaultEncryptionOptions" = proto.Field(
        proto.MESSAGE,
        number=1,
        oneof="encryption_type",
        message="Database.EncryptionConfig.GoogleDefaultEncryptionOptions",
    )
    use_source_encryption: "Database.EncryptionConfig.SourceEncryptionOptions" = (
        proto.Field(
            proto.MESSAGE,
            number=2,
            oneof="encryption_type",
            message="Database.EncryptionConfig.SourceEncryptionOptions",
        )
    )
    customer_managed_encryption: "Database.EncryptionConfig.CustomerManagedEncryptionOptions" = proto.Field(
        proto.MESSAGE,
        number=3,
        oneof="encryption_type",
        message="Database.EncryptionConfig.CustomerManagedEncryptionOptions",
    )
# Field declarations for the enclosing Database message. Field numbers
# mirror the .proto definition and are part of the wire format; do not
# renumber. Gaps in the numbering (2, 4, 8, 11-14, 16, 24, 27, 29, ...)
# presumably correspond to reserved or removed proto fields — confirm
# against the .proto source before reusing a number.
name: str = proto.Field(
    proto.STRING,
    number=1,
)
uid: str = proto.Field(
    proto.STRING,
    number=3,
)
create_time: timestamp_pb2.Timestamp = proto.Field(
    proto.MESSAGE,
    number=5,
    message=timestamp_pb2.Timestamp,
)
update_time: timestamp_pb2.Timestamp = proto.Field(
    proto.MESSAGE,
    number=6,
    message=timestamp_pb2.Timestamp,
)
delete_time: timestamp_pb2.Timestamp = proto.Field(
    proto.MESSAGE,
    number=7,
    message=timestamp_pb2.Timestamp,
)
location_id: str = proto.Field(
    proto.STRING,
    number=9,
)
# Trailing underscore avoids shadowing the ``type`` builtin.
type_: DatabaseType = proto.Field(
    proto.ENUM,
    number=10,
    enum=DatabaseType,
)
concurrency_mode: ConcurrencyMode = proto.Field(
    proto.ENUM,
    number=15,
    enum=ConcurrencyMode,
)
version_retention_period: duration_pb2.Duration = proto.Field(
    proto.MESSAGE,
    number=17,
    message=duration_pb2.Duration,
)
earliest_version_time: timestamp_pb2.Timestamp = proto.Field(
    proto.MESSAGE,
    number=18,
    message=timestamp_pb2.Timestamp,
)
point_in_time_recovery_enablement: PointInTimeRecoveryEnablement = proto.Field(
    proto.ENUM,
    number=21,
    enum=PointInTimeRecoveryEnablement,
)
app_engine_integration_mode: AppEngineIntegrationMode = proto.Field(
    proto.ENUM,
    number=19,
    enum=AppEngineIntegrationMode,
)
key_prefix: str = proto.Field(
    proto.STRING,
    number=20,
)
delete_protection_state: DeleteProtectionState = proto.Field(
    proto.ENUM,
    number=22,
    enum=DeleteProtectionState,
)
cmek_config: CmekConfig = proto.Field(
    proto.MESSAGE,
    number=23,
    message=CmekConfig,
)
previous_id: str = proto.Field(
    proto.STRING,
    number=25,
)
source_info: SourceInfo = proto.Field(
    proto.MESSAGE,
    number=26,
    message=SourceInfo,
)
# ``optional=True`` makes this a proto3 optional field (member of the
# synthetic ``_free_tier`` oneof), so absence is distinguishable from False.
free_tier: bool = proto.Field(
    proto.BOOL,
    number=30,
    optional=True,
)
etag: str = proto.Field(
    proto.STRING,
    number=99,
)
database_edition: DatabaseEdition = proto.Field(
    proto.ENUM,
    number=28,
    enum=DatabaseEdition,
)
# Export exactly the names registered in this module's proto-plus manifest
# (``__protobuf__`` is the module-level proto.module(...) marker defined at
# the top of the file, outside this view).
__all__ = tuple(sorted(__protobuf__.manifest))

# NOTE(review): removed stray diff-viewer footer text ("Some files were not
# shown because too many files have changed...") accidentally captured into
# this file; it was not Python and broke parsing.