chore(docs): add execute query docs pages (#1014)
daniel-sanche authored Aug 9, 2024
1 parent 4cf88dc commit 678a06c
Showing 8 changed files with 98 additions and 31 deletions.
6 changes: 6 additions & 0 deletions docs/async_data_client/async_data_execute_query_iterator.rst
@@ -0,0 +1,6 @@
Execute Query Iterator Async
~~~~~~~~~~~~~~~~~~~~~~~~~~~~

.. autoclass:: google.cloud.bigtable.data.execute_query.ExecuteQueryIteratorAsync
:members:
:show-inheritance:
6 changes: 6 additions & 0 deletions docs/async_data_client/async_data_execute_query_metadata.rst
@@ -0,0 +1,6 @@
Execute Query Metadata
~~~~~~~~~~~~~~~~~~~~~~~~~~

.. automodule:: google.cloud.bigtable.data.execute_query.metadata
:members:
:show-inheritance:
6 changes: 6 additions & 0 deletions docs/async_data_client/async_data_execute_query_values.rst
@@ -0,0 +1,6 @@
Execute Query Values
~~~~~~~~~~~~~~~~~~~~

.. automodule:: google.cloud.bigtable.data.execute_query.values
:members:
:show-inheritance:
3 changes: 3 additions & 0 deletions docs/async_data_client/async_data_usage.rst
@@ -13,3 +13,6 @@ Async Data Client
async_data_mutations
async_data_read_modify_write_rules
async_data_exceptions
async_data_execute_query_iterator
async_data_execute_query_values
async_data_execute_query_metadata
22 changes: 11 additions & 11 deletions google/cloud/bigtable/data/_async/client.py
@@ -456,38 +456,38 @@ async def execute_query(
retryable_errors list until operation_timeout is reached.
Args:
- query: Query to be run on Bigtable instance. The query can use ``@param``
query: Query to be run on Bigtable instance. The query can use ``@param``
placeholders to use parameter interpolation on the server. Values for all
parameters should be provided in ``parameters``. Types of parameters are
inferred but should be provided in ``parameter_types`` if the inference is
not possible (i.e. when value can be None, an empty list or an empty dict).
- instance_id: The Bigtable instance ID to perform the query on.
instance_id: The Bigtable instance ID to perform the query on.
instance_id is combined with the client's project to fully
specify the instance.
- parameters: Dictionary with values for all parameters used in the ``query``.
- parameter_types: Dictionary with types of parameters used in the ``query``.
parameters: Dictionary with values for all parameters used in the ``query``.
parameter_types: Dictionary with types of parameters used in the ``query``.
Required to contain entries only for parameters whose type cannot be
detected automatically (i.e. the value can be None, an empty list or
an empty dict).
- app_profile_id: The app profile to associate with requests.
app_profile_id: The app profile to associate with requests.
https://cloud.google.com/bigtable/docs/app-profiles
- operation_timeout: the time budget for the entire operation, in seconds.
operation_timeout: the time budget for the entire operation, in seconds.
Failed requests will be retried within the budget.
Defaults to 600 seconds.
- attempt_timeout: the time budget for an individual network request, in seconds.
attempt_timeout: the time budget for an individual network request, in seconds.
If it takes longer than this time to complete, the request will be cancelled with
a DeadlineExceeded exception, and a retry will be attempted.
Defaults to 20 seconds.
If None, defaults to operation_timeout.
- retryable_errors: a list of errors that will be retried if encountered.
retryable_errors: a list of errors that will be retried if encountered.
Defaults to 4 (DeadlineExceeded), 14 (ServiceUnavailable), and 10 (Aborted)
Returns:
- an asynchronous iterator that yields rows returned by the query
ExecuteQueryIteratorAsync: an asynchronous iterator that yields rows returned by the query
Raises:
- DeadlineExceeded: raised after operation timeout
google.api_core.exceptions.DeadlineExceeded: raised after operation timeout
will be chained with a RetryExceptionGroup containing GoogleAPIError exceptions
from any retries that failed
- GoogleAPIError: raised if the request encounters an unrecoverable error
google.api_core.exceptions.GoogleAPIError: raised if the request encounters an unrecoverable error
"""
warnings.warn(
"ExecuteQuery is in preview and may change in the future.",
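For context, a minimal usage sketch of the execute_query API documented above. The project, instance, table, and column names are illustrative placeholders, not part of this commit:

import asyncio

from google.cloud.bigtable.data import BigtableDataClientAsync


async def main() -> None:
    # Placeholder project/instance/table names.
    async with BigtableDataClientAsync(project="my-project") as client:
        iterator = await client.execute_query(
            "SELECT _key, cf['qualifier'] FROM my_table WHERE _key = @key",
            instance_id="my-instance",
            parameters={"key": b"row-key"},
        )
        # The returned ExecuteQueryIteratorAsync supports "async for".
        async for row in iterator:
            print(row["_key"])


asyncio.run(main())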
@@ -23,6 +23,7 @@
Optional,
Sequence,
Tuple,
TYPE_CHECKING,
)

from google.api_core import retry as retries
@@ -43,35 +44,40 @@
ExecuteQueryRequest as ExecuteQueryRequestPB,
)

if TYPE_CHECKING:
from google.cloud.bigtable.data import BigtableDataClientAsync


class ExecuteQueryIteratorAsync:
"""
ExecuteQueryIteratorAsync handles collecting streaming responses from the
- ExecuteQuery RPC and parsing them to `QueryResultRow`s.
ExecuteQuery RPC and parsing them to QueryResultRows.
ExecuteQueryIteratorAsync implements the Asynchronous Iterator interface and can
be used with "async for" syntax. It is also a context manager.
It is **not thread-safe**. It should not be used by multiple asyncio Tasks.
Args:
- client (google.cloud.bigtable.data._async.BigtableDataClientAsync): bigtable client
- instance_id (str): id of the instance on which the query is executed
- request_body (Dict[str, Any]): dict representing the body of the ExecuteQueryRequest
- attempt_timeout (float | None): the time budget for the entire operation, in seconds.
- Failed requests will be retried within the budget.
- Defaults to 600 seconds.
- operation_timeout (float): the time budget for an individual network request, in seconds.
- If it takes longer than this time to complete, the request will be cancelled with
- a DeadlineExceeded exception, and a retry will be attempted.
- Defaults to the 20 seconds. If None, defaults to operation_timeout.
- req_metadata (Sequence[Tuple[str, str]]): metadata used while sending the gRPC request
- retryable_excs (List[type[Exception]]): a list of errors that will be retried if encountered.
client: bigtable client
instance_id: id of the instance on which the query is executed
request_body: dict representing the body of the ExecuteQueryRequest
attempt_timeout: the time budget for an individual network request, in seconds.
If it takes longer than this time to complete, the request will be cancelled with
a DeadlineExceeded exception, and a retry will be attempted.
Defaults to 20 seconds. If None, defaults to operation_timeout.
operation_timeout: the time budget for the entire operation, in seconds.
Failed requests will be retried within the budget.
Defaults to 600 seconds.
req_metadata: metadata used while sending the gRPC request
retryable_excs: a list of errors that will be retried if encountered.
Raises:
RuntimeError: if the instance is not created within an async event loop context.
"""

def __init__(
self,
- client: Any,
client: BigtableDataClientAsync,
instance_id: str,
app_profile_id: Optional[str],
request_body: Dict[str, Any],
@@ -112,15 +118,18 @@ def __init__(
) from e

@property
- def is_closed(self):
def is_closed(self) -> bool:
"""Returns True if the iterator is closed, False otherwise."""
return self._is_closed

@property
- def app_profile_id(self):
def app_profile_id(self) -> Optional[str]:
"""Returns the app_profile_id of the iterator."""
return self._app_profile_id

@property
- def table_name(self):
def table_name(self) -> Optional[str]:
"""Returns the table_name of the iterator."""
return self._table_name

async def _make_request_with_resume_token(self):
@@ -176,7 +185,7 @@ async def _next_impl(self) -> AsyncIterator[QueryResultRow]:
yield result
await self.close()

- async def __anext__(self):
async def __anext__(self) -> QueryResultRow:
if self._is_closed:
raise StopAsyncIteration
return await self._result_generator.__anext__()
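Because _next_impl only closes the iterator once the stream is fully drained, a caller that stops early should close it explicitly. A hedged sketch, reusing the client and placeholder names from the example above:

# Stop early, then release the stream explicitly.
iterator = await client.execute_query(
    "SELECT _key FROM my_table",
    instance_id="my-instance",
)
try:
    async for row in iterator:
        print(row)
        break  # stop before the stream is drained
finally:
    if not iterator.is_closed:  # property typed in this commit
        await iterator.close()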
33 changes: 33 additions & 0 deletions google/cloud/bigtable/data/execute_query/metadata.py
@@ -90,6 +90,8 @@ def __repr__(self) -> str:
return self.__str__()

class Struct(_NamedList[Type], Type):
"""Struct SQL type."""

@classmethod
def from_pb_type(cls, type_pb: Optional[PBType] = None) -> "SqlType.Struct":
if type_pb is None:
@@ -120,6 +122,8 @@ def __str__(self):
return super(_NamedList, self).__str__()

class Array(Type):
"""Array SQL type."""

def __init__(self, element_type: "SqlType.Type"):
if isinstance(element_type, SqlType.Array):
raise ValueError("Arrays of arrays are not supported.")
@@ -148,6 +152,8 @@ def __str__(self) -> str:
return f"{self.__class__.__name__}<{str(self.element_type)}>"

class Map(Type):
"""Map SQL type."""

def __init__(self, key_type: "SqlType.Type", value_type: "SqlType.Type"):
self._key_type = key_type
self._value_type = value_type
@@ -189,32 +195,44 @@ def __str__(self) -> str:
)

class Bytes(Type):
"""Bytes SQL type."""

expected_type = bytes
value_pb_dict_field_name = "bytes_value"
type_field_name = "bytes_type"

class String(Type):
"""String SQL type."""

expected_type = str
value_pb_dict_field_name = "string_value"
type_field_name = "string_type"

class Int64(Type):
"""Int64 SQL type."""

expected_type = int
value_pb_dict_field_name = "int_value"
type_field_name = "int64_type"

class Float64(Type):
"""Float64 SQL type."""

expected_type = float
value_pb_dict_field_name = "float_value"
type_field_name = "float64_type"

class Bool(Type):
"""Bool SQL type."""

expected_type = bool
value_pb_dict_field_name = "bool_value"
type_field_name = "bool_type"

class Timestamp(Type):
"""
Timestamp SQL type.
Timestamp supports :class:`DatetimeWithNanoseconds`, but Bigtable SQL does
not currently support nanosecond precision. Nanosecond values are accepted
for potential future compatibility and are currently ignored.
@@ -243,6 +261,8 @@ def _to_value_pb_dict(self, value: Any) -> Dict[str, Any]:
return {"timestamp_value": ts}

class Date(Type):
"""Date SQL type."""

type_field_name = "date_type"
expected_type = datetime.date

@@ -265,10 +285,23 @@ def _to_value_pb_dict(self, value: Any) -> Dict[str, Any]:


class Metadata:
"""
Base class for metadata returned by the ExecuteQuery operation.
"""

pass


class ProtoMetadata(Metadata):
"""
Metadata class for the ExecuteQuery operation.
Args:
columns (List[Tuple[Optional[str], SqlType.Type]]): List of column
metadata tuples. Each tuple contains the column name and the column
type.
"""

class Column:
def __init__(self, column_name: Optional[str], column_type: SqlType.Type):
self._column_name = column_name
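The SqlType classes documented above are what the parameter_types argument of execute_query expects. A sketch of declaring a type the server cannot infer, reusing the placeholder client and names from the earlier examples:

from google.cloud.bigtable.data.execute_query.metadata import SqlType

# An empty list gives the server no element type to infer, so @ids
# must be declared explicitly as an array of strings.
iterator = await client.execute_query(
    "SELECT * FROM my_table WHERE _key IN UNNEST(@ids)",
    instance_id="my-instance",
    parameters={"ids": []},
    parameter_types={"ids": SqlType.Array(SqlType.String())},
)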
8 changes: 6 additions & 2 deletions google/cloud/bigtable/data/execute_query/values.py
@@ -112,8 +112,12 @@ def __repr__(self) -> str:


class QueryResultRow(_NamedList[ExecuteQueryValueType]):
- pass
"""
Represents a single row of the result.
"""


class Struct(_NamedList[ExecuteQueryValueType]):
- pass
"""
Represents a struct value in the result.
"""
