feat: add generated sync client (#1017)
daniel-sanche authored Dec 12, 2024
1 parent 1193530 commit f974823
Showing 54 changed files with 8,448 additions and 55 deletions.
2 changes: 1 addition & 1 deletion .cross_sync/README.md
@@ -62,7 +62,7 @@ CrossSync provides a set of annotations to mark up async classes, to guide the g

### Code Generation

Generation can be initiated using `python .cross_sync/generate.py .`
Generation can be initiated using `nox -s generate_sync`
from the root of the project. This will find all classes with the `__CROSS_SYNC_OUTPUT__ = "path/to/output"`
annotation, and generate a sync version of classes marked with `@CrossSync.convert_sync` at the output path.
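For orientation, a minimal sketch of what an annotated async module might look like, using the attribute and decorator names quoted above; the class, method, and output path are hypothetical and not taken from this commit:

```python
from google.cloud.bigtable.data._cross_sync import CrossSync

# module-level annotation: where the generated sync module should be written
__CROSS_SYNC_OUTPUT__ = "google.cloud.bigtable.data._sync_autogen.example"


@CrossSync.convert_sync  # decorator name as quoted in this README excerpt
class ExampleAsync:
    async def fetch(self, key: bytes) -> bytes:
        """Async implementation; the generator emits a sync equivalent."""
        ...
```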

18 changes: 0 additions & 18 deletions docs/async_data_client/async_data_usage.rst

This file was deleted.

@@ -7,6 +7,6 @@ Bigtable Data Client Async
performance benefits, the codebase should be designed to be async from the ground up.


.. autoclass:: google.cloud.bigtable.data._async.client.BigtableDataClientAsync
.. autoclass:: google.cloud.bigtable.data.BigtableDataClientAsync
:members:
:show-inheritance:
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
39 changes: 39 additions & 0 deletions docs/data_client/data_client_usage.rst
@@ -0,0 +1,39 @@
Data Client
===========

Sync Surface
------------

.. toctree::
:maxdepth: 3

sync_data_client
sync_data_table
sync_data_mutations_batcher
sync_data_execute_query_iterator

Async Surface
-------------

.. toctree::
:maxdepth: 3

async_data_client
async_data_table
async_data_mutations_batcher
async_data_execute_query_iterator

Common Classes
--------------

.. toctree::
:maxdepth: 3

common_data_read_rows_query
common_data_row
common_data_row_filters
common_data_mutations
common_data_read_modify_write_rules
common_data_exceptions
common_data_execute_query_values
common_data_execute_query_metadata
6 changes: 6 additions & 0 deletions docs/data_client/sync_data_client.rst
@@ -0,0 +1,6 @@
Bigtable Data Client
~~~~~~~~~~~~~~~~~~~~

.. autoclass:: google.cloud.bigtable.data.BigtableDataClient
:members:
:show-inheritance:
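A hedged usage sketch for the sync surface documented here; the project, instance, and table IDs are placeholders, and the call shapes are assumed to mirror the existing async client rather than being verified against this commit:

```python
from google.cloud.bigtable.data import BigtableDataClient

client = BigtableDataClient(project="my-project")
table = client.get_table("my-instance", "my-table")

row = table.read_row(b"row-key")  # returns None if the row does not exist
if row is not None:
    for cell in row.cells:
        print(cell.family, cell.qualifier, cell.value)

client.close()
```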
6 changes: 6 additions & 0 deletions docs/data_client/sync_data_execute_query_iterator.rst
@@ -0,0 +1,6 @@
Execute Query Iterator
~~~~~~~~~~~~~~~~~~~~~~

.. autoclass:: google.cloud.bigtable.data.execute_query.ExecuteQueryIterator
:members:
:show-inheritance:
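A hedged sketch of consuming the sync `ExecuteQueryIterator`; the `execute_query` signature shown here mirrors the async surface and has not been verified against this commit:

```python
from google.cloud.bigtable.data import BigtableDataClient

client = BigtableDataClient()
result = client.execute_query(
    "SELECT _key, cf['col'] FROM `my-table` WHERE _key = @row_key",
    instance_id="my-instance",
    parameters={"row_key": b"row-key"},
)
for row in result:  # result is an ExecuteQueryIterator
    print(row["_key"])
client.close()
```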
6 changes: 6 additions & 0 deletions docs/data_client/sync_data_mutations_batcher.rst
@@ -0,0 +1,6 @@
Mutations Batcher
~~~~~~~~~~~~~~~~~

.. automodule:: google.cloud.bigtable.data._sync_autogen.mutations_batcher
:members:
:show-inheritance:
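A hedged sketch of the generated sync mutations batcher; the context-manager flush-on-exit behavior is assumed to mirror `MutationsBatcherAsync`, and the IDs and cell values are placeholders:

```python
from google.cloud.bigtable.data import BigtableDataClient, RowMutationEntry
from google.cloud.bigtable.data.mutations import SetCell

client = BigtableDataClient()
table = client.get_table("my-instance", "my-table")

with table.mutations_batcher() as batcher:
    batcher.append(
        RowMutationEntry(b"row-key", [SetCell("family", b"qualifier", b"value")])
    )
# pending entries are flushed when the `with` block exits
client.close()
```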
6 changes: 6 additions & 0 deletions docs/data_client/sync_data_table.rst
@@ -0,0 +1,6 @@
Table
~~~~~

.. autoclass:: google.cloud.bigtable.data.Table
:members:
:show-inheritance:
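A hedged sketch of bulk reads through the sync `Table`, assuming it mirrors `TableAsync` (`read_rows` accepting a `ReadRowsQuery`); IDs and keys are placeholders:

```python
from google.cloud.bigtable.data import BigtableDataClient, ReadRowsQuery, RowRange

client = BigtableDataClient()
table = client.get_table("my-instance", "my-table")

query = ReadRowsQuery(row_ranges=[RowRange(start_key=b"a", end_key=b"z")], limit=10)
for row in table.read_rows(query):
    print(row.row_key)
client.close()
```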
4 changes: 2 additions & 2 deletions docs/index.rst
@@ -5,10 +5,10 @@
Client Types
-------------
.. toctree::
:maxdepth: 2
:maxdepth: 3

data_client/data_client_usage
classic_client/usage
async_data_client/async_data_usage


Changelog
9 changes: 4 additions & 5 deletions docs/scripts/patch_devsite_toc.py
@@ -117,7 +117,8 @@ def __init__(self, dir_name, index_file_name):
continue
# bail when toc indented block is done
if not line.startswith(" ") and not line.startswith("\t"):
break
in_toc = False
continue
# extract entries
self.items.append(self.extract_toc_entry(line.strip()))
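The hunk above replaces an early `break` with a flag reset, so the scanner keeps walking the index file after one toctree block ends instead of stopping at the first non-indented line. A simplified, standalone sketch of that pattern (not the actual script):

```python
def extract_toc_entries(lines: list[str]) -> list[str]:
    """Collect entry names from every toctree block in an rst index file."""
    items: list[str] = []
    in_toc = False
    for line in lines:
        if ".. toctree::" in line:
            in_toc = True
            continue
        # skip lines outside a toctree block, blank lines, and :option: lines
        if not in_toc or not line.strip() or line.strip().startswith(":"):
            continue
        # a non-indented line closes the current block, but keep scanning
        # in case another toctree block appears later in the file
        if not line.startswith(" ") and not line.startswith("\t"):
            in_toc = False
            continue
        items.append(line.strip())
    return items
```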

@@ -194,9 +195,7 @@ def validate_toc(toc_file_path, expected_section_list, added_sections):
# Add sections for the async_data_client and classic_client directories
toc_path = "_build/html/docfx_yaml/toc.yml"
custom_sections = [
TocSection(
dir_name="async_data_client", index_file_name="async_data_usage.rst"
),
TocSection(dir_name="data_client", index_file_name="data_client_usage.rst"),
TocSection(dir_name="classic_client", index_file_name="usage.rst"),
]
add_sections(toc_path, custom_sections)
@@ -210,7 +209,7 @@ def validate_toc(toc_file_path, expected_section_list, added_sections):
"bigtable APIs",
"Changelog",
"Multiprocessing",
"Async Data Client",
"Data Client",
"Classic Client",
],
added_sections=custom_sections,
18 changes: 16 additions & 2 deletions google/cloud/bigtable/data/__init__.py
@@ -17,8 +17,10 @@

from google.cloud.bigtable.data._async.client import BigtableDataClientAsync
from google.cloud.bigtable.data._async.client import TableAsync

from google.cloud.bigtable.data._async.mutations_batcher import MutationsBatcherAsync
from google.cloud.bigtable.data._sync_autogen.client import BigtableDataClient
from google.cloud.bigtable.data._sync_autogen.client import Table
from google.cloud.bigtable.data._sync_autogen.mutations_batcher import MutationsBatcher

from google.cloud.bigtable.data.read_rows_query import ReadRowsQuery
from google.cloud.bigtable.data.read_rows_query import RowRange
@@ -52,20 +54,32 @@
from google.cloud.bigtable.data._async._read_rows import _ReadRowsOperationAsync
from google.cloud.bigtable.data._async._mutate_rows import _MutateRowsOperationAsync

from google.cloud.bigtable_v2.services.bigtable.client import (
BigtableClient,
)
from google.cloud.bigtable.data._sync_autogen._read_rows import _ReadRowsOperation
from google.cloud.bigtable.data._sync_autogen._mutate_rows import _MutateRowsOperation

from google.cloud.bigtable.data._cross_sync import CrossSync

CrossSync.add_mapping("GapicClient", BigtableAsyncClient)
CrossSync._Sync_Impl.add_mapping("GapicClient", BigtableClient)
CrossSync.add_mapping("_ReadRowsOperation", _ReadRowsOperationAsync)
CrossSync._Sync_Impl.add_mapping("_ReadRowsOperation", _ReadRowsOperation)
CrossSync.add_mapping("_MutateRowsOperation", _MutateRowsOperationAsync)
CrossSync._Sync_Impl.add_mapping("_MutateRowsOperation", _MutateRowsOperation)
CrossSync.add_mapping("MutationsBatcher", MutationsBatcherAsync)

CrossSync._Sync_Impl.add_mapping("MutationsBatcher", MutationsBatcher)

__version__: str = package_version.__version__

__all__ = (
"BigtableDataClientAsync",
"TableAsync",
"MutationsBatcherAsync",
"BigtableDataClient",
"Table",
"MutationsBatcher",
"RowKeySamples",
"ReadRowsQuery",
"RowRange",
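The paired add_mapping calls above register one logical name against both the async and generated-sync implementations; a hedged sketch of how shared code might resolve them (attribute-style lookup on CrossSync is an assumption, not something this diff shows):

```python
from google.cloud.bigtable.data._cross_sync import CrossSync

# Hypothetical lookups: the async flavor resolves "GapicClient" to
# BigtableAsyncClient, while the sync flavor resolves it to BigtableClient.
async_gapic_cls = CrossSync.GapicClient
sync_gapic_cls = CrossSync._Sync_Impl.GapicClient
```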
2 changes: 1 addition & 1 deletion google/cloud/bigtable/data/_async/_mutate_rows.py
@@ -1,4 +1,4 @@
# Copyright 2023 Google LLC
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
2 changes: 1 addition & 1 deletion google/cloud/bigtable/data/_async/_read_rows.py
@@ -1,4 +1,4 @@
# Copyright 2023 Google LLC
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
11 changes: 10 additions & 1 deletion google/cloud/bigtable/data/_async/client.py
@@ -1,4 +1,4 @@
# Copyright 2023 Google LLC
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -85,8 +85,10 @@
)
from google.cloud.bigtable.data._async.mutations_batcher import _MB_SIZE
else:
from typing import Iterable # noqa: F401
from grpc import insecure_channel
from google.cloud.bigtable_v2.services.bigtable.transports import BigtableGrpcTransport as TransportType # type: ignore
from google.cloud.bigtable.data._sync_autogen.mutations_batcher import _MB_SIZE


if TYPE_CHECKING:
@@ -100,6 +102,13 @@
from google.cloud.bigtable.data.execute_query._async.execute_query_iterator import (
ExecuteQueryIteratorAsync,
)
else:
from google.cloud.bigtable.data._sync_autogen.mutations_batcher import ( # noqa: F401
MutationsBatcher,
)
from google.cloud.bigtable.data.execute_query._sync_autogen.execute_query_iterator import ( # noqa: F401
ExecuteQueryIterator,
)


__CROSS_SYNC_OUTPUT__ = "google.cloud.bigtable.data._sync_autogen.client"
8 changes: 4 additions & 4 deletions google/cloud/bigtable/data/_async/mutations_batcher.py
@@ -1,4 +1,4 @@
# Copyright 2023 Google LLC
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -14,7 +14,7 @@
#
from __future__ import annotations

from typing import Sequence, TYPE_CHECKING
from typing import Sequence, TYPE_CHECKING, cast
import atexit
import warnings
from collections import deque
@@ -250,7 +250,7 @@ def __init__(
)
# used by sync class to manage flush_internal tasks
self._sync_flush_executor = (
concurrent.futures.ThreadPoolExecutor(max_workers=1)
concurrent.futures.ThreadPoolExecutor(max_workers=4)
if not CrossSync.is_async
else None
)
@@ -305,7 +305,7 @@ async def append(self, mutation_entry: RowMutationEntry):
# TODO: return a future to track completion of this entry
if self._closed.is_set():
raise RuntimeError("Cannot append to closed MutationsBatcher")
if isinstance(mutation_entry, Mutation): # type: ignore
if isinstance(cast(Mutation, mutation_entry), Mutation):
raise ValueError(
f"invalid mutation type: {type(mutation_entry).__name__}. Only RowMutationEntry objects are supported by batcher"
)
5 changes: 3 additions & 2 deletions google/cloud/bigtable/data/_helpers.py
@@ -29,6 +29,7 @@
if TYPE_CHECKING:
import grpc
from google.cloud.bigtable.data import TableAsync
from google.cloud.bigtable.data import Table

"""
Helper functions used in various places in the library.
@@ -120,7 +121,7 @@ def _retry_exception_factory(
def _get_timeouts(
operation: float | TABLE_DEFAULT,
attempt: float | None | TABLE_DEFAULT,
table: "TableAsync",
table: "TableAsync" | "Table",
) -> tuple[float, float]:
"""
Convert passed in timeout values to floats, using table defaults if necessary.
@@ -207,7 +208,7 @@ def _get_error_type(

def _get_retryable_errors(
call_codes: Sequence["grpc.StatusCode" | int | type[Exception]] | TABLE_DEFAULT,
table: "TableAsync",
table: "TableAsync" | "Table",
) -> list[type[Exception]]:
"""
Convert passed in retryable error codes to a list of exception types.