diff --git a/.coveragerc b/.coveragerc
index 32fdaa09..303a6ecb 100644
--- a/.coveragerc
+++ b/.coveragerc
@@ -18,6 +18,7 @@
 [run]
 branch = True
 omit =
+    google/__init__.py
     sqlalchemy_bigquery/requirements.py
 
 [report]
diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml
index 7b6cc310..cb89b2e3 100644
--- a/.github/.OwlBot.lock.yaml
+++ b/.github/.OwlBot.lock.yaml
@@ -1,3 +1,3 @@
 docker:
   image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest
-  digest: sha256:a3a85c2e0b3293068e47b1635b178f7e3d3845f2cfb8722de6713d4bbafdcd1d
+  digest: sha256:ec49167c606648a063d1222220b48119c912562849a0528f35bfb592a9f72737
diff --git a/.kokoro/docs/common.cfg b/.kokoro/docs/common.cfg
index 79b76dcd..07ec62b9 100644
--- a/.kokoro/docs/common.cfg
+++ b/.kokoro/docs/common.cfg
@@ -30,7 +30,9 @@ env_vars: {
 
 env_vars: {
     key: "V2_STAGING_BUCKET"
-    value: "docs-staging-v2"
+    # Push non-cloud library docs to `docs-staging-v2-staging` instead of the
+    # Cloud RAD bucket `docs-staging-v2`
+    value: "docs-staging-v2-staging"
 }
 
 # It will upload the docker image after successful builds.
diff --git a/.kokoro/samples/lint/common.cfg b/.kokoro/samples/lint/common.cfg
index 6b2a5f39..d568248d 100644
--- a/.kokoro/samples/lint/common.cfg
+++ b/.kokoro/samples/lint/common.cfg
@@ -31,4 +31,4 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
 gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
 
 # Use the trampoline script to run in docker.
-build_file: "python-bigquery-sqlalchemy/.kokoro/trampoline.sh"
\ No newline at end of file
+build_file: "python-bigquery-sqlalchemy/.kokoro/trampoline_v2.sh"
\ No newline at end of file
diff --git a/.kokoro/samples/python3.10/common.cfg b/.kokoro/samples/python3.10/common.cfg
new file mode 100644
index 00000000..925c60c9
--- /dev/null
+++ b/.kokoro/samples/python3.10/common.cfg
@@ -0,0 +1,40 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+  define_artifacts {
+    regex: "**/*sponge_log.xml"
+  }
+}
+
+# Specify which tests to run
+env_vars: {
+    key: "RUN_TESTS_SESSION"
+    value: "py-3.10"
+}
+
+# Declare build specific Cloud project.
+env_vars: {
+    key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
+    value: "python-docs-samples-tests-310"
+}
+
+env_vars: {
+    key: "TRAMPOLINE_BUILD_FILE"
+    value: "github/python-bigquery-sqlalchemy/.kokoro/test-samples.sh"
+}
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+    key: "TRAMPOLINE_IMAGE"
+    value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker"
+}
+
+# Download secrets for samples
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Use the trampoline script to run in docker.
+build_file: "python-bigquery-sqlalchemy/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/.kokoro/samples/python3.10/continuous.cfg b/.kokoro/samples/python3.10/continuous.cfg new file mode 100644 index 00000000..a1c8d975 --- /dev/null +++ b/.kokoro/samples/python3.10/continuous.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/.kokoro/samples/python3.10/periodic-head.cfg b/.kokoro/samples/python3.10/periodic-head.cfg new file mode 100644 index 00000000..abf3481d --- /dev/null +++ b/.kokoro/samples/python3.10/periodic-head.cfg @@ -0,0 +1,11 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-bigquery-sqlalchemy/.kokoro/test-samples-against-head.sh" +} diff --git a/.kokoro/samples/python3.10/periodic.cfg b/.kokoro/samples/python3.10/periodic.cfg new file mode 100644 index 00000000..71cd1e59 --- /dev/null +++ b/.kokoro/samples/python3.10/periodic.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "False" +} diff --git a/.kokoro/samples/python3.10/presubmit.cfg b/.kokoro/samples/python3.10/presubmit.cfg new file mode 100644 index 00000000..a1c8d975 --- /dev/null +++ b/.kokoro/samples/python3.10/presubmit.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/.kokoro/samples/python3.6/common.cfg b/.kokoro/samples/python3.6/common.cfg index 3000e725..5cfb556d 100644 --- a/.kokoro/samples/python3.6/common.cfg +++ b/.kokoro/samples/python3.6/common.cfg @@ -37,4 +37,4 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. -build_file: "python-bigquery-sqlalchemy/.kokoro/trampoline.sh" \ No newline at end of file +build_file: "python-bigquery-sqlalchemy/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/.kokoro/samples/python3.6/periodic.cfg b/.kokoro/samples/python3.6/periodic.cfg index 50fec964..71cd1e59 100644 --- a/.kokoro/samples/python3.6/periodic.cfg +++ b/.kokoro/samples/python3.6/periodic.cfg @@ -3,4 +3,4 @@ env_vars: { key: "INSTALL_LIBRARY_FROM_SOURCE" value: "False" -} \ No newline at end of file +} diff --git a/.kokoro/samples/python3.7/common.cfg b/.kokoro/samples/python3.7/common.cfg index b3f94782..e5fe0226 100644 --- a/.kokoro/samples/python3.7/common.cfg +++ b/.kokoro/samples/python3.7/common.cfg @@ -37,4 +37,4 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. 
-build_file: "python-bigquery-sqlalchemy/.kokoro/trampoline.sh" \ No newline at end of file +build_file: "python-bigquery-sqlalchemy/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/.kokoro/samples/python3.7/periodic.cfg b/.kokoro/samples/python3.7/periodic.cfg index 50fec964..71cd1e59 100644 --- a/.kokoro/samples/python3.7/periodic.cfg +++ b/.kokoro/samples/python3.7/periodic.cfg @@ -3,4 +3,4 @@ env_vars: { key: "INSTALL_LIBRARY_FROM_SOURCE" value: "False" -} \ No newline at end of file +} diff --git a/.kokoro/samples/python3.8/common.cfg b/.kokoro/samples/python3.8/common.cfg index 6a5ffade..e8cc035c 100644 --- a/.kokoro/samples/python3.8/common.cfg +++ b/.kokoro/samples/python3.8/common.cfg @@ -37,4 +37,4 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. -build_file: "python-bigquery-sqlalchemy/.kokoro/trampoline.sh" \ No newline at end of file +build_file: "python-bigquery-sqlalchemy/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/.kokoro/samples/python3.8/periodic.cfg b/.kokoro/samples/python3.8/periodic.cfg index 50fec964..71cd1e59 100644 --- a/.kokoro/samples/python3.8/periodic.cfg +++ b/.kokoro/samples/python3.8/periodic.cfg @@ -3,4 +3,4 @@ env_vars: { key: "INSTALL_LIBRARY_FROM_SOURCE" value: "False" -} \ No newline at end of file +} diff --git a/.kokoro/samples/python3.9/common.cfg b/.kokoro/samples/python3.9/common.cfg index 7e1b46d3..692eb211 100644 --- a/.kokoro/samples/python3.9/common.cfg +++ b/.kokoro/samples/python3.9/common.cfg @@ -37,4 +37,4 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. -build_file: "python-bigquery-sqlalchemy/.kokoro/trampoline.sh" \ No newline at end of file +build_file: "python-bigquery-sqlalchemy/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/.kokoro/samples/python3.9/periodic.cfg b/.kokoro/samples/python3.9/periodic.cfg index 50fec964..71cd1e59 100644 --- a/.kokoro/samples/python3.9/periodic.cfg +++ b/.kokoro/samples/python3.9/periodic.cfg @@ -3,4 +3,4 @@ env_vars: { key: "INSTALL_LIBRARY_FROM_SOURCE" value: "False" -} \ No newline at end of file +} diff --git a/.kokoro/test-samples-against-head.sh b/.kokoro/test-samples-against-head.sh index a62db989..ba3a707b 100755 --- a/.kokoro/test-samples-against-head.sh +++ b/.kokoro/test-samples-against-head.sh @@ -23,6 +23,4 @@ set -eo pipefail # Enables `**` to include files nested inside sub-folders shopt -s globstar -cd github/python-bigquery-sqlalchemy - exec .kokoro/test-samples-impl.sh diff --git a/.kokoro/test-samples.sh b/.kokoro/test-samples.sh index f7713a95..11c042d3 100755 --- a/.kokoro/test-samples.sh +++ b/.kokoro/test-samples.sh @@ -24,8 +24,6 @@ set -eo pipefail # Enables `**` to include files nested inside sub-folders shopt -s globstar -cd github/python-bigquery-sqlalchemy - # Run periodic samples tests at latest release if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then # preserving the test runner implementation. 
diff --git a/.repo-metadata.json b/.repo-metadata.json
index ae545b37..e551b994 100644
--- a/.repo-metadata.json
+++ b/.repo-metadata.json
@@ -1,12 +1,13 @@
 {
-  "name": "sqlalchemy-bigquery",
-  "name_pretty": "SQLAlchemy dialect for BigQuery",
-  "client_documentation":
-    "https://googleapis.dev/python/sqlalchemy-bigquery/latest/index.html",
-  "release_level": "beta",
-  "language": "python",
-  "library_type": "INTEGRATION",
-  "repo": "googleapis/python-bigquery-sqlalchemy",
-  "distribution_name": "sqlalchemy-bigquery",
-  "api_id": "bigquery.googleapis.com"
+    "name": "sqlalchemy-bigquery",
+    "name_pretty": "SQLAlchemy dialect for BigQuery",
+    "client_documentation": "https://googleapis.dev/python/sqlalchemy-bigquery/latest/index.html",
+    "release_level": "beta",
+    "language": "python",
+    "library_type": "INTEGRATION",
+    "repo": "googleapis/python-bigquery-sqlalchemy",
+    "distribution_name": "sqlalchemy-bigquery",
+    "api_id": "bigquery.googleapis.com",
+    "default_version": "",
+    "codeowner_team": "@googleapis/api-bigquery"
 }
diff --git a/.trampolinerc b/.trampolinerc
index 383b6ec8..0eee72ab 100644
--- a/.trampolinerc
+++ b/.trampolinerc
@@ -16,15 +16,26 @@
 
 # Add required env vars here.
 required_envvars+=(
-  "STAGING_BUCKET"
-  "V2_STAGING_BUCKET"
 )
 
 # Add env vars which are passed down into the container here.
 pass_down_envvars+=(
+  "NOX_SESSION"
+  ###############
+  # Docs builds
+  ###############
   "STAGING_BUCKET"
   "V2_STAGING_BUCKET"
-  "NOX_SESSION"
+  ##################
+  # Samples builds
+  ##################
+  "INSTALL_LIBRARY_FROM_SOURCE"
+  "RUN_TESTS_SESSION"
+  "BUILD_SPECIFIC_GCLOUD_PROJECT"
+  # Target directories.
+  "RUN_TESTS_DIRS"
+  # The nox session to run.
+  "RUN_TESTS_SESSION"
 )
 
 # Prevent unintentional override on the default image.
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 74b032b0..8495dff3 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -14,6 +14,15 @@ Older versions of this project were distributed as [pybigquery][0].
 
 [2]: https://pypi.org/project/pybigquery/#history
 
+### [1.2.1](https://www.github.com/googleapis/python-bigquery-sqlalchemy/compare/v1.2.0...v1.2.1) (2021-10-27)
+
+
+### Bug Fixes
+
+* avoid creating aliases for already-known tables ([#361](https://www.github.com/googleapis/python-bigquery-sqlalchemy/issues/361)) ([1ce4e14](https://www.github.com/googleapis/python-bigquery-sqlalchemy/commit/1ce4e14c81a4b378dfcfba808507e6c545f34841))
+* avoid scribbling on (reused) bind param ([#365](https://www.github.com/googleapis/python-bigquery-sqlalchemy/issues/365)) ([d28cac5](https://www.github.com/googleapis/python-bigquery-sqlalchemy/commit/d28cac5864f183c0ca503854973d837b17783d52))
+* include external tables in 'get_table_names' ([#363](https://www.github.com/googleapis/python-bigquery-sqlalchemy/issues/363)) ([5e158fe](https://www.github.com/googleapis/python-bigquery-sqlalchemy/commit/5e158fe8bb2394369c020337092b5cfdb01880e0))
+
 ## [1.2.0](https://www.github.com/googleapis/python-bigquery-sqlalchemy/compare/v1.1.0...v1.2.0) (2021-09-09)
 
 
diff --git a/dev_requirements.txt b/dev_requirements.txt
index a092f5b0..e4a6a867 100644
--- a/dev_requirements.txt
+++ b/dev_requirements.txt
@@ -4,4 +4,4 @@
 future==0.18.2
 pytest==6.2.5
 pytest-flake8==1.0.7
-pytz==2021.1
\ No newline at end of file
+pytz==2021.3
\ No newline at end of file
diff --git a/owlbot.py b/owlbot.py
index dcec1b04..c3735f3e 100644
--- a/owlbot.py
+++ b/owlbot.py
@@ -63,7 +63,7 @@
 )
 
 s.replace(
-    ["noxfile.py"], "google/cloud", "sqlalchemy_bigquery",
+    ["noxfile.py"], "--cov=google", "--cov=sqlalchemy_bigquery",
 )
 
 def place_before(path, text, *before_text, escape=None):
diff --git a/samples/snippets/noxfile.py b/samples/snippets/noxfile.py
index b008613f..93a9122c 100644
--- a/samples/snippets/noxfile.py
+++ b/samples/snippets/noxfile.py
@@ -87,7 +87,7 @@ def get_pytest_env_vars() -> Dict[str, str]:
 
 # DO NOT EDIT - automatically generated.
 # All versions used to test samples.
-ALL_VERSIONS = ["3.6", "3.7", "3.8", "3.9"]
+ALL_VERSIONS = ["3.6", "3.7", "3.8", "3.9", "3.10"]
 
 # Any default versions that should be ignored.
 IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"]
@@ -98,6 +98,10 @@ def get_pytest_env_vars() -> Dict[str, str]:
     "True",
     "true",
 )
+
+# Error if a python version is missing
+nox.options.error_on_missing_interpreters = True
+
 #
 # Style Checks
 #
diff --git a/samples/snippets/requirements-test.txt b/samples/snippets/requirements-test.txt
index 1d27f79b..94f7ef6a 100644
--- a/samples/snippets/requirements-test.txt
+++ b/samples/snippets/requirements-test.txt
@@ -9,4 +9,4 @@ pyparsing==2.4.7
 pytest==6.2.5
 toml==0.10.2
 typing-extensions==3.10.0.2
-zipp==3.5.0
+zipp==3.6.0
diff --git a/samples/snippets/requirements.txt b/samples/snippets/requirements.txt
index 78b9ee50..5e266cdf 100644
--- a/samples/snippets/requirements.txt
+++ b/samples/snippets/requirements.txt
@@ -1,10 +1,10 @@
 aiocontextvars==0.2.2
 attrs==21.2.0
-cachetools==4.2.2
-certifi==2021.5.30
+cachetools==4.2.4
+certifi==2021.10.8
 cffi==1.14.6
-charset-normalizer==2.0.4
-click==8.0.1
+charset-normalizer==2.0.7
+click==8.0.3
 click-plugins==1.1.1
 cligj==0.7.2
 contextvars==2.4
@@ -13,21 +13,22 @@ Deprecated==1.2.13
 Fiona==1.8.20
 future==0.18.2
 GeoAlchemy2==0.9.4
-geopandas==0.9.0
-google-api-core==2.0.1
-google-auth==2.0.2
-google-cloud-bigquery==2.26.0
-google-cloud-bigquery-storage==2.7.0
-google-cloud-core==2.0.0
-google-crc32c==1.1.2
-google-resumable-media==2.0.2
+geopandas==0.9.0; python_version < '3.7'
+geopandas==0.10.0; python_version >= '3.7'
+google-api-core==2.1.0
+google-auth==2.3.0
+google-cloud-bigquery==2.28.1
+google-cloud-bigquery-storage==2.9.1
+google-cloud-core==2.1.0
+google-crc32c==1.3.0
+google-resumable-media==2.0.3
 googleapis-common-protos==1.53.0
-greenlet==1.1.1
-grpcio==1.40.0
-idna==3.2
+greenlet==1.1.2
+grpcio==1.41.0
+idna==3.3
 immutables==0.16
 importlib-metadata==4.8.1
-libcst==0.3.20
+libcst==0.3.21
 munch==2.5.0
 mypy-extensions==0.4.3
 numpy==1.19.5; python_version < '3.7'
@@ -39,8 +40,8 @@ opentelemetry-semantic-conventions==0.24b0
 packaging==21.0
 pandas==1.1.5; python_version < '3.7'
 pandas==1.3.2; python_version >= '3.7'
-proto-plus==1.19.0
-protobuf==3.17.3
+proto-plus==1.19.5
+protobuf==3.18.1
 pyarrow==5.0.0
 pyasn1==0.4.8
 pyasn1-modules==0.2.8
@@ -49,17 +50,17 @@ pyparsing==2.4.7
 pyproj==3.0.1; python_version < '3.7'
 pyproj==3.1.0; python_version >= '3.7'
 python-dateutil==2.8.2
-pytz==2021.1
+pytz==2021.3
 PyYAML==5.4.1
 requests==2.26.0
 rsa==4.7.2
 Shapely==1.7.1
 six==1.16.0
-SQLAlchemy==1.4.23
-sqlalchemy-bigquery==1.1.0
-tqdm==4.62.2
+SQLAlchemy==1.4.25
+sqlalchemy-bigquery==1.2.0
+tqdm==4.62.3
 typing-extensions==3.10.0.2
 typing-inspect==0.7.1
-urllib3==1.26.6
-wrapt==1.12.1
-zipp==3.5.0
+urllib3==1.26.7
+wrapt==1.13.2
+zipp==3.6.0
diff --git a/sqlalchemy_bigquery/base.py b/sqlalchemy_bigquery/base.py
index f5b1d515..f2da562d 100644
--- a/sqlalchemy_bigquery/base.py
+++ b/sqlalchemy_bigquery/base.py
@@ -50,6 +50,7 @@
 from sqlalchemy.engine.default import DefaultDialect, DefaultExecutionContext
 from sqlalchemy.engine.base import Engine
 from sqlalchemy.sql.schema import Column
+from sqlalchemy.sql.schema import Table
 from sqlalchemy.sql import elements, selectable
 import re
 
@@ -289,12 +290,18 @@ def visit_column(
             if isinstance(tablename, elements._truncated_label):
                 tablename = self._truncated_identifier("alias", tablename)
             elif TABLE_VALUED_ALIAS_ALIASES in kwargs:
-                aliases = kwargs[TABLE_VALUED_ALIAS_ALIASES]
-                if tablename not in aliases:
-                    aliases[tablename] = self.anon_map[
-                        f"{TABLE_VALUED_ALIAS_ALIASES} {tablename}"
-                    ]
-                tablename = aliases[tablename]
+                known_tables = set(
+                    from_.name
+                    for from_ in self.compile_state.froms
+                    if isinstance(from_, Table)
+                )
+                if tablename not in known_tables:
+                    aliases = kwargs[TABLE_VALUED_ALIAS_ALIASES]
+                    if tablename not in aliases:
+                        aliases[tablename] = self.anon_map[
+                            f"{TABLE_VALUED_ALIAS_ALIASES} {tablename}"
+                        ]
+                    tablename = aliases[tablename]
 
             return self.preparer.quote(tablename) + "." + name
 
@@ -464,8 +471,14 @@ def visit_bindparam(
             # The NullType/known-type check has to do with some extreme
             # edge cases having to do with empty in-lists that get special
             # hijinks from SQLAlchemy that we don't want to disturb. :)
+            #
+            # Note that we do *not* want to overwrite the "real" bindparam
+            # here, because then we can't do a recompile later (e.g., first
+            # print the statement, then execute it). See issue #357.
+            #
             if getattr(bindparam, "expand_op", None) is not None:
                 assert bindparam.expand_op.__name__.endswith("in_op")  # in in
+                bindparam = bindparam._clone(maintain_key=True)
                 bindparam.expanding = False
                 unnest = True
 
@@ -789,7 +802,7 @@ def create_connect_args(self, url):
         )
         return ([client], {})
 
-    def _get_table_or_view_names(self, connection, table_type, schema=None):
+    def _get_table_or_view_names(self, connection, item_types, schema=None):
         current_schema = schema or self.dataset_id
         get_table_name = (
             self._build_formatted_table_id
@@ -810,7 +823,7 @@ def _get_table_or_view_names(self, connection, table_type, schema=None):
                     dataset.reference, page_size=self.list_tables_page_size
                 )
                 for table in tables:
-                    if table_type == table.table_type:
+                    if table.table_type in item_types:
                         result.append(get_table_name(table))
             except google.api_core.exceptions.NotFound:
                 # It's possible that the dataset was deleted between when we
@@ -963,13 +976,15 @@ def get_table_names(self, connection, schema=None, **kw):
         if isinstance(connection, Engine):
             connection = connection.connect()
 
-        return self._get_table_or_view_names(connection, "TABLE", schema)
+        item_types = ["TABLE", "EXTERNAL"]
+        return self._get_table_or_view_names(connection, item_types, schema)
 
     def get_view_names(self, connection, schema=None, **kw):
         if isinstance(connection, Engine):
             connection = connection.connect()
 
-        return self._get_table_or_view_names(connection, "VIEW", schema)
+        item_types = ["VIEW", "MATERIALIZED_VIEW"]
+        return self._get_table_or_view_names(connection, item_types, schema)
 
     def do_rollback(self, dbapi_connection):
         # BigQuery has no support for transactions.
diff --git a/sqlalchemy_bigquery/version.py b/sqlalchemy_bigquery/version.py
index f7a8338b..66fe5995 100644
--- a/sqlalchemy_bigquery/version.py
+++ b/sqlalchemy_bigquery/version.py
@@ -17,4 +17,4 @@
 # IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
 # CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-__version__ = "1.2.0" +__version__ = "1.2.1" diff --git a/tests/system/conftest.py b/tests/system/conftest.py index 7bf76a2d..04803ebd 100644 --- a/tests/system/conftest.py +++ b/tests/system/conftest.py @@ -23,8 +23,10 @@ import pytest import sqlalchemy +from google.api_core import exceptions from google.cloud import bigquery import test_utils.prefixer +import test_utils.retry from sqlalchemy_bigquery import BigQueryDialect @@ -128,7 +130,10 @@ def bigquery_regional_dataset(bigquery_client, bigquery_schema): @pytest.fixture(autouse=True) def cleanup_extra_tables(bigquery_client, bigquery_dataset): common = "sample", "sample_one_row", "sample_view", "sample_dml_empty" - for table in bigquery_client.list_tables(bigquery_dataset): + # Back-end may raise 403 for a dataset not ready yet. + retry_403 = test_utils.retry.RetryErrors(exceptions.Forbidden) + tables = retry_403(bigquery_client.list_tables)(bigquery_dataset) + for table in tables: if table.table_id not in common: bigquery_client.delete_table(table) diff --git a/tests/unit/test_compiler.py b/tests/unit/test_compiler.py index 88030b47..5da4e935 100644 --- a/tests/unit/test_compiler.py +++ b/tests/unit/test_compiler.py @@ -21,6 +21,7 @@ import sqlalchemy.exc from conftest import setup_table +from conftest import sqlalchemy_1_4_or_higher def test_constraints_are_ignored(faux_conn, metadata): @@ -53,3 +54,25 @@ def test_cant_compile_unnamed_column(faux_conn, metadata): match="Cannot compile Column object until its 'name' is assigned.", ): sqlalchemy.Column(sqlalchemy.Integer).compile(faux_conn) + + +@sqlalchemy_1_4_or_higher +def test_no_alias_for_known_tables(faux_conn, metadata): + # See: https://github.com/googleapis/python-bigquery-sqlalchemy/issues/353 + table = setup_table( + faux_conn, + "table1", + metadata, + sqlalchemy.Column("foo", sqlalchemy.Integer), + sqlalchemy.Column("bar", sqlalchemy.ARRAY(sqlalchemy.Integer)), + ) + F = sqlalchemy.func + q = sqlalchemy.select(table.c.foo).where(F.unnest(table.c.bar).column_valued() == 1) + + expected_sql = ( + "SELECT `table1`.`foo` \n" + "FROM `table1`, unnest(`table1`.`bar`) AS `anon_1` \n" + "WHERE `anon_1` = %(param_1:INT64)s" + ) + found_sql = q.compile(faux_conn).string + assert found_sql == expected_sql diff --git a/tests/unit/test_sqlalchemy_bigquery.py b/tests/unit/test_sqlalchemy_bigquery.py index 75cbec42..e97b15ff 100644 --- a/tests/unit/test_sqlalchemy_bigquery.py +++ b/tests/unit/test_sqlalchemy_bigquery.py @@ -77,9 +77,11 @@ def table_item(dataset_id, table_id, type_="TABLE"): [ table_item("dataset_2", "d2t1"), table_item("dataset_2", "d2view", type_="VIEW"), + table_item("dataset_2", "d2ext", type_="EXTERNAL"), + table_item("dataset_2", "d2mv", type_="MATERIALIZED_VIEW"), ], ], - ["dataset_1.d1t1", "dataset_1.d1t2", "dataset_2.d2t1"], + ["dataset_1.d1t1", "dataset_1.d1t2", "dataset_2.d2t1", "dataset_2.d2ext"], ), ( [dataset_item("dataset_1"), dataset_item("dataset_deleted")], @@ -117,9 +119,11 @@ def test_get_table_names( [ table_item("dataset_2", "d2t1"), table_item("dataset_2", "d2view", type_="VIEW"), + table_item("dataset_2", "d2ext", type_="EXTERNAL"), + table_item("dataset_2", "d2mv", type_="MATERIALIZED_VIEW"), ], ], - ["dataset_1.d1view", "dataset_2.d2view"], + ["dataset_1.d1view", "dataset_2.d2view", "dataset_2.d2mv"], ), ( [dataset_item("dataset_1"), dataset_item("dataset_deleted")],