From b319d8265fd1f2ce32ebc116a32727f6a96a7a9f Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Wed, 13 Dec 2023 17:34:43 -0500 Subject: [PATCH 01/16] ci: Add python 3.11/3.12 as required checks (#931) --- .github/sync-repo-settings.yaml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/sync-repo-settings.yaml b/.github/sync-repo-settings.yaml index 68af1253..38bd545c 100644 --- a/.github/sync-repo-settings.yaml +++ b/.github/sync-repo-settings.yaml @@ -14,6 +14,8 @@ branchProtectionRules: - 'Samples - Python 3.8' - 'Samples - Python 3.9' - 'Samples - Python 3.10' + - 'Samples - Python 3.11' + - 'Samples - Python 3.12' permissionRules: - team: actools-python permission: admin From 8986066a39069c4a3da582d49753d585ab1570bb Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Thu, 14 Dec 2023 13:44:01 +0100 Subject: [PATCH 02/16] chore(deps): update all dependencies (#899) --- dev_requirements.txt | 2 +- samples/snippets/requirements-test.txt | 14 ++++---- samples/snippets/requirements.txt | 46 +++++++++++++------------- 3 files changed, 31 insertions(+), 31 deletions(-) diff --git a/dev_requirements.txt b/dev_requirements.txt index ddc53054..1798fab5 100644 --- a/dev_requirements.txt +++ b/dev_requirements.txt @@ -2,4 +2,4 @@ sqlalchemy>=2.0.15,<2.1.0 google-cloud-bigquery>=1.6.0 pytest===6.2.5 pytest-flake8===1.1.0 # versions 1.1.1 and above require pytest 7 -pytz==2023.3 +pytz==2023.3.post1 diff --git a/samples/snippets/requirements-test.txt b/samples/snippets/requirements-test.txt index 90537d96..552292b6 100644 --- a/samples/snippets/requirements-test.txt +++ b/samples/snippets/requirements-test.txt @@ -1,16 +1,16 @@ attrs==23.1.0 -click==8.1.6 -google-auth==2.22.0 -google-cloud-testutils==1.3.3 +click==8.1.7 +google-auth==2.25.2 +google-cloud-testutils==1.4.0 iniconfig==2.0.0 -packaging==23.1 -pluggy==1.2.0 +packaging==23.2 +pluggy==1.3.0 py==1.11.0 -pyasn1==0.5.0 +pyasn1==0.5.1 pyasn1-modules==0.3.0 pyparsing==3.1.1 pytest===6.2.5 rsa==4.9 six==1.16.0 toml==0.10.2 -typing-extensions==4.7.1 +typing-extensions==4.9.0 diff --git a/samples/snippets/requirements.txt b/samples/snippets/requirements.txt index b15bf2cb..06eb49db 100644 --- a/samples/snippets/requirements.txt +++ b/samples/snippets/requirements.txt @@ -1,33 +1,33 @@ -alembic==1.11.2 -certifi==2023.7.22 -charset-normalizer==3.2.0 -geoalchemy2==0.14.1 -google-api-core[grpc]==2.11.1 -google-auth==2.22.0 -google-cloud-bigquery==3.11.4 -google-cloud-core==2.3.3 +alembic==1.13.0 +certifi==2023.11.17 +charset-normalizer==3.3.2 +geoalchemy2==0.14.2 +google-api-core[grpc]==2.15.0 +google-auth==2.25.2 +google-cloud-bigquery==3.14.0 +google-cloud-core==2.4.1 google-crc32c==1.5.0 -google-resumable-media==2.5.0 -googleapis-common-protos==1.60.0 -greenlet==3.0.1 -grpcio==1.59.0 -grpcio-status==1.57.0 -idna==3.4 -importlib-resources==6.0.1; python_version >= '3.8' -mako==1.2.4 +google-resumable-media==2.7.0 +googleapis-common-protos==1.62.0 +greenlet==3.0.2 +grpcio==1.60.0 +grpcio-status==1.60.0 +idna==3.6 +importlib-resources==6.1.1; python_version >= '3.8' +mako==1.3.0 markupsafe==2.1.3 -packaging==23.1 -proto-plus==1.22.3 -protobuf==4.24.0 -pyasn1==0.5.0 +packaging==23.2 +proto-plus==1.23.0 +protobuf==4.25.1 +pyasn1==0.5.1 pyasn1-modules==0.3.0 pyparsing==3.1.1 python-dateutil==2.8.2 -pytz==2023.3 +pytz==2023.3.post1 requests==2.31.0 rsa==4.9 shapely==2.0.2 six==1.16.0 sqlalchemy===1.4.27 -typing-extensions==4.7.1 -urllib3==1.26.18 +typing-extensions==4.9.0 +urllib3==2.1.0 From 3960ac3b5af09b130a832cc78b6dd76b369add42 Mon 
Sep 17 00:00:00 2001 From: Mend Renovate Date: Thu, 14 Dec 2023 14:13:11 +0100 Subject: [PATCH 03/16] chore(deps): update dependency google-cloud-bigquery to v3.14.1 (#933) --- samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/samples/snippets/requirements.txt b/samples/snippets/requirements.txt index 06eb49db..a1ea3358 100644 --- a/samples/snippets/requirements.txt +++ b/samples/snippets/requirements.txt @@ -4,7 +4,7 @@ charset-normalizer==3.3.2 geoalchemy2==0.14.2 google-api-core[grpc]==2.15.0 google-auth==2.25.2 -google-cloud-bigquery==3.14.0 +google-cloud-bigquery==3.14.1 google-cloud-core==2.4.1 google-crc32c==1.5.0 google-resumable-media==2.7.0 From ac74a3434c437f60b6f215ac09dea224aa406f8a Mon Sep 17 00:00:00 2001 From: Shachar Snapiri Date: Tue, 19 Dec 2023 19:28:10 +0200 Subject: [PATCH 04/16] fix: Avoid implicit join when using join with unnest (#924) * fix: avoid implicit join when using join with unnest When using JOIN with UNNEST statements, and then creating a SELECT statement based on it, the UNNESTed table will appear twice in the FROM clause, causing an implicit join of the table with itself * Add safety checks * Add tests and fix cover --- sqlalchemy_bigquery/base.py | 8 ++ tests/unit/test_compiler.py | 166 +++++++++++++++++++++++++++++++++++- 2 files changed, 173 insertions(+), 1 deletion(-) diff --git a/sqlalchemy_bigquery/base.py b/sqlalchemy_bigquery/base.py index 5297f223..03488250 100644 --- a/sqlalchemy_bigquery/base.py +++ b/sqlalchemy_bigquery/base.py @@ -269,6 +269,14 @@ def _known_tables(self): if table is not None: known_tables.add(table.name) + # If we have the table in the `from` of our parent, do not add the alias + # as this will add the table twice and cause an implicit JOIN for that + # table on itself + asfrom_froms = self.stack[-1].get("asfrom_froms", []) + for from_ in asfrom_froms: + if isinstance(from_, Table): + known_tables.add(from_.name) + return known_tables def visit_column( diff --git a/tests/unit/test_compiler.py b/tests/unit/test_compiler.py index db02e593..139b6cbc 100644 --- a/tests/unit/test_compiler.py +++ b/tests/unit/test_compiler.py @@ -21,7 +21,7 @@ import sqlalchemy.exc from .conftest import setup_table -from .conftest import sqlalchemy_1_4_or_higher +from .conftest import sqlalchemy_1_4_or_higher, sqlalchemy_before_1_4 def test_constraints_are_ignored(faux_conn, metadata): @@ -114,3 +114,167 @@ def test_no_alias_for_known_tables_cte(faux_conn, metadata): ) found_cte_sql = q.compile(faux_conn).string assert found_cte_sql == expected_cte_sql + + +def prepare_implicit_join_base_query( + faux_conn, metadata, select_from_table2, old_syntax +): + table1 = setup_table( + faux_conn, "table1", metadata, sqlalchemy.Column("foo", sqlalchemy.Integer) + ) + table2 = setup_table( + faux_conn, + "table2", + metadata, + sqlalchemy.Column("foos", sqlalchemy.ARRAY(sqlalchemy.Integer)), + sqlalchemy.Column("bar", sqlalchemy.Integer), + ) + F = sqlalchemy.func + + unnested_col_name = "unnested_foos" + unnested_foos = F.unnest(table2.c.foos).alias(unnested_col_name) + unnested_foo_col = sqlalchemy.Column(unnested_col_name) + + # Set up initial query + cols = [table1.c.foo, table2.c.bar] if select_from_table2 else [table1.c.foo] + q = sqlalchemy.select(cols) if old_syntax else sqlalchemy.select(*cols) + q = q.select_from(unnested_foos.join(table1, table1.c.foo == unnested_foo_col)) + return q + + +@sqlalchemy_before_1_4 +def test_no_implicit_join_asterix_for_inner_unnest_before_1_4(faux_conn, metadata): 
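+    # prepare_implicit_join_base_query (above) joins `table1` against
+    # unnest(`table2`.`foos`); before the `_known_tables` change in base.py,
+    # selecting from that query emitted `table2` twice in the FROM clause,
+    # i.e. an implicit join of the table with itself.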
+ # See: https://github.com/googleapis/python-bigquery-sqlalchemy/issues/368 + q = prepare_implicit_join_base_query(faux_conn, metadata, True, True) + expected_initial_sql = ( + "SELECT `table1`.`foo`, `table2`.`bar` \n" + "FROM `table2`, unnest(`table2`.`foos`) AS `unnested_foos` JOIN `table1` ON `table1`.`foo` = `unnested_foos`" + ) + found_initial_sql = q.compile(faux_conn).string + assert found_initial_sql == expected_initial_sql + + q = sqlalchemy.select(["*"]).select_from(q) + + expected_outer_sql = ( + "SELECT * \n" + "FROM (SELECT `table1`.`foo` AS `foo`, `table2`.`bar` AS `bar` \n" + "FROM `table2`, unnest(`table2`.`foos`) AS `unnested_foos` JOIN `table1` ON `table1`.`foo` = `unnested_foos`)" + ) + found_outer_sql = q.compile(faux_conn).string + assert found_outer_sql == expected_outer_sql + + +@sqlalchemy_1_4_or_higher +def test_no_implicit_join_asterix_for_inner_unnest(faux_conn, metadata): + # See: https://github.com/googleapis/python-bigquery-sqlalchemy/issues/368 + q = prepare_implicit_join_base_query(faux_conn, metadata, True, False) + expected_initial_sql = ( + "SELECT `table1`.`foo`, `table2`.`bar` \n" + "FROM `table2`, unnest(`table2`.`foos`) AS `unnested_foos` JOIN `table1` ON `table1`.`foo` = `unnested_foos`" + ) + found_initial_sql = q.compile(faux_conn).string + assert found_initial_sql == expected_initial_sql + + q = q.subquery() + q = sqlalchemy.select("*").select_from(q) + + expected_outer_sql = ( + "SELECT * \n" + "FROM (SELECT `table1`.`foo` AS `foo`, `table2`.`bar` AS `bar` \n" + "FROM `table2`, unnest(`table2`.`foos`) AS `unnested_foos` JOIN `table1` ON `table1`.`foo` = `unnested_foos`) AS `anon_1`" + ) + found_outer_sql = q.compile(faux_conn).string + assert found_outer_sql == expected_outer_sql + + +@sqlalchemy_before_1_4 +def test_no_implicit_join_for_inner_unnest_before_1_4(faux_conn, metadata): + # See: https://github.com/googleapis/python-bigquery-sqlalchemy/issues/368 + q = prepare_implicit_join_base_query(faux_conn, metadata, True, True) + expected_initial_sql = ( + "SELECT `table1`.`foo`, `table2`.`bar` \n" + "FROM `table2`, unnest(`table2`.`foos`) AS `unnested_foos` JOIN `table1` ON `table1`.`foo` = `unnested_foos`" + ) + found_initial_sql = q.compile(faux_conn).string + assert found_initial_sql == expected_initial_sql + + q = sqlalchemy.select([q.c.foo]).select_from(q) + + expected_outer_sql = ( + "SELECT `foo` \n" + "FROM (SELECT `table1`.`foo` AS `foo`, `table2`.`bar` AS `bar` \n" + "FROM `table2`, unnest(`table2`.`foos`) AS `unnested_foos` JOIN `table1` ON `table1`.`foo` = `unnested_foos`)" + ) + found_outer_sql = q.compile(faux_conn).string + assert found_outer_sql == expected_outer_sql + + +@sqlalchemy_1_4_or_higher +def test_no_implicit_join_for_inner_unnest(faux_conn, metadata): + # See: https://github.com/googleapis/python-bigquery-sqlalchemy/issues/368 + q = prepare_implicit_join_base_query(faux_conn, metadata, True, False) + expected_initial_sql = ( + "SELECT `table1`.`foo`, `table2`.`bar` \n" + "FROM `table2`, unnest(`table2`.`foos`) AS `unnested_foos` JOIN `table1` ON `table1`.`foo` = `unnested_foos`" + ) + found_initial_sql = q.compile(faux_conn).string + assert found_initial_sql == expected_initial_sql + + q = q.subquery() + q = sqlalchemy.select(q.c.foo).select_from(q) + + expected_outer_sql = ( + "SELECT `anon_1`.`foo` \n" + "FROM (SELECT `table1`.`foo` AS `foo`, `table2`.`bar` AS `bar` \n" + "FROM `table2`, unnest(`table2`.`foos`) AS `unnested_foos` JOIN `table1` ON `table1`.`foo` = `unnested_foos`) AS `anon_1`" + ) + found_outer_sql 
= q.compile(faux_conn).string
+    assert found_outer_sql == expected_outer_sql
+
+
+@sqlalchemy_1_4_or_higher
+def test_no_implicit_join_asterix_for_inner_unnest_no_table2_column(
+    faux_conn, metadata
+):
+    # See: https://github.com/googleapis/python-bigquery-sqlalchemy/issues/368
+    q = prepare_implicit_join_base_query(faux_conn, metadata, False, False)
+    expected_initial_sql = (
+        "SELECT `table1`.`foo` \n"
+        "FROM `table2` `table2_1`, unnest(`table2_1`.`foos`) AS `unnested_foos` JOIN `table1` ON `table1`.`foo` = `unnested_foos`"
+    )
+    found_initial_sql = q.compile(faux_conn).string
+    assert found_initial_sql == expected_initial_sql
+
+    q = q.subquery()
+    q = sqlalchemy.select("*").select_from(q)
+
+    expected_outer_sql = (
+        "SELECT * \n"
+        "FROM (SELECT `table1`.`foo` AS `foo` \n"
+        "FROM `table2` `table2_1`, unnest(`table2_1`.`foos`) AS `unnested_foos` JOIN `table1` ON `table1`.`foo` = `unnested_foos`) AS `anon_1`"
+    )
+    found_outer_sql = q.compile(faux_conn).string
+    assert found_outer_sql == expected_outer_sql
+
+
+@sqlalchemy_1_4_or_higher
+def test_no_implicit_join_for_inner_unnest_no_table2_column(faux_conn, metadata):
+    # See: https://github.com/googleapis/python-bigquery-sqlalchemy/issues/368
+    q = prepare_implicit_join_base_query(faux_conn, metadata, False, False)
+    expected_initial_sql = (
+        "SELECT `table1`.`foo` \n"
+        "FROM `table2` `table2_1`, unnest(`table2_1`.`foos`) AS `unnested_foos` JOIN `table1` ON `table1`.`foo` = `unnested_foos`"
+    )
+    found_initial_sql = q.compile(faux_conn).string
+    assert found_initial_sql == expected_initial_sql
+
+    q = q.subquery()
+    q = sqlalchemy.select(q.c.foo).select_from(q)
+
+    expected_outer_sql = (
+        "SELECT `anon_1`.`foo` \n"
+        "FROM (SELECT `table1`.`foo` AS `foo` \n"
+        "FROM `table2` `table2_1`, unnest(`table2_1`.`foos`) AS `unnested_foos` JOIN `table1` ON `table1`.`foo` = `unnested_foos`) AS `anon_1`"
+    )
+    found_outer_sql = q.compile(faux_conn).string
+    assert found_outer_sql == expected_outer_sql

From c2c2958886cc3c8a51c4f5fc1a8c36b65921edd9 Mon Sep 17 00:00:00 2001
From: Nicolas Lenepveu
Date: Wed, 10 Jan 2024 18:19:24 +0100
Subject: [PATCH 05/16] feat: allow to set clustering and time partitioning
 options at table creation (#928)

* refactor: standardize bigquery options handling to manage more options

* feat: handle table partitioning, table clustering and more table options
  (expiration_timestamp, require_partition_filter, default_rounding_mode)
  via create_table dialect options

* fix: having clustering fields and partitioning exposed as table indexes
  leads to bad autogenerated version file

    def upgrade() -> None:
        # ### commands auto generated by Alembic - please adjust! ###
        op.drop_index('clustering', table_name='dataset.some_table')
        op.drop_index('partition', table_name='dataset.some_table')
        # ### end Alembic commands ###

    def downgrade() -> None:
        # ### commands auto generated by Alembic - please adjust! ###
        op.create_index('partition', 'dataset.some_table', ['createdAt'], unique=False)
        op.create_index('clustering', 'dataset.some_table', ['id', 'createdAt'], unique=False)
        # ### end Alembic commands ###

* docs: update README to describe how to create clustered and partitioned
  tables as well as other newly supported table options

* test: adjust system tests since indexes are no longer populated from
  table partitions and clustering info

* test: alembic now supports creating partitioned tables

* test: run integration tests with all the new create_table options

* chore: rename variables to represent what they are a bit more clearly

* fix: assertions should not be used to validate user inputs

* refactor: extract process_option_value() from post_create_table() for
  improved readability

* docs: add docstring to post_create_table() and _process_option_value()

* test: increase code coverage by testing error cases

* refactor: better represent the distinction between the option value data
  type check and the transformation in SQL literal

* test: adding test cases for _validate_option_value_type() and
  _process_option_value()

* chore: coding style

* chore: reformat files with black

* test: typo in tests

* feat: change the option name for partitioning to leverage the
  TimePartitioning interface of the Python Client for Google BigQuery

* fix: TimePartitioning.field is optional

* chore: coding style

* test: fix system test with table option bigquery_require_partition_filter

* feat: add support for experimental range_partitioning option

* test: fix system test with new bigquery_time_partitioning table option

* docs: update README with time_partitioning and range_partitioning

* test: relevant comments in unit tests

* test: cover all error cases

* chore: no magic numbers

* chore: consistency in docstrings

* chore: no magic number

* chore: better error types

* chore: fix W605 invalid escape sequence
---
 README.rst                               |  53 ++++-
 sqlalchemy_bigquery/base.py              | 280 +++++++++++--
 tests/system/test_alembic.py             |  17 +-
 tests/system/test_sqlalchemy_bigquery.py |  22 +-
 tests/unit/conftest.py                   |   7 +
 tests/unit/test_catalog_functions.py     |  13 +-
 tests/unit/test_table_options.py         | 474 +++++++++++++++++++++++
 7 files changed, 799 insertions(+), 67 deletions(-)
 create mode 100644 tests/unit/test_table_options.py

diff --git a/README.rst b/README.rst
index a2036289..17534886 100644
--- a/README.rst
+++ b/README.rst
@@ -292,7 +292,12 @@ To add metadata to a table:

 .. code-block:: python

-    table = Table('mytable', ..., bigquery_description='my table description', bigquery_friendly_name='my table friendly name')
+    table = Table('mytable', ...,
+        bigquery_description='my table description',
+        bigquery_friendly_name='my table friendly name',
+        bigquery_default_rounding_mode="ROUND_HALF_EVEN",
+        bigquery_expiration_timestamp=datetime.datetime.fromisoformat("2038-01-01T00:00:00+00:00"),
+    )

 To add metadata to a column:

 .. code-block:: python

     Column('mycolumn', doc='my column description')

+To create a clustered table:
+
+.. code-block:: python
+
+    table = Table('mytable', ..., bigquery_clustering_fields=["a", "b", "c"])
+
+To create a time-unit column-partitioned table:
+
+.. code-block:: python
+
+    from google.cloud import bigquery
+
+    table = Table('mytable', ...,
+        bigquery_time_partitioning=bigquery.TimePartitioning(
+            field="mytimestamp",
+            type_="MONTH",
+            expiration_ms=1000 * 60 * 60 * 24 * 30 * 6,  # 6 months
+        ),
+        bigquery_require_partition_filter=True,
+    )
+
+To create an ingestion-time partitioned table:
+
+.. code-block:: python
+
+    from google.cloud import bigquery
+
+    table = Table('mytable', ...,
+        bigquery_time_partitioning=bigquery.TimePartitioning(),
+        bigquery_require_partition_filter=True,
+    )
+
+To create an integer-range partitioned table:
+
+.. code-block:: python
+
+    from google.cloud import bigquery
+
+    table = Table('mytable', ...,
+        bigquery_range_partitioning=bigquery.RangePartitioning(
+            field="zipcode",
+            range_=bigquery.PartitionRange(start=0, end=100000, interval=10),
+        ),
+        bigquery_require_partition_filter=True,
+    )
+
 Threading and Multiprocessing
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

diff --git a/sqlalchemy_bigquery/base.py b/sqlalchemy_bigquery/base.py
index 03488250..f4266f13 100644
--- a/sqlalchemy_bigquery/base.py
+++ b/sqlalchemy_bigquery/base.py
@@ -19,6 +19,7 @@
 """Integration between SQLAlchemy and BigQuery."""

+import datetime
 from decimal import Decimal
 import random
 import operator
@@ -27,7 +28,11 @@
 from google import auth
 import google.api_core.exceptions
 from google.cloud.bigquery import dbapi
-from google.cloud.bigquery.table import TableReference
+from google.cloud.bigquery.table import (
+    RangePartitioning,
+    TableReference,
+    TimePartitioning,
+)
 from google.api_core.exceptions import NotFound
 import packaging.version
 import sqlalchemy
@@ -35,7 +40,7 @@
 import sqlalchemy.sql.functions
 import sqlalchemy.sql.sqltypes
 import sqlalchemy.sql.type_api
-from sqlalchemy.exc import NoSuchTableError
+from sqlalchemy.exc import NoSuchTableError, NoSuchColumnError
 from sqlalchemy import util
 from sqlalchemy.ext.compiler import compiles
 from sqlalchemy.sql.compiler import (
@@ -631,6 +636,13 @@ def visit_NUMERIC(self, type_, **kw):


 class BigQueryDDLCompiler(DDLCompiler):
+    option_datatype_mapping = {
+        "friendly_name": str,
+        "expiration_timestamp": datetime.datetime,
+        "require_partition_filter": bool,
+        "default_rounding_mode": str,
+    }
+
     # BigQuery has no support for foreign keys.
     def visit_foreign_key_constraint(self, constraint):
         return None
@@ -654,26 +666,99 @@ def get_column_specification(self, column, **kwargs):
         return colspec

     def post_create_table(self, table):
+        """
+        Constructs additional SQL clauses for table creation in BigQuery.
+
+        This function processes the BigQuery dialect-specific options and generates SQL clauses for partitioning,
+        clustering, and other table options.
+
+        Args:
+            table (Table): The SQLAlchemy Table object for which the SQL is being generated.
+
+        Returns:
+            str: A string composed of SQL clauses for time partitioning, clustering, and other BigQuery specific
+            options, each separated by a newline. Returns an empty string if no such options are specified.
+
+        Raises:
+            TypeError: If the time_partitioning option is not a `TimePartitioning` object or if the clustering_fields option is not a list.
+            NoSuchColumnError: If any field specified in clustering_fields does not exist in the table.
+        """
+
         bq_opts = table.dialect_options["bigquery"]

-        opts = []
-        if ("description" in bq_opts) or table.comment:
-            description = process_string_literal(
-                bq_opts.get("description", table.comment)
+        options = {}
+        clauses = []
+
+        if (
+            bq_opts.get("time_partitioning") is not None
+            and bq_opts.get("range_partitioning") is not None
+        ):
+            raise ValueError(
+                "bigquery_time_partitioning and bigquery_range_partitioning"
+                " dialect options are mutually exclusive."
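+                # Reached when a Table sets both bigquery_time_partitioning
+                # and bigquery_range_partitioning (exercised by
+                # test_time_and_range_partitioning_mutually_exclusive below).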
+ ) + + if (time_partitioning := bq_opts.get("time_partitioning")) is not None: + self._raise_for_type( + "time_partitioning", + time_partitioning, + TimePartitioning, ) - opts.append(f"description={description}") - if "friendly_name" in bq_opts: - opts.append( - "friendly_name={}".format( - process_string_literal(bq_opts["friendly_name"]) + if time_partitioning.expiration_ms: + _24hours = 1000 * 60 * 60 * 24 + options["partition_expiration_days"] = ( + time_partitioning.expiration_ms / _24hours ) + + partition_by_clause = self._process_time_partitioning( + table, + time_partitioning, ) - if opts: - return "\nOPTIONS({})".format(", ".join(opts)) + clauses.append(partition_by_clause) - return "" + if (range_partitioning := bq_opts.get("range_partitioning")) is not None: + self._raise_for_type( + "range_partitioning", + range_partitioning, + RangePartitioning, + ) + + partition_by_clause = self._process_range_partitioning( + table, + range_partitioning, + ) + + clauses.append(partition_by_clause) + + if (clustering_fields := bq_opts.get("clustering_fields")) is not None: + self._raise_for_type("clustering_fields", clustering_fields, list) + + for field in clustering_fields: + if field not in table.c: + raise NoSuchColumnError(field) + + clauses.append(f"CLUSTER BY {', '.join(clustering_fields)}") + + if ("description" in bq_opts) or table.comment: + description = bq_opts.get("description", table.comment) + self._validate_option_value_type("description", description) + options["description"] = description + + for option in self.option_datatype_mapping: + if option in bq_opts: + options[option] = bq_opts.get(option) + + if options: + individual_option_statements = [ + "{}={}".format(k, self._process_option_value(v)) + for (k, v) in options.items() + if self._validate_option_value_type(k, v) + ] + clauses.append(f"OPTIONS({', '.join(individual_option_statements)})") + + return " " + "\n".join(clauses) def visit_set_table_comment(self, create): table_name = self.preparer.format_table(create.element) @@ -686,6 +771,152 @@ def visit_drop_table_comment(self, drop): table_name = self.preparer.format_table(drop.element) return f"ALTER TABLE {table_name} SET OPTIONS(description=null)" + def _validate_option_value_type(self, option: str, value): + """ + Validates the type of the given option value against the expected data type. + + Args: + option (str): The name of the option to be validated. + value: The value of the dialect option whose type is to be checked. The type of this parameter + is dynamic and is verified against the expected type in `self.option_datatype_mapping`. + + Returns: + bool: True if the type of the value matches the expected type, or if the option is not found in + `self.option_datatype_mapping`. + + Raises: + TypeError: If the type of the provided value does not match the expected type as defined in + `self.option_datatype_mapping`. + """ + if option in self.option_datatype_mapping: + self._raise_for_type( + option, + value, + self.option_datatype_mapping[option], + ) + + return True + + def _raise_for_type(self, option, value, expected_type): + if type(value) is not expected_type: + raise TypeError( + f"bigquery_{option} dialect option accepts only {expected_type}," + f" provided {repr(value)}" + ) + + def _process_time_partitioning( + self, table: Table, time_partitioning: TimePartitioning + ): + """ + Generates a SQL 'PARTITION BY' clause for partitioning a table by a date or timestamp. 
+ + Args: + - table (Table): The SQLAlchemy table object representing the BigQuery table to be partitioned. + - time_partitioning (TimePartitioning): The time partitioning details, + including the field to be used for partitioning. + + Returns: + - str: A SQL 'PARTITION BY' clause that uses either TIMESTAMP_TRUNC or DATE_TRUNC to + partition data on the specified field. + + Example: + - Given a table with a TIMESTAMP type column 'event_timestamp' and setting + 'time_partitioning.field' to 'event_timestamp', the function returns + "PARTITION BY TIMESTAMP_TRUNC(event_timestamp, DAY)". + """ + field = "_PARTITIONDATE" + trunc_fn = "DATE_TRUNC" + + if time_partitioning.field is not None: + field = time_partitioning.field + if isinstance( + table.columns[time_partitioning.field].type, + sqlalchemy.sql.sqltypes.TIMESTAMP, + ): + trunc_fn = "TIMESTAMP_TRUNC" + + return f"PARTITION BY {trunc_fn}({field}, {time_partitioning.type_})" + + def _process_range_partitioning( + self, table: Table, range_partitioning: RangePartitioning + ): + """ + Generates a SQL 'PARTITION BY' clause for partitioning a table by a range of integers. + + Args: + - table (Table): The SQLAlchemy table object representing the BigQuery table to be partitioned. + - range_partitioning (RangePartitioning): The RangePartitioning object containing the + partitioning field, range start, range end, and interval. + + Returns: + - str: A SQL string for range partitioning using RANGE_BUCKET and GENERATE_ARRAY functions. + + Raises: + - AttributeError: If the partitioning field is not defined. + - ValueError: If the partitioning field (i.e. column) data type is not an integer. + - TypeError: If the partitioning range start/end values are not integers. + + Example: + "PARTITION BY RANGE_BUCKET(zipcode, GENERATE_ARRAY(0, 100000, 10))" + """ + if range_partitioning.field is None: + raise AttributeError( + "bigquery_range_partitioning expects field to be defined" + ) + + if not isinstance( + table.columns[range_partitioning.field].type, + sqlalchemy.sql.sqltypes.INT, + ): + raise ValueError( + "bigquery_range_partitioning expects field (i.e. column) data type to be INTEGER" + ) + + range_ = range_partitioning.range_ + + if not isinstance(range_.start, int): + raise TypeError( + "bigquery_range_partitioning expects range_.start to be an int," + f" provided {repr(range_.start)}" + ) + + if not isinstance(range_.end, int): + raise TypeError( + "bigquery_range_partitioning expects range_.end to be an int," + f" provided {repr(range_.end)}" + ) + + default_interval = 1 + + return f"PARTITION BY RANGE_BUCKET({range_partitioning.field}, GENERATE_ARRAY({range_.start}, {range_.end}, {range_.interval or default_interval}))" + + def _process_option_value(self, value): + """ + Transforms the given option value into a literal representation suitable for SQL queries in BigQuery. + + Args: + value: The value to be transformed. + + Returns: + The processed value in a format suitable for inclusion in a SQL query. + + Raises: + NotImplementedError: When there is no transformation registered for a data type. 
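+
+        Example (mirroring the unit tests for this method):
+            _process_option_value("Some text") -> "'Some text'"
+            _process_option_value(True)        -> "true"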
+ """ + option_casting = { + # Mapping from option type to its casting method + str: lambda x: process_string_literal(x), + int: lambda x: x, + float: lambda x: x, + bool: lambda x: "true" if x else "false", + datetime.datetime: lambda x: BQTimestamp.process_timestamp_literal(x), + } + + if (option_cast := option_casting.get(type(value))) is not None: + return option_cast(value) + + raise NotImplementedError(f"No transformation registered for {repr(value)}") + def process_string_literal(value): return repr(value.replace("%", "%%")) @@ -997,25 +1228,8 @@ def get_pk_constraint(self, connection, table_name, schema=None, **kw): return {"constrained_columns": []} def get_indexes(self, connection, table_name, schema=None, **kw): - table = self._get_table(connection, table_name, schema) - indexes = [] - if table.time_partitioning: - indexes.append( - { - "name": "partition", - "column_names": [table.time_partitioning.field], - "unique": False, - } - ) - if table.clustering_fields: - indexes.append( - { - "name": "clustering", - "column_names": table.clustering_fields, - "unique": False, - } - ) - return indexes + # BigQuery has no support for indexes. + return [] def get_schema_names(self, connection, **kw): if isinstance(connection, Engine): diff --git a/tests/system/test_alembic.py b/tests/system/test_alembic.py index 1948a19a..30308c68 100644 --- a/tests/system/test_alembic.py +++ b/tests/system/test_alembic.py @@ -23,7 +23,7 @@ from sqlalchemy import Column, DateTime, Integer, String, Numeric import google.api_core.exceptions -from google.cloud.bigquery import SchemaField +from google.cloud.bigquery import SchemaField, TimePartitioning alembic = pytest.importorskip("alembic") @@ -138,15 +138,12 @@ def test_alembic_scenario(alembic_table): op.drop_table("accounts") assert alembic_table("accounts") is None - op.execute( - """ - create table transactions( - account INT64 NOT NULL, - transaction_time DATETIME NOT NULL, - amount NUMERIC(11, 2) NOT NULL - ) - partition by DATE(transaction_time) - """ + op.create_table( + "transactions", + Column("account", Integer, nullable=False), + Column("transaction_time", DateTime(), nullable=False), + Column("amount", Numeric(11, 2), nullable=False), + bigquery_time_partitioning=TimePartitioning(field="transaction_time"), ) op.alter_column("transactions", "amount", nullable=True) diff --git a/tests/system/test_sqlalchemy_bigquery.py b/tests/system/test_sqlalchemy_bigquery.py index 62b534ff..cccbd4bb 100644 --- a/tests/system/test_sqlalchemy_bigquery.py +++ b/tests/system/test_sqlalchemy_bigquery.py @@ -22,6 +22,8 @@ import datetime import decimal +from google.cloud.bigquery import TimePartitioning + from sqlalchemy.engine import create_engine from sqlalchemy.schema import Table, MetaData, Column from sqlalchemy.ext.declarative import declarative_base @@ -539,6 +541,14 @@ def test_create_table(engine, bigquery_dataset): Column("binary_c", sqlalchemy.BINARY), bigquery_description="test table description", bigquery_friendly_name="test table name", + bigquery_expiration_timestamp=datetime.datetime(2183, 3, 26, 8, 30, 0), + bigquery_time_partitioning=TimePartitioning( + field="timestamp_c", + expiration_ms=1000 * 60 * 60 * 24 * 30, # 30 days + ), + bigquery_require_partition_filter=True, + bigquery_default_rounding_mode="ROUND_HALF_EVEN", + bigquery_clustering_fields=["integer_c", "decimal_c"], ) meta.create_all(engine) meta.drop_all(engine) @@ -594,17 +604,7 @@ def test_view_names(inspector, inspector_using_test_dataset, bigquery_dataset): def 
test_get_indexes(inspector, inspector_using_test_dataset, bigquery_dataset): for _ in [f"{bigquery_dataset}.sample", f"{bigquery_dataset}.sample_one_row"]: indexes = inspector.get_indexes(f"{bigquery_dataset}.sample") - assert len(indexes) == 2 - assert indexes[0] == { - "name": "partition", - "column_names": ["timestamp"], - "unique": False, - } - assert indexes[1] == { - "name": "clustering", - "column_names": ["integer", "string"], - "unique": False, - } + assert len(indexes) == 0 def test_get_columns(inspector, inspector_using_test_dataset, bigquery_dataset): diff --git a/tests/unit/conftest.py b/tests/unit/conftest.py index f808b380..6f197196 100644 --- a/tests/unit/conftest.py +++ b/tests/unit/conftest.py @@ -25,6 +25,8 @@ import pytest import sqlalchemy +from sqlalchemy_bigquery.base import BigQueryDDLCompiler, BigQueryDialect + from . import fauxdbi sqlalchemy_version = packaging.version.parse(sqlalchemy.__version__) @@ -91,6 +93,11 @@ def metadata(): return sqlalchemy.MetaData() +@pytest.fixture() +def ddl_compiler(): + return BigQueryDDLCompiler(BigQueryDialect(), None) + + def setup_table(connection, name, *columns, initial_data=(), **kw): metadata = sqlalchemy.MetaData() table = sqlalchemy.Table(name, metadata, *columns, **kw) diff --git a/tests/unit/test_catalog_functions.py b/tests/unit/test_catalog_functions.py index 78614c9f..7eab7b7b 100644 --- a/tests/unit/test_catalog_functions.py +++ b/tests/unit/test_catalog_functions.py @@ -126,18 +126,7 @@ def test_get_indexes(faux_conn): client.tables.foo.time_partitioning = TimePartitioning(field="tm") client.tables.foo.clustering_fields = ["user_email", "store_code"] - assert faux_conn.dialect.get_indexes(faux_conn, "foo") == [ - dict( - name="partition", - column_names=["tm"], - unique=False, - ), - dict( - name="clustering", - column_names=["user_email", "store_code"], - unique=False, - ), - ] + assert faux_conn.dialect.get_indexes(faux_conn, "foo") == [] def test_no_table_pk_constraint(faux_conn): diff --git a/tests/unit/test_table_options.py b/tests/unit/test_table_options.py new file mode 100644 index 00000000..2147fb1d --- /dev/null +++ b/tests/unit/test_table_options.py @@ -0,0 +1,474 @@ +# Copyright (c) 2021 The sqlalchemy-bigquery Authors +# +# Permission is hereby granted, free of charge, to any person obtaining a copy of +# this software and associated documentation files (the "Software"), to deal in +# the Software without restriction, including without limitation the rights to +# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +# the Software, and to permit persons to whom the Software is furnished to do so, +# subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +# FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +# COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +# IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
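+
+# These tests compile BigQuery DDL against the faux, SQLite-backed connection
+# from conftest: CREATE TABLE is expected to fail under SQLite for the
+# BigQuery-only clauses, and each test then asserts on the DDL string captured
+# in faux_conn.test_data["execute"].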
+ +import datetime +import sqlite3 +import pytest +import sqlalchemy + +from google.cloud.bigquery import ( + PartitionRange, + RangePartitioning, + TimePartitioning, + TimePartitioningType, +) + +from .conftest import setup_table + + +def test_table_expiration_timestamp_dialect_option(faux_conn): + setup_table( + faux_conn, + "some_table", + sqlalchemy.Column("createdAt", sqlalchemy.DateTime), + bigquery_expiration_timestamp=datetime.datetime.fromisoformat( + "2038-01-01T00:00:00+00:00" + ), + ) + + assert " ".join(faux_conn.test_data["execute"][-1][0].strip().split()) == ( + "CREATE TABLE `some_table` ( `createdAt` DATETIME )" + " OPTIONS(expiration_timestamp=TIMESTAMP '2038-01-01 00:00:00+00:00')" + ) + + +def test_table_default_rounding_mode_dialect_option(faux_conn): + setup_table( + faux_conn, + "some_table", + sqlalchemy.Column("createdAt", sqlalchemy.DateTime), + bigquery_default_rounding_mode="ROUND_HALF_EVEN", + ) + + assert " ".join(faux_conn.test_data["execute"][-1][0].strip().split()) == ( + "CREATE TABLE `some_table` ( `createdAt` DATETIME )" + " OPTIONS(default_rounding_mode='ROUND_HALF_EVEN')" + ) + + +def test_table_clustering_fields_dialect_option_no_such_column(faux_conn): + with pytest.raises(sqlalchemy.exc.NoSuchColumnError): + setup_table( + faux_conn, + "some_table", + sqlalchemy.Column("id", sqlalchemy.Integer), + sqlalchemy.Column("createdAt", sqlalchemy.DateTime), + bigquery_clustering_fields=["country", "unknown"], + ) + + +def test_table_clustering_fields_dialect_option(faux_conn): + # expect table creation to fail as SQLite does not support clustering + with pytest.raises(sqlite3.OperationalError): + setup_table( + faux_conn, + "some_table", + sqlalchemy.Column("id", sqlalchemy.Integer), + sqlalchemy.Column("country", sqlalchemy.Text), + sqlalchemy.Column("town", sqlalchemy.Text), + bigquery_clustering_fields=["country", "town"], + ) + + assert " ".join(faux_conn.test_data["execute"][-1][0].strip().split()) == ( + "CREATE TABLE `some_table` ( `id` INT64, `country` STRING, `town` STRING )" + " CLUSTER BY country, town" + ) + + +def test_table_clustering_fields_dialect_option_type_error(faux_conn): + # expect TypeError when bigquery_clustering_fields is not a list + with pytest.raises(TypeError): + setup_table( + faux_conn, + "some_table", + sqlalchemy.Column("id", sqlalchemy.Integer), + sqlalchemy.Column("country", sqlalchemy.Text), + sqlalchemy.Column("town", sqlalchemy.Text), + bigquery_clustering_fields="country, town", + ) + + +def test_table_time_partitioning_dialect_option(faux_conn): + # expect table creation to fail as SQLite does not support partitioned tables + with pytest.raises(sqlite3.OperationalError): + setup_table( + faux_conn, + "some_table", + sqlalchemy.Column("id", sqlalchemy.Integer), + sqlalchemy.Column("createdAt", sqlalchemy.DateTime), + bigquery_time_partitioning=TimePartitioning(), + ) + + assert " ".join(faux_conn.test_data["execute"][-1][0].strip().split()) == ( + "CREATE TABLE `some_table` ( `id` INT64, `createdAt` DATETIME )" + " PARTITION BY DATE_TRUNC(_PARTITIONDATE, DAY)" + ) + + +def test_table_require_partition_filter_dialect_option(faux_conn): + # expect table creation to fail as SQLite does not support partitioned tables + with pytest.raises(sqlite3.OperationalError): + setup_table( + faux_conn, + "some_table", + sqlalchemy.Column("createdAt", sqlalchemy.DateTime), + bigquery_time_partitioning=TimePartitioning(field="createdAt"), + bigquery_require_partition_filter=True, + ) + + assert " 
".join(faux_conn.test_data["execute"][-1][0].strip().split()) == ( + "CREATE TABLE `some_table` ( `createdAt` DATETIME )" + " PARTITION BY DATE_TRUNC(createdAt, DAY)" + " OPTIONS(require_partition_filter=true)" + ) + + +def test_table_time_partitioning_with_field_dialect_option(faux_conn): + # expect table creation to fail as SQLite does not support partitioned tables + with pytest.raises(sqlite3.OperationalError): + setup_table( + faux_conn, + "some_table", + sqlalchemy.Column("id", sqlalchemy.Integer), + sqlalchemy.Column("createdAt", sqlalchemy.DateTime), + bigquery_time_partitioning=TimePartitioning(field="createdAt"), + ) + + assert " ".join(faux_conn.test_data["execute"][-1][0].strip().split()) == ( + "CREATE TABLE `some_table` ( `id` INT64, `createdAt` DATETIME )" + " PARTITION BY DATE_TRUNC(createdAt, DAY)" + ) + + +def test_table_time_partitioning_by_month_dialect_option(faux_conn): + # expect table creation to fail as SQLite does not support partitioned tables + with pytest.raises(sqlite3.OperationalError): + setup_table( + faux_conn, + "some_table", + sqlalchemy.Column("id", sqlalchemy.Integer), + sqlalchemy.Column("createdAt", sqlalchemy.DateTime), + bigquery_time_partitioning=TimePartitioning( + field="createdAt", + type_=TimePartitioningType.MONTH, + ), + ) + + assert " ".join(faux_conn.test_data["execute"][-1][0].strip().split()) == ( + "CREATE TABLE `some_table` ( `id` INT64, `createdAt` DATETIME )" + " PARTITION BY DATE_TRUNC(createdAt, MONTH)" + ) + + +def test_table_time_partitioning_with_timestamp_dialect_option(faux_conn): + # expect table creation to fail as SQLite does not support partitioned tables + with pytest.raises(sqlite3.OperationalError): + setup_table( + faux_conn, + "some_table", + sqlalchemy.Column("id", sqlalchemy.Integer), + sqlalchemy.Column("createdAt", sqlalchemy.TIMESTAMP), + bigquery_time_partitioning=TimePartitioning(field="createdAt"), + ) + + assert " ".join(faux_conn.test_data["execute"][-1][0].strip().split()) == ( + "CREATE TABLE `some_table` ( `id` INT64, `createdAt` TIMESTAMP )" + " PARTITION BY TIMESTAMP_TRUNC(createdAt, DAY)" + ) + + +def test_table_time_partitioning_dialect_option_partition_expiration_days(faux_conn): + # expect table creation to fail as SQLite does not support partitioned tables + with pytest.raises(sqlite3.OperationalError): + setup_table( + faux_conn, + "some_table", + sqlalchemy.Column("createdAt", sqlalchemy.DateTime), + bigquery_time_partitioning=TimePartitioning( + field="createdAt", + type_="DAY", + expiration_ms=21600000, + ), + ) + + assert " ".join(faux_conn.test_data["execute"][-1][0].strip().split()) == ( + "CREATE TABLE `some_table` ( `createdAt` DATETIME )" + " PARTITION BY DATE_TRUNC(createdAt, DAY)" + " OPTIONS(partition_expiration_days=0.25)" + ) + + +def test_table_partitioning_dialect_option_type_error(faux_conn): + # expect TypeError when bigquery_time_partitioning is not a TimePartitioning object + with pytest.raises(TypeError): + setup_table( + faux_conn, + "some_table", + sqlalchemy.Column("id", sqlalchemy.Integer), + sqlalchemy.Column("createdAt", sqlalchemy.DateTime), + bigquery_time_partitioning="DATE(createdAt)", + ) + + +def test_table_range_partitioning_dialect_option(faux_conn): + # expect table creation to fail as SQLite does not support partitioned tables + with pytest.raises(sqlite3.OperationalError): + setup_table( + faux_conn, + "some_table", + sqlalchemy.Column("id", sqlalchemy.Integer), + sqlalchemy.Column("zipcode", sqlalchemy.INT), + bigquery_range_partitioning=RangePartitioning( + 
field="zipcode", + range_=PartitionRange( + start=0, + end=100000, + interval=2, + ), + ), + ) + + assert " ".join(faux_conn.test_data["execute"][-1][0].strip().split()) == ( + "CREATE TABLE `some_table` ( `id` INT64, `zipcode` INT64 )" + " PARTITION BY RANGE_BUCKET(zipcode, GENERATE_ARRAY(0, 100000, 2))" + ) + + +def test_table_range_partitioning_dialect_option_no_field(faux_conn): + # expect TypeError when bigquery_range_partitioning field is not defined + with pytest.raises( + AttributeError, + match="bigquery_range_partitioning expects field to be defined", + ): + setup_table( + faux_conn, + "some_table", + sqlalchemy.Column("id", sqlalchemy.Integer), + sqlalchemy.Column("zipcode", sqlalchemy.FLOAT), + bigquery_range_partitioning=RangePartitioning( + range_=PartitionRange( + start=0, + end=100000, + interval=10, + ), + ), + ) + + +def test_table_range_partitioning_dialect_option_bad_column_type(faux_conn): + # expect ValueError when bigquery_range_partitioning field is not an INTEGER + with pytest.raises( + ValueError, + match=r"bigquery_range_partitioning expects field \(i\.e\. column\) data type to be INTEGER", + ): + setup_table( + faux_conn, + "some_table", + sqlalchemy.Column("id", sqlalchemy.Integer), + sqlalchemy.Column("zipcode", sqlalchemy.FLOAT), + bigquery_range_partitioning=RangePartitioning( + field="zipcode", + range_=PartitionRange( + start=0, + end=100000, + interval=10, + ), + ), + ) + + +def test_table_range_partitioning_dialect_option_range_missing(faux_conn): + # expect TypeError when bigquery_range_partitioning range start or end is missing + with pytest.raises( + TypeError, + match="bigquery_range_partitioning expects range_.start to be an int, provided None", + ): + setup_table( + faux_conn, + "some_table", + sqlalchemy.Column("id", sqlalchemy.Integer), + sqlalchemy.Column("zipcode", sqlalchemy.INT), + bigquery_range_partitioning=RangePartitioning(field="zipcode"), + ) + + with pytest.raises( + TypeError, + match="bigquery_range_partitioning expects range_.end to be an int, provided None", + ): + setup_table( + faux_conn, + "some_table", + sqlalchemy.Column("id", sqlalchemy.Integer), + sqlalchemy.Column("zipcode", sqlalchemy.INT), + bigquery_range_partitioning=RangePartitioning( + field="zipcode", + range_=PartitionRange(start=1), + ), + ) + + +def test_table_range_partitioning_dialect_option_default_interval(faux_conn): + # expect table creation to fail as SQLite does not support partitioned tables + with pytest.raises(sqlite3.OperationalError): + setup_table( + faux_conn, + "some_table", + sqlalchemy.Column("id", sqlalchemy.Integer), + sqlalchemy.Column("zipcode", sqlalchemy.INT), + bigquery_range_partitioning=RangePartitioning( + field="zipcode", + range_=PartitionRange( + start=0, + end=100000, + ), + ), + ) + + assert " ".join(faux_conn.test_data["execute"][-1][0].strip().split()) == ( + "CREATE TABLE `some_table` ( `id` INT64, `zipcode` INT64 )" + " PARTITION BY RANGE_BUCKET(zipcode, GENERATE_ARRAY(0, 100000, 1))" + ) + + +def test_time_and_range_partitioning_mutually_exclusive(faux_conn): + # expect ValueError when both bigquery_time_partitioning and bigquery_range_partitioning are provided + with pytest.raises(ValueError): + setup_table( + faux_conn, + "some_table", + sqlalchemy.Column("id", sqlalchemy.Integer), + sqlalchemy.Column("createdAt", sqlalchemy.DateTime), + bigquery_range_partitioning=RangePartitioning(), + bigquery_time_partitioning=TimePartitioning(), + ) + + +def test_table_all_dialect_option(faux_conn): + # expect table creation to fail as 
SQLite does not support clustering and partitioned tables + with pytest.raises(sqlite3.OperationalError): + setup_table( + faux_conn, + "some_table", + sqlalchemy.Column("id", sqlalchemy.Integer), + sqlalchemy.Column("country", sqlalchemy.Text), + sqlalchemy.Column("town", sqlalchemy.Text), + sqlalchemy.Column("createdAt", sqlalchemy.DateTime), + bigquery_expiration_timestamp=datetime.datetime.fromisoformat( + "2038-01-01T00:00:00+00:00" + ), + bigquery_require_partition_filter=True, + bigquery_default_rounding_mode="ROUND_HALF_EVEN", + bigquery_clustering_fields=["country", "town"], + bigquery_time_partitioning=TimePartitioning( + field="createdAt", + type_="DAY", + expiration_ms=2592000000, + ), + ) + + assert " ".join(faux_conn.test_data["execute"][-1][0].strip().split()) == ( + "CREATE TABLE `some_table` ( `id` INT64, `country` STRING, `town` STRING, `createdAt` DATETIME )" + " PARTITION BY DATE_TRUNC(createdAt, DAY)" + " CLUSTER BY country, town" + " OPTIONS(partition_expiration_days=30.0, expiration_timestamp=TIMESTAMP '2038-01-01 00:00:00+00:00', require_partition_filter=true, default_rounding_mode='ROUND_HALF_EVEN')" + ) + + +def test_validate_friendly_name_value_type(ddl_compiler): + # expect option value to be transformed as a string expression + + assert ddl_compiler._validate_option_value_type("friendly_name", "Friendly name") + + with pytest.raises(TypeError): + ddl_compiler._validate_option_value_type("friendly_name", 1983) + + +def test_validate_expiration_timestamp_value_type(ddl_compiler): + # expect option value to be transformed as a timestamp expression + + assert ddl_compiler._validate_option_value_type( + "expiration_timestamp", + datetime.datetime.fromisoformat("2038-01-01T00:00:00+00:00"), + ) + + with pytest.raises(TypeError): + ddl_compiler._validate_option_value_type("expiration_timestamp", "2038-01-01") + + +def test_validate_require_partition_filter_type(ddl_compiler): + # expect option value to be transformed as a literal boolean + + assert ddl_compiler._validate_option_value_type("require_partition_filter", True) + assert ddl_compiler._validate_option_value_type("require_partition_filter", False) + + with pytest.raises(TypeError): + ddl_compiler._validate_option_value_type("require_partition_filter", "true") + + with pytest.raises(TypeError): + ddl_compiler._validate_option_value_type("require_partition_filter", "false") + + +def test_validate_default_rounding_mode_type(ddl_compiler): + # expect option value to be transformed as a string expression + + assert ddl_compiler._validate_option_value_type( + "default_rounding_mode", "ROUND_HALF_EVEN" + ) + + with pytest.raises(TypeError): + ddl_compiler._validate_option_value_type("default_rounding_mode", True) + + +def test_validate_unmapped_option_type(ddl_compiler): + # expect option value with no typed specified in mapping to be transformed as a string expression + + assert ddl_compiler._validate_option_value_type("unknown", "DEFAULT_IS_STRING") + + +def test_process_str_option_value(ddl_compiler): + # expect string to be transformed as a string expression + assert ddl_compiler._process_option_value("Some text") == "'Some text'" + + +def test_process_datetime_value(ddl_compiler): + # expect datetime object to be transformed as a timestamp expression + assert ( + ddl_compiler._process_option_value( + datetime.datetime.fromisoformat("2038-01-01T00:00:00+00:00") + ) + == "TIMESTAMP '2038-01-01 00:00:00+00:00'" + ) + + +def test_process_int_option_value(ddl_compiler): + # expect int to be unchanged + assert 
ddl_compiler._process_option_value(90) == 90 + + +def test_process_boolean_option_value(ddl_compiler): + # expect boolean to be transformed as a literal boolean expression + + assert ddl_compiler._process_option_value(True) == "true" + assert ddl_compiler._process_option_value(False) == "false" + + +def test_process_not_implementer_option_value(ddl_compiler): + # expect to raise + with pytest.raises(NotImplementedError): + ddl_compiler._process_option_value(float) From b11a11070f6e6fedfe03cf664da5a7664cec094e Mon Sep 17 00:00:00 2001 From: Lingqing Gan Date: Fri, 12 Jan 2024 08:03:21 -0800 Subject: [PATCH 06/16] test: change compliance test rerun condition regex string (#982) * test: change compliance test rerun condition regex string * try sys test * add sleep to time * change exceed rate limit code to 400 * change rerun condition format * delete compliance tests to make it faster * stop on first fail * add rerun condition * change rerun condition format * add back the deleted tests * remove investigation changes and add owlbot change * black * revert README --- noxfile.py | 9 +++++---- owlbot.py | 9 +++++---- 2 files changed, 10 insertions(+), 8 deletions(-) diff --git a/noxfile.py b/noxfile.py index e31f32c5..82fa27d5 100644 --- a/noxfile.py +++ b/noxfile.py @@ -394,10 +394,11 @@ def compliance(session): f"--junitxml=compliance_{session.python}_sponge_log.xml", "--reruns=3", "--reruns-delay=60", - "--only-rerun=403 Exceeded rate limits", - "--only-rerun=409 Already Exists", - "--only-rerun=404 Not found", - "--only-rerun=400 Cannot execute DML over a non-existent table", + "--only-rerun=Exceeded rate limits", + "--only-rerun=Already Exists", + "--only-rerun=Not found", + "--only-rerun=Cannot execute DML over a non-existent table", + "--only-rerun=Job exceeded rate limits", system_test_folder_path, *session.posargs, # To suppress the "Deprecated API features detected!" warning when diff --git a/owlbot.py b/owlbot.py index 22678c8b..8c3ce732 100644 --- a/owlbot.py +++ b/owlbot.py @@ -188,10 +188,11 @@ def compliance(session): f"--junitxml=compliance_{session.python}_sponge_log.xml", "--reruns=3", "--reruns-delay=60", - "--only-rerun=403 Exceeded rate limits", - "--only-rerun=409 Already Exists", - "--only-rerun=404 Not found", - "--only-rerun=400 Cannot execute DML over a non-existent table", + "--only-rerun=Exceeded rate limits", + "--only-rerun=Already Exists", + "--only-rerun=Not found", + "--only-rerun=Cannot execute DML over a non-existent table", + "--only-rerun=Job exceeded rate limits", system_test_folder_path, *session.posargs, # To suppress the "Deprecated API features detected!" 
warning when From fcd575510f4bd24796e7cb208c552a2cf06fbe22 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 17 Jan 2024 10:37:31 -0500 Subject: [PATCH 07/16] build(python): fix `docs` and `docfx` builds (#1006) Source-Link: https://github.com/googleapis/synthtool/commit/fac8444edd5f5526e804c306b766a271772a3e2f Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:5ea6d0ab82c956b50962f91d94e206d3921537ae5fe1549ec5326381d8905cfa Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 6 +++--- .kokoro/requirements.txt | 6 +++--- noxfile.py | 20 +++++++++++++++++++- 3 files changed, 25 insertions(+), 7 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 40bf9973..d8a1bbca 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -1,4 +1,4 @@ -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:230f7fe8a0d2ed81a519cfc15c6bb11c5b46b9fb449b8b1219b3771bcb520ad2 -# created: 2023-12-09T15:16:25.430769578Z + digest: sha256:5ea6d0ab82c956b50962f91d94e206d3921537ae5fe1549ec5326381d8905cfa +# created: 2024-01-15T16:32:08.142785673Z diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt index e5c1ffca..bb3d6ca3 100644 --- a/.kokoro/requirements.txt +++ b/.kokoro/requirements.txt @@ -263,9 +263,9 @@ jeepney==0.8.0 \ # via # keyring # secretstorage -jinja2==3.1.2 \ - --hash=sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852 \ - --hash=sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61 +jinja2==3.1.3 \ + --hash=sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa \ + --hash=sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90 # via gcp-releasetool keyring==24.2.0 \ --hash=sha256:4901caaf597bfd3bbd78c9a0c7c4c29fcd8310dab2cffefe749e916b6527acd6 \ diff --git a/noxfile.py b/noxfile.py index 82fa27d5..28f000db 100644 --- a/noxfile.py +++ b/noxfile.py @@ -428,7 +428,16 @@ def docs(session): session.install("-e", ".") session.install( - "sphinx==4.0.1", + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. + "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "sphinx==4.5.0", "alabaster", "geoalchemy2", "shapely", @@ -456,6 +465,15 @@ def docfx(session): session.install("-e", ".") session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. 
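+        # (These are the same pins applied to the docs session above.)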
+ "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", "gcp-sphinx-docfx-yaml", "alabaster", "geoalchemy2", From 88af7d17276c575624cbd612474345c62e33e741 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Tue, 6 Feb 2024 16:01:10 +0100 Subject: [PATCH 08/16] chore(deps): update all dependencies (#937) --- dev_requirements.txt | 2 +- samples/snippets/requirements-test.txt | 6 +++--- samples/snippets/requirements.txt | 28 +++++++++++++------------- 3 files changed, 18 insertions(+), 18 deletions(-) diff --git a/dev_requirements.txt b/dev_requirements.txt index 1798fab5..dac67503 100644 --- a/dev_requirements.txt +++ b/dev_requirements.txt @@ -2,4 +2,4 @@ sqlalchemy>=2.0.15,<2.1.0 google-cloud-bigquery>=1.6.0 pytest===6.2.5 pytest-flake8===1.1.0 # versions 1.1.1 and above require pytest 7 -pytz==2023.3.post1 +pytz==2024.1 diff --git a/samples/snippets/requirements-test.txt b/samples/snippets/requirements-test.txt index 552292b6..da1a67c7 100644 --- a/samples/snippets/requirements-test.txt +++ b/samples/snippets/requirements-test.txt @@ -1,10 +1,10 @@ -attrs==23.1.0 +attrs==23.2.0 click==8.1.7 -google-auth==2.25.2 +google-auth==2.27.0 google-cloud-testutils==1.4.0 iniconfig==2.0.0 packaging==23.2 -pluggy==1.3.0 +pluggy==1.4.0 py==1.11.0 pyasn1==0.5.1 pyasn1-modules==0.3.0 diff --git a/samples/snippets/requirements.txt b/samples/snippets/requirements.txt index a1ea3358..4e8f5b29 100644 --- a/samples/snippets/requirements.txt +++ b/samples/snippets/requirements.txt @@ -1,33 +1,33 @@ -alembic==1.13.0 -certifi==2023.11.17 +alembic==1.13.1 +certifi==2024.2.2 charset-normalizer==3.3.2 -geoalchemy2==0.14.2 -google-api-core[grpc]==2.15.0 -google-auth==2.25.2 -google-cloud-bigquery==3.14.1 +geoalchemy2==0.14.3 +google-api-core[grpc]==2.16.2 +google-auth==2.27.0 +google-cloud-bigquery==3.17.1 google-cloud-core==2.4.1 google-crc32c==1.5.0 google-resumable-media==2.7.0 googleapis-common-protos==1.62.0 -greenlet==3.0.2 -grpcio==1.60.0 -grpcio-status==1.60.0 +greenlet==3.0.3 +grpcio==1.60.1 +grpcio-status==1.60.1 idna==3.6 importlib-resources==6.1.1; python_version >= '3.8' -mako==1.3.0 -markupsafe==2.1.3 +mako==1.3.2 +markupsafe==2.1.5 packaging==23.2 proto-plus==1.23.0 -protobuf==4.25.1 +protobuf==4.25.2 pyasn1==0.5.1 pyasn1-modules==0.3.0 pyparsing==3.1.1 python-dateutil==2.8.2 -pytz==2023.3.post1 +pytz==2024.1 requests==2.31.0 rsa==4.9 shapely==2.0.2 six==1.16.0 sqlalchemy===1.4.27 typing-extensions==4.9.0 -urllib3==2.1.0 +urllib3==2.2.0 From 89c5ff28861dedff3252ad2a733acc52cab3ec0e Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 6 Feb 2024 11:12:40 -0500 Subject: [PATCH 09/16] build(deps): bump cryptography from 41.0.6 to 42.0.0 in /synthtool/gcp/templates/python_library/.kokoro (#1022) Source-Link: https://github.com/googleapis/synthtool/commit/e13b22b1f660c80e4c3e735a9177d2f16c4b8bdc Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:97b671488ad548ef783a452a9e1276ac10f144d5ae56d98cc4bf77ba504082b4 Co-authored-by: Owl Bot Co-authored-by: Chalmer Lowe --- .github/.OwlBot.lock.yaml | 4 +-- .kokoro/requirements.txt | 57 ++++++++++++++++++++++----------------- 2 files changed, 35 insertions(+), 26 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index d8a1bbca..2aefd0e9 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ 
# limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:5ea6d0ab82c956b50962f91d94e206d3921537ae5fe1549ec5326381d8905cfa -# created: 2024-01-15T16:32:08.142785673Z + digest: sha256:97b671488ad548ef783a452a9e1276ac10f144d5ae56d98cc4bf77ba504082b4 +# created: 2024-02-06T03:20:16.660474034Z diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt index bb3d6ca3..8c11c9f3 100644 --- a/.kokoro/requirements.txt +++ b/.kokoro/requirements.txt @@ -93,30 +93,39 @@ colorlog==6.7.0 \ # via # gcp-docuploader # nox -cryptography==41.0.6 \ - --hash=sha256:068bc551698c234742c40049e46840843f3d98ad7ce265fd2bd4ec0d11306596 \ - --hash=sha256:0f27acb55a4e77b9be8d550d762b0513ef3fc658cd3eb15110ebbcbd626db12c \ - --hash=sha256:2132d5865eea673fe6712c2ed5fb4fa49dba10768bb4cc798345748380ee3660 \ - --hash=sha256:3288acccef021e3c3c10d58933f44e8602cf04dba96d9796d70d537bb2f4bbc4 \ - --hash=sha256:35f3f288e83c3f6f10752467c48919a7a94b7d88cc00b0668372a0d2ad4f8ead \ - --hash=sha256:398ae1fc711b5eb78e977daa3cbf47cec20f2c08c5da129b7a296055fbb22aed \ - --hash=sha256:422e3e31d63743855e43e5a6fcc8b4acab860f560f9321b0ee6269cc7ed70cc3 \ - --hash=sha256:48783b7e2bef51224020efb61b42704207dde583d7e371ef8fc2a5fb6c0aabc7 \ - --hash=sha256:4d03186af98b1c01a4eda396b137f29e4e3fb0173e30f885e27acec8823c1b09 \ - --hash=sha256:5daeb18e7886a358064a68dbcaf441c036cbdb7da52ae744e7b9207b04d3908c \ - --hash=sha256:60e746b11b937911dc70d164060d28d273e31853bb359e2b2033c9e93e6f3c43 \ - --hash=sha256:742ae5e9a2310e9dade7932f9576606836ed174da3c7d26bc3d3ab4bd49b9f65 \ - --hash=sha256:7e00fb556bda398b99b0da289ce7053639d33b572847181d6483ad89835115f6 \ - --hash=sha256:85abd057699b98fce40b41737afb234fef05c67e116f6f3650782c10862c43da \ - --hash=sha256:8efb2af8d4ba9dbc9c9dd8f04d19a7abb5b49eab1f3694e7b5a16a5fc2856f5c \ - --hash=sha256:ae236bb8760c1e55b7a39b6d4d32d2279bc6c7c8500b7d5a13b6fb9fc97be35b \ - --hash=sha256:afda76d84b053923c27ede5edc1ed7d53e3c9f475ebaf63c68e69f1403c405a8 \ - --hash=sha256:b27a7fd4229abef715e064269d98a7e2909ebf92eb6912a9603c7e14c181928c \ - --hash=sha256:b648fe2a45e426aaee684ddca2632f62ec4613ef362f4d681a9a6283d10e079d \ - --hash=sha256:c5a550dc7a3b50b116323e3d376241829fd326ac47bc195e04eb33a8170902a9 \ - --hash=sha256:da46e2b5df770070412c46f87bac0849b8d685c5f2679771de277a422c7d0b86 \ - --hash=sha256:f39812f70fc5c71a15aa3c97b2bbe213c3f2a460b79bd21c40d033bb34a9bf36 \ - --hash=sha256:ff369dd19e8fe0528b02e8df9f2aeb2479f89b1270d90f96a63500afe9af5cae +cryptography==42.0.0 \ + --hash=sha256:0a68bfcf57a6887818307600c3c0ebc3f62fbb6ccad2240aa21887cda1f8df1b \ + --hash=sha256:146e971e92a6dd042214b537a726c9750496128453146ab0ee8971a0299dc9bd \ + --hash=sha256:14e4b909373bc5bf1095311fa0f7fcabf2d1a160ca13f1e9e467be1ac4cbdf94 \ + --hash=sha256:206aaf42e031b93f86ad60f9f5d9da1b09164f25488238ac1dc488334eb5e221 \ + --hash=sha256:3005166a39b70c8b94455fdbe78d87a444da31ff70de3331cdec2c568cf25b7e \ + --hash=sha256:324721d93b998cb7367f1e6897370644751e5580ff9b370c0a50dc60a2003513 \ + --hash=sha256:33588310b5c886dfb87dba5f013b8d27df7ffd31dc753775342a1e5ab139e59d \ + --hash=sha256:35cf6ed4c38f054478a9df14f03c1169bb14bd98f0b1705751079b25e1cb58bc \ + --hash=sha256:3ca482ea80626048975360c8e62be3ceb0f11803180b73163acd24bf014133a0 \ + --hash=sha256:56ce0c106d5c3fec1038c3cca3d55ac320a5be1b44bf15116732d0bc716979a2 \ + --hash=sha256:5a217bca51f3b91971400890905a9323ad805838ca3fa1e202a01844f485ee87 \ + --hash=sha256:678cfa0d1e72ef41d48993a7be75a76b0725d29b820ff3cfd606a5b2b33fda01 \ + 
--hash=sha256:69fd009a325cad6fbfd5b04c711a4da563c6c4854fc4c9544bff3088387c77c0 \ + --hash=sha256:6cf9b76d6e93c62114bd19485e5cb003115c134cf9ce91f8ac924c44f8c8c3f4 \ + --hash=sha256:74f18a4c8ca04134d2052a140322002fef535c99cdbc2a6afc18a8024d5c9d5b \ + --hash=sha256:85f759ed59ffd1d0baad296e72780aa62ff8a71f94dc1ab340386a1207d0ea81 \ + --hash=sha256:87086eae86a700307b544625e3ba11cc600c3c0ef8ab97b0fda0705d6db3d4e3 \ + --hash=sha256:8814722cffcfd1fbd91edd9f3451b88a8f26a5fd41b28c1c9193949d1c689dc4 \ + --hash=sha256:8fedec73d590fd30c4e3f0d0f4bc961aeca8390c72f3eaa1a0874d180e868ddf \ + --hash=sha256:9515ea7f596c8092fdc9902627e51b23a75daa2c7815ed5aa8cf4f07469212ec \ + --hash=sha256:988b738f56c665366b1e4bfd9045c3efae89ee366ca3839cd5af53eaa1401bce \ + --hash=sha256:a2a8d873667e4fd2f34aedab02ba500b824692c6542e017075a2efc38f60a4c0 \ + --hash=sha256:bd7cf7a8d9f34cc67220f1195884151426ce616fdc8285df9054bfa10135925f \ + --hash=sha256:bdce70e562c69bb089523e75ef1d9625b7417c6297a76ac27b1b8b1eb51b7d0f \ + --hash=sha256:be14b31eb3a293fc6e6aa2807c8a3224c71426f7c4e3639ccf1a2f3ffd6df8c3 \ + --hash=sha256:be41b0c7366e5549265adf2145135dca107718fa44b6e418dc7499cfff6b4689 \ + --hash=sha256:c310767268d88803b653fffe6d6f2f17bb9d49ffceb8d70aed50ad45ea49ab08 \ + --hash=sha256:c58115384bdcfe9c7f644c72f10f6f42bed7cf59f7b52fe1bf7ae0a622b3a139 \ + --hash=sha256:c640b0ef54138fde761ec99a6c7dc4ce05e80420262c20fa239e694ca371d434 \ + --hash=sha256:ca20550bb590db16223eb9ccc5852335b48b8f597e2f6f0878bbfd9e7314eb17 \ + --hash=sha256:d97aae66b7de41cdf5b12087b5509e4e9805ed6f562406dfcf60e8481a9a28f8 \ + --hash=sha256:e9326ca78111e4c645f7e49cbce4ed2f3f85e17b61a563328c85a5208cf34440 # via # gcp-releasetool # secretstorage From 99887dfa64b4b54f8521fa47bb681360ce35bbcd Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Thu, 8 Feb 2024 00:49:45 +0100 Subject: [PATCH 10/16] chore(deps): update dependency google-cloud-bigquery to v3.17.2 (#1023) --- samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/samples/snippets/requirements.txt b/samples/snippets/requirements.txt index 4e8f5b29..dc7898d0 100644 --- a/samples/snippets/requirements.txt +++ b/samples/snippets/requirements.txt @@ -4,7 +4,7 @@ charset-normalizer==3.3.2 geoalchemy2==0.14.3 google-api-core[grpc]==2.16.2 google-auth==2.27.0 -google-cloud-bigquery==3.17.1 +google-cloud-bigquery==3.17.2 google-cloud-core==2.4.1 google-crc32c==1.5.0 google-resumable-media==2.7.0 From 23dd876f7496cccf234c50c635279f87c6f37ae5 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Fri, 9 Feb 2024 01:18:22 +0100 Subject: [PATCH 11/16] chore(deps): update all dependencies (#1028) --- samples/snippets/requirements.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/samples/snippets/requirements.txt b/samples/snippets/requirements.txt index dc7898d0..8ae180a5 100644 --- a/samples/snippets/requirements.txt +++ b/samples/snippets/requirements.txt @@ -1,8 +1,8 @@ alembic==1.13.1 certifi==2024.2.2 charset-normalizer==3.3.2 -geoalchemy2==0.14.3 -google-api-core[grpc]==2.16.2 +geoalchemy2==0.14.4 +google-api-core[grpc]==2.17.0 google-auth==2.27.0 google-cloud-bigquery==3.17.2 google-cloud-core==2.4.1 From 93c533fb5dc8c6ab593c3787fcb6fe23b7f85536 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 20 Feb 2024 12:51:31 -0800 Subject: [PATCH 12/16] build(deps): bump cryptography from 42.0.0 to 42.0.2 in .kokoro (#1032) Source-Link: 
https://github.com/googleapis/synthtool/commit/8d392a55db44b00b4a9b995318051e334eecdcf1 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:a0c4463fcfd9893fc172a3b3db2b6ac0c7b94ec6ad458c7dcea12d9693615ac3 Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 4 +-- .kokoro/requirements.txt | 66 +++++++++++++++++++-------------------- 2 files changed, 35 insertions(+), 35 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 2aefd0e9..51213ca0 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:97b671488ad548ef783a452a9e1276ac10f144d5ae56d98cc4bf77ba504082b4 -# created: 2024-02-06T03:20:16.660474034Z + digest: sha256:a0c4463fcfd9893fc172a3b3db2b6ac0c7b94ec6ad458c7dcea12d9693615ac3 +# created: 2024-02-17T12:21:23.177926195Z diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt index 8c11c9f3..f80bdcd6 100644 --- a/.kokoro/requirements.txt +++ b/.kokoro/requirements.txt @@ -93,39 +93,39 @@ colorlog==6.7.0 \ # via # gcp-docuploader # nox -cryptography==42.0.0 \ - --hash=sha256:0a68bfcf57a6887818307600c3c0ebc3f62fbb6ccad2240aa21887cda1f8df1b \ - --hash=sha256:146e971e92a6dd042214b537a726c9750496128453146ab0ee8971a0299dc9bd \ - --hash=sha256:14e4b909373bc5bf1095311fa0f7fcabf2d1a160ca13f1e9e467be1ac4cbdf94 \ - --hash=sha256:206aaf42e031b93f86ad60f9f5d9da1b09164f25488238ac1dc488334eb5e221 \ - --hash=sha256:3005166a39b70c8b94455fdbe78d87a444da31ff70de3331cdec2c568cf25b7e \ - --hash=sha256:324721d93b998cb7367f1e6897370644751e5580ff9b370c0a50dc60a2003513 \ - --hash=sha256:33588310b5c886dfb87dba5f013b8d27df7ffd31dc753775342a1e5ab139e59d \ - --hash=sha256:35cf6ed4c38f054478a9df14f03c1169bb14bd98f0b1705751079b25e1cb58bc \ - --hash=sha256:3ca482ea80626048975360c8e62be3ceb0f11803180b73163acd24bf014133a0 \ - --hash=sha256:56ce0c106d5c3fec1038c3cca3d55ac320a5be1b44bf15116732d0bc716979a2 \ - --hash=sha256:5a217bca51f3b91971400890905a9323ad805838ca3fa1e202a01844f485ee87 \ - --hash=sha256:678cfa0d1e72ef41d48993a7be75a76b0725d29b820ff3cfd606a5b2b33fda01 \ - --hash=sha256:69fd009a325cad6fbfd5b04c711a4da563c6c4854fc4c9544bff3088387c77c0 \ - --hash=sha256:6cf9b76d6e93c62114bd19485e5cb003115c134cf9ce91f8ac924c44f8c8c3f4 \ - --hash=sha256:74f18a4c8ca04134d2052a140322002fef535c99cdbc2a6afc18a8024d5c9d5b \ - --hash=sha256:85f759ed59ffd1d0baad296e72780aa62ff8a71f94dc1ab340386a1207d0ea81 \ - --hash=sha256:87086eae86a700307b544625e3ba11cc600c3c0ef8ab97b0fda0705d6db3d4e3 \ - --hash=sha256:8814722cffcfd1fbd91edd9f3451b88a8f26a5fd41b28c1c9193949d1c689dc4 \ - --hash=sha256:8fedec73d590fd30c4e3f0d0f4bc961aeca8390c72f3eaa1a0874d180e868ddf \ - --hash=sha256:9515ea7f596c8092fdc9902627e51b23a75daa2c7815ed5aa8cf4f07469212ec \ - --hash=sha256:988b738f56c665366b1e4bfd9045c3efae89ee366ca3839cd5af53eaa1401bce \ - --hash=sha256:a2a8d873667e4fd2f34aedab02ba500b824692c6542e017075a2efc38f60a4c0 \ - --hash=sha256:bd7cf7a8d9f34cc67220f1195884151426ce616fdc8285df9054bfa10135925f \ - --hash=sha256:bdce70e562c69bb089523e75ef1d9625b7417c6297a76ac27b1b8b1eb51b7d0f \ - --hash=sha256:be14b31eb3a293fc6e6aa2807c8a3224c71426f7c4e3639ccf1a2f3ffd6df8c3 \ - --hash=sha256:be41b0c7366e5549265adf2145135dca107718fa44b6e418dc7499cfff6b4689 \ - --hash=sha256:c310767268d88803b653fffe6d6f2f17bb9d49ffceb8d70aed50ad45ea49ab08 \ - --hash=sha256:c58115384bdcfe9c7f644c72f10f6f42bed7cf59f7b52fe1bf7ae0a622b3a139 \ - 
--hash=sha256:c640b0ef54138fde761ec99a6c7dc4ce05e80420262c20fa239e694ca371d434 \ - --hash=sha256:ca20550bb590db16223eb9ccc5852335b48b8f597e2f6f0878bbfd9e7314eb17 \ - --hash=sha256:d97aae66b7de41cdf5b12087b5509e4e9805ed6f562406dfcf60e8481a9a28f8 \ - --hash=sha256:e9326ca78111e4c645f7e49cbce4ed2f3f85e17b61a563328c85a5208cf34440 +cryptography==42.0.2 \ + --hash=sha256:087887e55e0b9c8724cf05361357875adb5c20dec27e5816b653492980d20380 \ + --hash=sha256:09a77e5b2e8ca732a19a90c5bca2d124621a1edb5438c5daa2d2738bfeb02589 \ + --hash=sha256:130c0f77022b2b9c99d8cebcdd834d81705f61c68e91ddd614ce74c657f8b3ea \ + --hash=sha256:141e2aa5ba100d3788c0ad7919b288f89d1fe015878b9659b307c9ef867d3a65 \ + --hash=sha256:28cb2c41f131a5758d6ba6a0504150d644054fd9f3203a1e8e8d7ac3aea7f73a \ + --hash=sha256:2f9f14185962e6a04ab32d1abe34eae8a9001569ee4edb64d2304bf0d65c53f3 \ + --hash=sha256:320948ab49883557a256eab46149df79435a22d2fefd6a66fe6946f1b9d9d008 \ + --hash=sha256:36d4b7c4be6411f58f60d9ce555a73df8406d484ba12a63549c88bd64f7967f1 \ + --hash=sha256:3b15c678f27d66d247132cbf13df2f75255627bcc9b6a570f7d2fd08e8c081d2 \ + --hash=sha256:3dbd37e14ce795b4af61b89b037d4bc157f2cb23e676fa16932185a04dfbf635 \ + --hash=sha256:4383b47f45b14459cab66048d384614019965ba6c1a1a141f11b5a551cace1b2 \ + --hash=sha256:44c95c0e96b3cb628e8452ec060413a49002a247b2b9938989e23a2c8291fc90 \ + --hash=sha256:4b063d3413f853e056161eb0c7724822a9740ad3caa24b8424d776cebf98e7ee \ + --hash=sha256:52ed9ebf8ac602385126c9a2fe951db36f2cb0c2538d22971487f89d0de4065a \ + --hash=sha256:55d1580e2d7e17f45d19d3b12098e352f3a37fe86d380bf45846ef257054b242 \ + --hash=sha256:5ef9bc3d046ce83c4bbf4c25e1e0547b9c441c01d30922d812e887dc5f125c12 \ + --hash=sha256:5fa82a26f92871eca593b53359c12ad7949772462f887c35edaf36f87953c0e2 \ + --hash=sha256:61321672b3ac7aade25c40449ccedbc6db72c7f5f0fdf34def5e2f8b51ca530d \ + --hash=sha256:701171f825dcab90969596ce2af253143b93b08f1a716d4b2a9d2db5084ef7be \ + --hash=sha256:841ec8af7a8491ac76ec5a9522226e287187a3107e12b7d686ad354bb78facee \ + --hash=sha256:8a06641fb07d4e8f6c7dda4fc3f8871d327803ab6542e33831c7ccfdcb4d0ad6 \ + --hash=sha256:8e88bb9eafbf6a4014d55fb222e7360eef53e613215085e65a13290577394529 \ + --hash=sha256:a00aee5d1b6c20620161984f8ab2ab69134466c51f58c052c11b076715e72929 \ + --hash=sha256:a047682d324ba56e61b7ea7c7299d51e61fd3bca7dad2ccc39b72bd0118d60a1 \ + --hash=sha256:a7ef8dd0bf2e1d0a27042b231a3baac6883cdd5557036f5e8df7139255feaac6 \ + --hash=sha256:ad28cff53f60d99a928dfcf1e861e0b2ceb2bc1f08a074fdd601b314e1cc9e0a \ + --hash=sha256:b9097a208875fc7bbeb1286d0125d90bdfed961f61f214d3f5be62cd4ed8a446 \ + --hash=sha256:b97fe7d7991c25e6a31e5d5e795986b18fbbb3107b873d5f3ae6dc9a103278e9 \ + --hash=sha256:e0ec52ba3c7f1b7d813cd52649a5b3ef1fc0d433219dc8c93827c57eab6cf888 \ + --hash=sha256:ea2c3ffb662fec8bbbfce5602e2c159ff097a4631d96235fcf0fb00e59e3ece4 \ + --hash=sha256:fa3dec4ba8fb6e662770b74f62f1a0c7d4e37e25b58b2bf2c1be4c95372b4a33 \ + --hash=sha256:fbeb725c9dc799a574518109336acccaf1303c30d45c075c665c0793c2f79a7f # via # gcp-releasetool # secretstorage From 92550b9bdb05a0193936b9129cbcd1f6d492af3a Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Thu, 22 Feb 2024 17:32:20 +0100 Subject: [PATCH 13/16] chore(deps): update all dependencies (#1030) --- samples/snippets/requirements-test.txt | 2 +- samples/snippets/requirements.txt | 14 +++++++------- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/samples/snippets/requirements-test.txt b/samples/snippets/requirements-test.txt index da1a67c7..41a1d485 100644 --- 
a/samples/snippets/requirements-test.txt +++ b/samples/snippets/requirements-test.txt @@ -1,6 +1,6 @@ attrs==23.2.0 click==8.1.7 -google-auth==2.27.0 +google-auth==2.28.1 google-cloud-testutils==1.4.0 iniconfig==2.0.0 packaging==23.2 diff --git a/samples/snippets/requirements.txt b/samples/snippets/requirements.txt index 8ae180a5..01b2bfbd 100644 --- a/samples/snippets/requirements.txt +++ b/samples/snippets/requirements.txt @@ -2,23 +2,23 @@ alembic==1.13.1 certifi==2024.2.2 charset-normalizer==3.3.2 geoalchemy2==0.14.4 -google-api-core[grpc]==2.17.0 -google-auth==2.27.0 +google-api-core[grpc]==2.17.1 +google-auth==2.28.1 google-cloud-bigquery==3.17.2 google-cloud-core==2.4.1 google-crc32c==1.5.0 google-resumable-media==2.7.0 googleapis-common-protos==1.62.0 greenlet==3.0.3 -grpcio==1.60.1 -grpcio-status==1.60.1 +grpcio==1.62.0 +grpcio-status==1.62.0 idna==3.6 importlib-resources==6.1.1; python_version >= '3.8' mako==1.3.2 markupsafe==2.1.5 packaging==23.2 proto-plus==1.23.0 -protobuf==4.25.2 +protobuf==4.25.3 pyasn1==0.5.1 pyasn1-modules==0.3.0 pyparsing==3.1.1 @@ -26,8 +26,8 @@ python-dateutil==2.8.2 pytz==2024.1 requests==2.31.0 rsa==4.9 -shapely==2.0.2 +shapely==2.0.3 six==1.16.0 sqlalchemy===1.4.27 typing-extensions==4.9.0 -urllib3==2.2.0 +urllib3==2.2.1 From 5a49598bd9d475e46580dc79eac14bdc91715f87 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Tue, 27 Feb 2024 00:15:18 +0100 Subject: [PATCH 14/16] chore(deps): update all dependencies (#1034) --- samples/snippets/requirements-test.txt | 2 +- samples/snippets/requirements.txt | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/samples/snippets/requirements-test.txt b/samples/snippets/requirements-test.txt index 41a1d485..b39744f7 100644 --- a/samples/snippets/requirements-test.txt +++ b/samples/snippets/requirements-test.txt @@ -13,4 +13,4 @@ pytest===6.2.5 rsa==4.9 six==1.16.0 toml==0.10.2 -typing-extensions==4.9.0 +typing-extensions==4.10.0 diff --git a/samples/snippets/requirements.txt b/samples/snippets/requirements.txt index 01b2bfbd..a1e62c32 100644 --- a/samples/snippets/requirements.txt +++ b/samples/snippets/requirements.txt @@ -13,7 +13,7 @@ greenlet==3.0.3 grpcio==1.62.0 grpcio-status==1.62.0 idna==3.6 -importlib-resources==6.1.1; python_version >= '3.8' +importlib-resources==6.1.2; python_version >= '3.8' mako==1.3.2 markupsafe==2.1.5 packaging==23.2 @@ -29,5 +29,5 @@ rsa==4.9 shapely==2.0.3 six==1.16.0 sqlalchemy===1.4.27 -typing-extensions==4.9.0 +typing-extensions==4.10.0 urllib3==2.2.1 From 9182302991dcf2c2e81809bfde5cf8d03c0d6566 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 27 Feb 2024 13:57:01 -0800 Subject: [PATCH 15/16] build(deps): bump cryptography from 42.0.2 to 42.0.4 in .kokoro (#1035) Source-Link: https://github.com/googleapis/synthtool/commit/d895aec3679ad22aa120481f746bf9f2f325f26f Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:98f3afd11308259de6e828e37376d18867fd321aba07826e29e4f8d9cab56bad Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 4 +-- .kokoro/requirements.txt | 66 +++++++++++++++++++-------------------- 2 files changed, 35 insertions(+), 35 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 51213ca0..e4e943e0 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:a0c4463fcfd9893fc172a3b3db2b6ac0c7b94ec6ad458c7dcea12d9693615ac3 -# created: 2024-02-17T12:21:23.177926195Z + digest: sha256:98f3afd11308259de6e828e37376d18867fd321aba07826e29e4f8d9cab56bad +# created: 2024-02-27T15:56:18.442440378Z diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt index f80bdcd6..bda8e38c 100644 --- a/.kokoro/requirements.txt +++ b/.kokoro/requirements.txt @@ -93,39 +93,39 @@ colorlog==6.7.0 \ # via # gcp-docuploader # nox -cryptography==42.0.2 \ - --hash=sha256:087887e55e0b9c8724cf05361357875adb5c20dec27e5816b653492980d20380 \ - --hash=sha256:09a77e5b2e8ca732a19a90c5bca2d124621a1edb5438c5daa2d2738bfeb02589 \ - --hash=sha256:130c0f77022b2b9c99d8cebcdd834d81705f61c68e91ddd614ce74c657f8b3ea \ - --hash=sha256:141e2aa5ba100d3788c0ad7919b288f89d1fe015878b9659b307c9ef867d3a65 \ - --hash=sha256:28cb2c41f131a5758d6ba6a0504150d644054fd9f3203a1e8e8d7ac3aea7f73a \ - --hash=sha256:2f9f14185962e6a04ab32d1abe34eae8a9001569ee4edb64d2304bf0d65c53f3 \ - --hash=sha256:320948ab49883557a256eab46149df79435a22d2fefd6a66fe6946f1b9d9d008 \ - --hash=sha256:36d4b7c4be6411f58f60d9ce555a73df8406d484ba12a63549c88bd64f7967f1 \ - --hash=sha256:3b15c678f27d66d247132cbf13df2f75255627bcc9b6a570f7d2fd08e8c081d2 \ - --hash=sha256:3dbd37e14ce795b4af61b89b037d4bc157f2cb23e676fa16932185a04dfbf635 \ - --hash=sha256:4383b47f45b14459cab66048d384614019965ba6c1a1a141f11b5a551cace1b2 \ - --hash=sha256:44c95c0e96b3cb628e8452ec060413a49002a247b2b9938989e23a2c8291fc90 \ - --hash=sha256:4b063d3413f853e056161eb0c7724822a9740ad3caa24b8424d776cebf98e7ee \ - --hash=sha256:52ed9ebf8ac602385126c9a2fe951db36f2cb0c2538d22971487f89d0de4065a \ - --hash=sha256:55d1580e2d7e17f45d19d3b12098e352f3a37fe86d380bf45846ef257054b242 \ - --hash=sha256:5ef9bc3d046ce83c4bbf4c25e1e0547b9c441c01d30922d812e887dc5f125c12 \ - --hash=sha256:5fa82a26f92871eca593b53359c12ad7949772462f887c35edaf36f87953c0e2 \ - --hash=sha256:61321672b3ac7aade25c40449ccedbc6db72c7f5f0fdf34def5e2f8b51ca530d \ - --hash=sha256:701171f825dcab90969596ce2af253143b93b08f1a716d4b2a9d2db5084ef7be \ - --hash=sha256:841ec8af7a8491ac76ec5a9522226e287187a3107e12b7d686ad354bb78facee \ - --hash=sha256:8a06641fb07d4e8f6c7dda4fc3f8871d327803ab6542e33831c7ccfdcb4d0ad6 \ - --hash=sha256:8e88bb9eafbf6a4014d55fb222e7360eef53e613215085e65a13290577394529 \ - --hash=sha256:a00aee5d1b6c20620161984f8ab2ab69134466c51f58c052c11b076715e72929 \ - --hash=sha256:a047682d324ba56e61b7ea7c7299d51e61fd3bca7dad2ccc39b72bd0118d60a1 \ - --hash=sha256:a7ef8dd0bf2e1d0a27042b231a3baac6883cdd5557036f5e8df7139255feaac6 \ - --hash=sha256:ad28cff53f60d99a928dfcf1e861e0b2ceb2bc1f08a074fdd601b314e1cc9e0a \ - --hash=sha256:b9097a208875fc7bbeb1286d0125d90bdfed961f61f214d3f5be62cd4ed8a446 \ - --hash=sha256:b97fe7d7991c25e6a31e5d5e795986b18fbbb3107b873d5f3ae6dc9a103278e9 \ - --hash=sha256:e0ec52ba3c7f1b7d813cd52649a5b3ef1fc0d433219dc8c93827c57eab6cf888 \ - --hash=sha256:ea2c3ffb662fec8bbbfce5602e2c159ff097a4631d96235fcf0fb00e59e3ece4 \ - --hash=sha256:fa3dec4ba8fb6e662770b74f62f1a0c7d4e37e25b58b2bf2c1be4c95372b4a33 \ - --hash=sha256:fbeb725c9dc799a574518109336acccaf1303c30d45c075c665c0793c2f79a7f +cryptography==42.0.4 \ + --hash=sha256:01911714117642a3f1792c7f376db572aadadbafcd8d75bb527166009c9f1d1b \ + --hash=sha256:0e89f7b84f421c56e7ff69f11c441ebda73b8a8e6488d322ef71746224c20fce \ + --hash=sha256:12d341bd42cdb7d4937b0cabbdf2a94f949413ac4504904d0cdbdce4a22cbf88 \ + 
--hash=sha256:15a1fb843c48b4a604663fa30af60818cd28f895572386e5f9b8a665874c26e7 \ + --hash=sha256:1cdcdbd117681c88d717437ada72bdd5be9de117f96e3f4d50dab3f59fd9ab20 \ + --hash=sha256:1df6fcbf60560d2113b5ed90f072dc0b108d64750d4cbd46a21ec882c7aefce9 \ + --hash=sha256:3c6048f217533d89f2f8f4f0fe3044bf0b2090453b7b73d0b77db47b80af8dff \ + --hash=sha256:3e970a2119507d0b104f0a8e281521ad28fc26f2820687b3436b8c9a5fcf20d1 \ + --hash=sha256:44a64043f743485925d3bcac548d05df0f9bb445c5fcca6681889c7c3ab12764 \ + --hash=sha256:4e36685cb634af55e0677d435d425043967ac2f3790ec652b2b88ad03b85c27b \ + --hash=sha256:5f8907fcf57392cd917892ae83708761c6ff3c37a8e835d7246ff0ad251d9298 \ + --hash=sha256:69b22ab6506a3fe483d67d1ed878e1602bdd5912a134e6202c1ec672233241c1 \ + --hash=sha256:6bfadd884e7280df24d26f2186e4e07556a05d37393b0f220a840b083dc6a824 \ + --hash=sha256:6d0fbe73728c44ca3a241eff9aefe6496ab2656d6e7a4ea2459865f2e8613257 \ + --hash=sha256:6ffb03d419edcab93b4b19c22ee80c007fb2d708429cecebf1dd3258956a563a \ + --hash=sha256:810bcf151caefc03e51a3d61e53335cd5c7316c0a105cc695f0959f2c638b129 \ + --hash=sha256:831a4b37accef30cccd34fcb916a5d7b5be3cbbe27268a02832c3e450aea39cb \ + --hash=sha256:887623fe0d70f48ab3f5e4dbf234986b1329a64c066d719432d0698522749929 \ + --hash=sha256:a0298bdc6e98ca21382afe914c642620370ce0470a01e1bef6dd9b5354c36854 \ + --hash=sha256:a1327f280c824ff7885bdeef8578f74690e9079267c1c8bd7dc5cc5aa065ae52 \ + --hash=sha256:c1f25b252d2c87088abc8bbc4f1ecbf7c919e05508a7e8628e6875c40bc70923 \ + --hash=sha256:c3a5cbc620e1e17009f30dd34cb0d85c987afd21c41a74352d1719be33380885 \ + --hash=sha256:ce8613beaffc7c14f091497346ef117c1798c202b01153a8cc7b8e2ebaaf41c0 \ + --hash=sha256:d2a27aca5597c8a71abbe10209184e1a8e91c1fd470b5070a2ea60cafec35bcd \ + --hash=sha256:dad9c385ba8ee025bb0d856714f71d7840020fe176ae0229de618f14dae7a6e2 \ + --hash=sha256:db4b65b02f59035037fde0998974d84244a64c3265bdef32a827ab9b63d61b18 \ + --hash=sha256:e09469a2cec88fb7b078e16d4adec594414397e8879a4341c6ace96013463d5b \ + --hash=sha256:e53dc41cda40b248ebc40b83b31516487f7db95ab8ceac1f042626bc43a2f992 \ + --hash=sha256:f1e85a178384bf19e36779d91ff35c7617c885da487d689b05c1366f9933ad74 \ + --hash=sha256:f47be41843200f7faec0683ad751e5ef11b9a56a220d57f300376cd8aba81660 \ + --hash=sha256:fb0cef872d8193e487fc6bdb08559c3aa41b659a7d9be48b2e10747f47863925 \ + --hash=sha256:ffc73996c4fca3d2b6c1c8c12bfd3ad00def8621da24f547626bf06441400449 # via # gcp-releasetool # secretstorage From 626de155c3cb01b31d242e4f235b2e83179ca157 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 28 Feb 2024 15:58:24 -0500 Subject: [PATCH 16/16] chore(main): release 1.10.0 (#936) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- CHANGELOG.md | 12 ++++++++++++ sqlalchemy_bigquery/version.py | 2 +- 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 7c3b7ca7..52bbfe0e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -14,6 +14,18 @@ Older versions of this project were distributed as [pybigquery][0]. 
[2]: https://pypi.org/project/pybigquery/#history +## [1.10.0](https://github.com/googleapis/python-bigquery-sqlalchemy/compare/v1.9.0...v1.10.0) (2024-02-27) + + +### Features + +* Allow to set clustering and time partitioning options at table creation ([#928](https://github.com/googleapis/python-bigquery-sqlalchemy/issues/928)) ([c2c2958](https://github.com/googleapis/python-bigquery-sqlalchemy/commit/c2c2958886cc3c8a51c4f5fc1a8c36b65921edd9)) + + +### Bug Fixes + +* Avoid implicit join when using join with unnest ([#924](https://github.com/googleapis/python-bigquery-sqlalchemy/issues/924)) ([ac74a34](https://github.com/googleapis/python-bigquery-sqlalchemy/commit/ac74a3434c437f60b6f215ac09dea224aa406f8a)) + ## [1.9.0](https://github.com/googleapis/python-bigquery-sqlalchemy/compare/v1.8.0...v1.9.0) (2023-12-10) diff --git a/sqlalchemy_bigquery/version.py b/sqlalchemy_bigquery/version.py index f15b4f67..04f2b0f7 100644 --- a/sqlalchemy_bigquery/version.py +++ b/sqlalchemy_bigquery/version.py @@ -17,4 +17,4 @@ # IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN # CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -__version__ = "1.9.0" +__version__ = "1.10.0"
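
As an illustration of the headline 1.10.0 feature above, the clustering and time-partitioning options added in #928, the following is a minimal sketch. The bigquery_time_partitioning and bigquery_clustering_fields option names follow that pull request and should be verified against the released documentation; the project and dataset names are placeholders.

    import sqlalchemy
    from google.cloud.bigquery import TimePartitioning

    metadata = sqlalchemy.MetaData()

    events = sqlalchemy.Table(
        "events",
        metadata,
        sqlalchemy.Column("id", sqlalchemy.Integer),
        sqlalchemy.Column("account", sqlalchemy.String),
        sqlalchemy.Column("created_at", sqlalchemy.DateTime),
        # Day-granularity partitioning on created_at ...
        bigquery_time_partitioning=TimePartitioning(field="created_at", type_="DAY"),
        # ... plus clustering by account within each partition.
        bigquery_clustering_fields=["account"],
    )

    # "my-project" and "my_dataset" are placeholder names.
    engine = sqlalchemy.create_engine("bigquery://my-project/my_dataset")
    metadata.create_all(engine)

With these options set, create_all should emit a single CREATE TABLE statement carrying the corresponding PARTITION BY and CLUSTER BY clauses, rather than requiring the DDL to be written by hand.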