From ffc491fd75eb06a75261d11fd5ad7e8d7d6fe676 Mon Sep 17 00:00:00 2001 From: Sharoon Thomas Date: Thu, 26 Oct 2023 15:16:45 -0400 Subject: [PATCH 01/62] feat!: Support SQLAlchemy 2.0, drop support for 1.x This patch makes this library backward incompatible for versions of SQLAlchemy < 2.0. Fixes #510 --- AUTHORS | 1 + CONTRIBUTING.rst | 4 + README.rst | 16 +- noxfile.py | 4 +- setup.py | 2 +- sqlalchemy_bigquery/base.py | 20 +- testing/constraints-3.9.txt | 1 - .../test_dialect_compliance.py | 4 +- tests/system/test__struct.py | 45 +-- tests/system/test_geography.py | 18 +- tests/system/test_sqlalchemy_bigquery.py | 261 +++++++++++------- tests/unit/test__struct.py | 4 +- tests/unit/test_compliance.py | 46 ++- tests/unit/test_geography.py | 10 +- tests/unit/test_select.py | 40 +-- tests/unit/test_sqlalchemy_bigquery.py | 4 +- 16 files changed, 282 insertions(+), 198 deletions(-) diff --git a/AUTHORS b/AUTHORS index 5daa663b..fc5345ee 100644 --- a/AUTHORS +++ b/AUTHORS @@ -19,6 +19,7 @@ Maksym Voitko Maxim Zudilov (mxmzdlv) Maxime Beauchemin (mistercrunch) Romain Rigaux +Sharoon Thomas (sharoonthomas) Sumedh Sakdeo Tim Swast (tswast) Vince Broz diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index 9d2ca02f..25cf4de3 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -80,6 +80,10 @@ We use `nox `__ to instrument our tests. The unit tests and system tests are described in the ``noxfile.py`` files in each directory. +- System tests create a live BQ dataset and destroys it with tests:: + + $ nox -s system + .. nox: https://pypi.org/project/nox/ ***************************************** diff --git a/README.rst b/README.rst index a2036289..995f9017 100644 --- a/README.rst +++ b/README.rst @@ -35,7 +35,8 @@ In order to use this library, you first need to go through the following steps: .. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html .. note:: - This library is only compatible with SQLAlchemy versions < 2.0.0 + This library is only compatible with SQLAlchemy versions >= 2.0.0 + For SQLAlchemy versions < 2.0.0, use `sqlalchemy-bigquery < 0.20.0`_. Installation ------------ @@ -104,11 +105,11 @@ SQLAlchemy .. code-block:: python from sqlalchemy import * - from sqlalchemy.engine import create_engine - from sqlalchemy.schema import * engine = create_engine('bigquery://project') - table = Table('dataset.table', MetaData(bind=engine), autoload=True) - print(select([func.count('*')], from_obj=table).scalar()) + metadata_obj = MetaData() + table = Table('dataset.table', metadata_obj, autoload_with=engine) + with engine.connect() as conn: + print(conn.execute(select(func.count("*")).select_from(table)).scalar()) Project ^^^^^^^ @@ -204,7 +205,8 @@ Note that specifying a default dataset doesn't restrict execution of queries to engine = create_engine('bigquery://project/dataset_a') # This will still execute and return rows from dataset_b - engine.execute('SELECT * FROM dataset_b.table').fetchall() + with engine.connect() as conn: + conn.execute(sqlalchemy.text('SELECT * FROM dataset_b.table')).fetchall() Connection String Parameters @@ -281,7 +283,7 @@ If you need additional control, you can supply a BigQuery client of your own: engine = create_engine( 'bigquery://some-project/some-dataset?user_supplied_client=True', - connect_args={'client': custom_bq_client}, + connect_args={'client': custom_bq_client}, ) diff --git a/noxfile.py b/noxfile.py index b97557f4..c77e5da3 100644 --- a/noxfile.py +++ b/noxfile.py @@ -359,7 +359,7 @@ def compliance(session): session.skip("Compliance tests were not found") session.install("--pre", "grpcio") - session.install("--pre", "--no-deps", "--upgrade", "sqlalchemy<2.0.0") + session.install("--pre", "--no-deps", "--upgrade", "sqlalchemy>=2.0.0,<2.1") session.install( "mock", "pytest", @@ -514,7 +514,7 @@ def prerelease_deps(session): prerel_deps = [ "protobuf", - "sqlalchemy<2.0.0", + "sqlalchemy>=2.0,<2.1", # dependency of grpc "six", "googleapis-common-protos", diff --git a/setup.py b/setup.py index a4b145f0..5f72cee6 100644 --- a/setup.py +++ b/setup.py @@ -100,7 +100,7 @@ def readme(): "google-auth>=1.25.0,<3.0.0dev", # Work around pip wack. "google-cloud-bigquery>=2.25.2,<4.0.0dev", "packaging", - "sqlalchemy>=1.2.0,<2.0.0dev", + "sqlalchemy>=2.0,<2.1", ], extras_require=extras, python_requires=">=3.8, <3.12", diff --git a/sqlalchemy_bigquery/base.py b/sqlalchemy_bigquery/base.py index 5297f223..6229881b 100644 --- a/sqlalchemy_bigquery/base.py +++ b/sqlalchemy_bigquery/base.py @@ -214,7 +214,7 @@ def visit_table_valued_alias(self, element, **kw): # For example, given SQLAlchemy code: # # print( - # select([func.unnest(foo.c.objects).alias('foo_objects').column]) + # select(func.unnest(foo.c.objects).alias('foo_objects').column) # .compile(engine)) # # Left to it's own devices, SQLAlchemy would outout: @@ -791,6 +791,14 @@ def __init__( @classmethod def dbapi(cls): + """ + Use `import_dbapi()` instead. + Maintained for backward compatibility. + """ + return dbapi + + @classmethod + def import_dbapi(cls): return dbapi @staticmethod @@ -963,7 +971,10 @@ def _get_table(self, connection, table_name, schema=None): raise NoSuchTableError(table_name) return table - def has_table(self, connection, table_name, schema=None): + def has_table(self, connection, table_name, schema=None, **kw): + """ + No kw are supported + """ try: self._get_table(connection, table_name, schema) return True @@ -1057,7 +1068,10 @@ def __init__(self, *args, **kwargs): raise TypeError("The unnest function requires a single argument.") arg = args[0] if isinstance(arg, sqlalchemy.sql.expression.ColumnElement): - if not isinstance(arg.type, sqlalchemy.sql.sqltypes.ARRAY): + if not ( + isinstance(arg.type, sqlalchemy.sql.sqltypes.ARRAY) + or isinstance(arg.type.impl, sqlalchemy.sql.sqltypes.ARRAY) + ): raise TypeError("The argument to unnest must have an ARRAY type.") self.type = arg.type.item_type super().__init__(*args, **kwargs) diff --git a/testing/constraints-3.9.txt b/testing/constraints-3.9.txt index 77dc823a..e69de29b 100644 --- a/testing/constraints-3.9.txt +++ b/testing/constraints-3.9.txt @@ -1 +0,0 @@ -sqlalchemy>=1.4.13,<2.0.0 diff --git a/tests/sqlalchemy_dialect_compliance/test_dialect_compliance.py b/tests/sqlalchemy_dialect_compliance/test_dialect_compliance.py index a79f2818..05db6eee 100644 --- a/tests/sqlalchemy_dialect_compliance/test_dialect_compliance.py +++ b/tests/sqlalchemy_dialect_compliance/test_dialect_compliance.py @@ -220,7 +220,7 @@ def test_select_exists(self, connection): stuff = self.tables.stuff eq_( connection.execute( - select([stuff.c.id]).where( + select(stuff.c.id).where( and_( stuff.c.id == 1, exists().where(stuff.c.data == "some data"), @@ -234,7 +234,7 @@ def test_select_exists_false(self, connection): stuff = self.tables.stuff eq_( connection.execute( - select([stuff.c.id]).where(exists().where(stuff.c.data == "no data")) + select(stuff.c.id).where(exists().where(stuff.c.data == "no data")) ).fetchall(), [], ) diff --git a/tests/system/test__struct.py b/tests/system/test__struct.py index bb7958c9..69d2ba76 100644 --- a/tests/system/test__struct.py +++ b/tests/system/test__struct.py @@ -54,7 +54,7 @@ def test_struct(engine, bigquery_dataset, metadata): ) ) - assert list(conn.execute(sqlalchemy.select([table]))) == [ + assert list(conn.execute(sqlalchemy.select(table))) == [ ( { "name": "bob", @@ -62,16 +62,16 @@ def test_struct(engine, bigquery_dataset, metadata): }, ) ] - assert list(conn.execute(sqlalchemy.select([table.c.person.NAME]))) == [("bob",)] - assert list(conn.execute(sqlalchemy.select([table.c.person.children[0]]))) == [ + assert list(conn.execute(sqlalchemy.select(table.c.person.NAME))) == [("bob",)] + assert list(conn.execute(sqlalchemy.select(table.c.person.children[0]))) == [ ({"name": "billy", "bdate": datetime.date(2020, 1, 1)},) ] - assert list( - conn.execute(sqlalchemy.select([table.c.person.children[0].bdate])) - ) == [(datetime.date(2020, 1, 1),)] + assert list(conn.execute(sqlalchemy.select(table.c.person.children[0].bdate))) == [ + (datetime.date(2020, 1, 1),) + ] assert list( conn.execute( - sqlalchemy.select([table]).where(table.c.person.children[0].NAME == "billy") + sqlalchemy.select(table).where(table.c.person.children[0].NAME == "billy") ) ) == [ ( @@ -84,7 +84,7 @@ def test_struct(engine, bigquery_dataset, metadata): assert ( list( conn.execute( - sqlalchemy.select([table]).where( + sqlalchemy.select(table).where( table.c.person.children[0].NAME == "sally" ) ) @@ -99,21 +99,22 @@ def test_complex_literals_pr_67(engine, bigquery_dataset, metadata): # Simple select example: table_name = f"{bigquery_dataset}.test_comples_literals_pr_67" - engine.execute( - f""" - create table {table_name} as ( - select 'a' as id, - struct(1 as x__count, 2 as y__count, 3 as z__count) as dimensions + with engine.connect() as conn: + conn.execute( + sqlalchemy.text( + f""" + create table {table_name} as ( + select 'a' as id, + struct(1 as x__count, 2 as y__count, 3 as z__count) as dimensions + ) + """ ) - """ - ) + ) table = sqlalchemy.Table(table_name, metadata, autoload_with=engine) got = str( - sqlalchemy.select([(table.c.dimensions.x__count + 5).label("c")]).compile( - engine - ) + sqlalchemy.select((table.c.dimensions.x__count + 5).label("c")).compile(engine) ) want = ( f"SELECT (`{table_name}`.`dimensions`.x__count) + %(param_1:INT64)s AS `c` \n" @@ -149,9 +150,11 @@ def test_unnest_and_struct_access_233(engine, bigquery_dataset, metadata): conn.execute( mock_table.insert(), - dict(mock_id="x"), - dict(mock_id="y"), - dict(mock_id="z"), + [ + dict(mock_id="x"), + dict(mock_id="y"), + dict(mock_id="z"), + ], ) conn.execute( another_mock_table.insert(), diff --git a/tests/system/test_geography.py b/tests/system/test_geography.py index 7189eebb..50939513 100644 --- a/tests/system/test_geography.py +++ b/tests/system/test_geography.py @@ -74,7 +74,7 @@ def test_geoalchemy2_core(bigquery_dataset): from sqlalchemy.sql import select assert sorted( - (r.name, r.geog.desc[:4]) for r in conn.execute(select([lake_table])) + (r.name, r.geog.desc[:4]) for r in conn.execute(select(lake_table)) ) == [("Garde", "0103"), ("Majeur", "0103"), ("Orta", "0103")] # Spatial query @@ -82,26 +82,32 @@ def test_geoalchemy2_core(bigquery_dataset): from sqlalchemy import func [[result]] = conn.execute( - select([lake_table.c.name], func.ST_Contains(lake_table.c.geog, "POINT(4 1)")) + select(lake_table.c.name).where( + func.ST_Contains(lake_table.c.geog, "POINT(4 1)") + ) ) assert result == "Orta" assert sorted( (r.name, int(r.area)) for r in conn.execute( - select([lake_table.c.name, lake_table.c.geog.ST_AREA().label("area")]) + select(lake_table.c.name, lake_table.c.geog.ST_AREA().label("area")) ) ) == [("Garde", 49452374328), ("Majeur", 12364036567), ("Orta", 111253664228)] # Extra: Make sure we can save a retrieved value back: - [[geog]] = conn.execute(select([lake_table.c.geog], lake_table.c.name == "Garde")) + [[geog]] = conn.execute( + select(lake_table.c.geog).where(lake_table.c.name == "Garde") + ) conn.execute(lake_table.insert().values(name="test", geog=geog)) assert ( int( list( conn.execute( - select([lake_table.c.geog.st_area()], lake_table.c.name == "test") + select(lake_table.c.geog.st_area()).where( + lake_table.c.name == "test" + ) ) )[0][0] ) @@ -122,7 +128,7 @@ def test_geoalchemy2_core(bigquery_dataset): int( list( conn.execute( - select([lake_table.c.geog.st_area()], lake_table.c.name == "test2") + select(lake_table.c.geog.st_area(), lake_table.c.name == "test2") ) )[0][0] ) diff --git a/tests/system/test_sqlalchemy_bigquery.py b/tests/system/test_sqlalchemy_bigquery.py index 62b534ff..26debdf9 100644 --- a/tests/system/test_sqlalchemy_bigquery.py +++ b/tests/system/test_sqlalchemy_bigquery.py @@ -155,24 +155,22 @@ def engine_with_location(): @pytest.fixture(scope="session") def table(engine, bigquery_dataset): - return Table(f"{bigquery_dataset}.sample", MetaData(bind=engine), autoload=True) + return Table(f"{bigquery_dataset}.sample", MetaData(), autoload_with=engine) @pytest.fixture(scope="session") def table_using_test_dataset(engine_using_test_dataset): - return Table("sample", MetaData(bind=engine_using_test_dataset), autoload=True) + return Table("sample", MetaData(), autoload_with=engine_using_test_dataset) @pytest.fixture(scope="session") def table_one_row(engine, bigquery_dataset): - return Table( - f"{bigquery_dataset}.sample_one_row", MetaData(bind=engine), autoload=True - ) + return Table(f"{bigquery_dataset}.sample_one_row", MetaData(), autoload_with=engine) @pytest.fixture(scope="session") def table_dml(engine, bigquery_empty_table): - return Table(bigquery_empty_table, MetaData(bind=engine), autoload=True) + return Table(bigquery_empty_table, MetaData(), autoload_with=engine) @pytest.fixture(scope="session") @@ -214,7 +212,7 @@ def query(table): .label("outer") ) query = ( - select([col1, col2, col3]) + select(col1, col2, col3) .where(col1 < "2017-01-01 00:00:00") .group_by(col1) .order_by(col2) @@ -225,37 +223,47 @@ def query(table): def test_engine_with_dataset(engine_using_test_dataset, bigquery_dataset): - rows = engine_using_test_dataset.execute("SELECT * FROM sample_one_row").fetchall() - assert list(rows[0]) == ONE_ROW_CONTENTS + with engine_using_test_dataset.connect() as conn: + rows = conn.execute(sqlalchemy.text("SELECT * FROM sample_one_row")).fetchall() + assert list(rows[0]) == ONE_ROW_CONTENTS - table_one_row = Table( - "sample_one_row", MetaData(bind=engine_using_test_dataset), autoload=True - ) - rows = table_one_row.select(use_labels=True).execute().fetchall() - assert list(rows[0]) == ONE_ROW_CONTENTS_EXPANDED + table_one_row = Table( + "sample_one_row", MetaData(), autoload_with=engine_using_test_dataset + ) + rows = conn.execute( + table_one_row.select().set_label_style( + sqlalchemy.LABEL_STYLE_TABLENAME_PLUS_COL + ) + ).fetchall() + assert list(rows[0]) == ONE_ROW_CONTENTS_EXPANDED - table_one_row = Table( - f"{bigquery_dataset}.sample_one_row", - MetaData(bind=engine_using_test_dataset), - autoload=True, - ) - rows = table_one_row.select(use_labels=True).execute().fetchall() - # verify that we are pulling from the specifically-named dataset, - # instead of pulling from the default dataset of the engine (which - # does not have this table at all) - assert list(rows[0]) == ONE_ROW_CONTENTS_EXPANDED + table_one_row = Table( + f"{bigquery_dataset}.sample_one_row", + MetaData(), + autoload_with=engine_using_test_dataset, + ) + rows = conn.execute( + table_one_row.select().set_label_style( + sqlalchemy.LABEL_STYLE_TABLENAME_PLUS_COL + ) + ).fetchall() + # verify that we are pulling from the specifically-named dataset, + # instead of pulling from the default dataset of the engine (which + # does not have this table at all) + assert list(rows[0]) == ONE_ROW_CONTENTS_EXPANDED def test_dataset_location( engine_with_location, bigquery_dataset, bigquery_regional_dataset ): - rows = engine_with_location.execute( - f"SELECT * FROM {bigquery_regional_dataset}.sample_one_row" - ).fetchall() - assert list(rows[0]) == ONE_ROW_CONTENTS + with engine_with_location.connect() as conn: + rows = conn.execute( + sqlalchemy.text(f"SELECT * FROM {bigquery_regional_dataset}.sample_one_row") + ).fetchall() + assert list(rows[0]) == ONE_ROW_CONTENTS -def test_reflect_select(table, table_using_test_dataset): +def test_reflect_select(table, engine_using_test_dataset, table_using_test_dataset): for table in [table, table_using_test_dataset]: assert table.comment == "A sample table containing most data types." @@ -281,56 +289,73 @@ def test_reflect_select(table, table_using_test_dataset): # when a destination table is specified, even though no # destination table is specified. When this test was written, # `use_labels` was forced by the dialect. - rows = table.select(use_labels=True).execute().fetchall() - assert len(rows) == 1000 + with engine_using_test_dataset.connect() as conn: + rows = conn.execute( + table.select().set_label_style( + sqlalchemy.LABEL_STYLE_TABLENAME_PLUS_COL + ) + ).fetchall() + assert len(rows) == 1000 def test_content_from_raw_queries(engine, bigquery_dataset): - rows = engine.execute(f"SELECT * FROM {bigquery_dataset}.sample_one_row").fetchall() - assert list(rows[0]) == ONE_ROW_CONTENTS + with engine.connect() as conn: + rows = conn.execute( + sqlalchemy.text(f"SELECT * FROM {bigquery_dataset}.sample_one_row") + ).fetchall() + assert list(rows[0]) == ONE_ROW_CONTENTS def test_record_content_from_raw_queries(engine, bigquery_dataset): - rows = engine.execute( - f"SELECT record.name FROM {bigquery_dataset}.sample_one_row" - ).fetchall() - assert rows[0][0] == "John Doe" + with engine.connect() as conn: + rows = conn.execute( + sqlalchemy.text( + f"SELECT record.name FROM {bigquery_dataset}.sample_one_row" + ) + ).fetchall() + assert rows[0][0] == "John Doe" def test_content_from_reflect(engine, table_one_row): - rows = table_one_row.select(use_labels=True).execute().fetchall() - assert list(rows[0]) == ONE_ROW_CONTENTS_EXPANDED + with engine.connect() as conn: + rows = conn.execute( + table_one_row.select().set_label_style( + sqlalchemy.LABEL_STYLE_TABLENAME_PLUS_COL + ) + ).fetchall() + assert list(rows[0]) == ONE_ROW_CONTENTS_EXPANDED def test_unicode(engine, table_one_row): unicode_str = "白人看不懂" - returned_str = sqlalchemy.select( - [expression.bindparam("好", unicode_str)], - from_obj=table_one_row, - ).scalar() + with engine.connect() as conn: + returned_str = conn.execute( + sqlalchemy.select(expression.bindparam("好", unicode_str)).select_from( + table_one_row + ) + ).scalar() assert returned_str == unicode_str def test_reflect_select_shared_table(engine): one_row = Table( - "bigquery-public-data.samples.natality", MetaData(bind=engine), autoload=True + "bigquery-public-data.samples.natality", MetaData(), autoload_with=engine ) - row = one_row.select().limit(1).execute().first() - assert len(row) >= 1 + with engine.connect() as conn: + row = conn.execute(one_row.select().limit(1)).first() + assert len(row) >= 1 def test_reflect_table_does_not_exist(engine, bigquery_dataset): with pytest.raises(NoSuchTableError): Table( f"{bigquery_dataset}.table_does_not_exist", - MetaData(bind=engine), - autoload=True, + MetaData(), + autoload_with=engine, ) assert ( - Table( - f"{bigquery_dataset}.table_does_not_exist", MetaData(bind=engine) - ).exists() + sqlalchemy.inspect(engine).has_table(f"{bigquery_dataset}.table_does_not_exist") is False ) @@ -339,18 +364,18 @@ def test_reflect_dataset_does_not_exist(engine): with pytest.raises(NoSuchTableError): Table( "dataset_does_not_exist.table_does_not_exist", - MetaData(bind=engine), - autoload=True, + MetaData(), + autoload_with=engine, ) def test_tables_list(engine, engine_using_test_dataset, bigquery_dataset): - tables = engine.table_names() + tables = sqlalchemy.inspect(engine).get_table_names() assert f"{bigquery_dataset}.sample" in tables assert f"{bigquery_dataset}.sample_one_row" in tables assert f"{bigquery_dataset}.sample_view" not in tables - tables = engine_using_test_dataset.table_names() + tables = sqlalchemy.inspect(engine_using_test_dataset).get_table_names() assert "sample" in tables assert "sample_one_row" in tables assert "sample_view" not in tables @@ -377,13 +402,13 @@ def test_nested_labels(engine, table): sqlalchemy.func.sum(col.label("inner")).label("outer") ).over(), sqlalchemy.func.sum( - sqlalchemy.case([[sqlalchemy.literal(True), col.label("inner")]]).label( + sqlalchemy.case((sqlalchemy.literal(True), col.label("inner"))).label( "outer" ) ), sqlalchemy.func.sum( sqlalchemy.func.sum( - sqlalchemy.case([[sqlalchemy.literal(True), col.label("inner")]]).label( + sqlalchemy.case((sqlalchemy.literal(True), col.label("inner"))).label( "middle" ) ).label("outer") @@ -410,7 +435,7 @@ def test_session_query( col_concat, func.avg(table.c.integer), func.sum( - case([(table.c.boolean == sqlalchemy.literal(True), 1)], else_=0) + case((table.c.boolean == sqlalchemy.literal(True), 1), else_=0) ), ) .group_by(table.c.string, col_concat) @@ -443,13 +468,14 @@ def test_custom_expression( ): """GROUP BY clause should use labels instead of expressions""" q = query(table) - result = engine.execute(q).fetchall() - assert len(result) > 0 + with engine.connect() as conn: + result = conn.execute(q).fetchall() + assert len(result) > 0 q = query(table_using_test_dataset) - result = engine_using_test_dataset.execute(q).fetchall() - - assert len(result) > 0 + with engine_using_test_dataset.connect() as conn: + result = conn.execute(q).fetchall() + assert len(result) > 0 def test_compiled_query_literal_binds( @@ -457,15 +483,17 @@ def test_compiled_query_literal_binds( ): q = query(table) compiled = q.compile(engine, compile_kwargs={"literal_binds": True}) - result = engine.execute(compiled).fetchall() - assert len(result) > 0 + with engine.connect() as conn: + result = conn.execute(compiled).fetchall() + assert len(result) > 0 q = query(table_using_test_dataset) compiled = q.compile( engine_using_test_dataset, compile_kwargs={"literal_binds": True} ) - result = engine_using_test_dataset.execute(compiled).fetchall() - assert len(result) > 0 + with engine_using_test_dataset.connect() as conn: + result = conn.execute(compiled).fetchall() + assert len(result) > 0 @pytest.mark.parametrize( @@ -494,31 +522,46 @@ def test_joins(session, table, table_one_row): def test_querying_wildcard_tables(engine): table = Table( - "bigquery-public-data.noaa_gsod.gsod*", MetaData(bind=engine), autoload=True + "bigquery-public-data.noaa_gsod.gsod*", MetaData(), autoload_with=engine ) - rows = table.select().limit(1).execute().first() - assert len(rows) > 0 + with engine.connect() as conn: + rows = conn.execute(table.select().limit(1)).first() + assert len(rows) > 0 def test_dml(engine, session, table_dml): - # test insert - engine.execute(table_dml.insert(ONE_ROW_CONTENTS_DML)) - result = table_dml.select(use_labels=True).execute().fetchall() - assert len(result) == 1 - - # test update - session.query(table_dml).filter(table_dml.c.string == "test").update( - {"string": "updated_row"}, synchronize_session=False - ) - updated_result = table_dml.select(use_labels=True).execute().fetchone() - assert updated_result[table_dml.c.string] == "updated_row" + """ + Test DML operations on a table with no data. This table is created + in the `bigquery_empty_table` fixture. - # test delete - session.query(table_dml).filter(table_dml.c.string == "updated_row").delete( - synchronize_session=False - ) - result = table_dml.select(use_labels=True).execute().fetchall() - assert len(result) == 0 + Modern versions of sqlalchemy does not really require setting the + label style. This has been maintained to retain this test. + """ + # test insert + with engine.connect() as conn: + conn.execute(table_dml.insert().values(ONE_ROW_CONTENTS_DML)) + result = conn.execute( + table_dml.select().set_label_style(sqlalchemy.LABEL_STYLE_DEFAULT) + ).fetchall() + assert len(result) == 1 + + # test update + session.query(table_dml).filter(table_dml.c.string == "test").update( + {"string": "updated_row"}, synchronize_session=False + ) + updated_result = conn.execute( + table_dml.select().set_label_style(sqlalchemy.LABEL_STYLE_DEFAULT) + ).fetchone() + assert updated_result._mapping[table_dml.c.string] == "updated_row" + + # test delete + session.query(table_dml).filter(table_dml.c.string == "updated_row").delete( + synchronize_session=False + ) + result = conn.execute( + table_dml.select().set_label_style(sqlalchemy.LABEL_STYLE_DEFAULT) + ).fetchall() + assert len(result) == 0 def test_create_table(engine, bigquery_dataset): @@ -679,16 +722,34 @@ def test_invalid_table_reference( def test_has_table(engine, engine_using_test_dataset, bigquery_dataset): - assert engine.has_table("sample", bigquery_dataset) is True - assert engine.has_table(f"{bigquery_dataset}.sample") is True - assert engine.has_table(f"{bigquery_dataset}.nonexistent_table") is False - assert engine.has_table("nonexistent_table", "nonexistent_dataset") is False + assert sqlalchemy.inspect(engine).has_table("sample", bigquery_dataset) is True + assert sqlalchemy.inspect(engine).has_table(f"{bigquery_dataset}.sample") is True + assert ( + sqlalchemy.inspect(engine).has_table(f"{bigquery_dataset}.nonexistent_table") + is False + ) + assert ( + sqlalchemy.inspect(engine).has_table("nonexistent_table", "nonexistent_dataset") + is False + ) - assert engine_using_test_dataset.has_table("sample") is True - assert engine_using_test_dataset.has_table("sample", bigquery_dataset) is True - assert engine_using_test_dataset.has_table(f"{bigquery_dataset}.sample") is True + assert sqlalchemy.inspect(engine_using_test_dataset).has_table("sample") is True + assert ( + sqlalchemy.inspect(engine_using_test_dataset).has_table( + "sample", bigquery_dataset + ) + is True + ) + assert ( + sqlalchemy.inspect(engine_using_test_dataset).has_table( + f"{bigquery_dataset}.sample" + ) + is True + ) - assert engine_using_test_dataset.has_table("sample_alt") is False + assert ( + sqlalchemy.inspect(engine_using_test_dataset).has_table("sample_alt") is False + ) def test_distinct_188(engine, bigquery_dataset): @@ -735,7 +796,7 @@ def test_huge_in(): try: assert list( conn.execute( - sqlalchemy.select([sqlalchemy.literal(-1).in_(list(range(99999)))]) + sqlalchemy.select(sqlalchemy.literal(-1).in_(list(range(99999)))) ) ) == [(False,)] except Exception: @@ -765,7 +826,7 @@ def test_unnest(engine, bigquery_dataset): conn.execute( table.insert(), [dict(objects=["a", "b", "c"]), dict(objects=["x", "y"])] ) - query = select([func.unnest(table.c.objects).alias("foo_objects").column]) + query = select(func.unnest(table.c.objects).alias("foo_objects").column) compiled = str(query.compile(engine)) assert " ".join(compiled.strip().split()) == ( f"SELECT `foo_objects`" @@ -800,10 +861,8 @@ def test_unnest_with_cte(engine, bigquery_dataset): ) selectable = select(table.c).select_from(table).cte("cte") query = select( - [ - selectable.c.foo, - func.unnest(selectable.c.bars).column_valued("unnest_bars"), - ] + selectable.c.foo, + func.unnest(selectable.c.bars).column_valued("unnest_bars"), ).select_from(selectable) compiled = str(query.compile(engine)) assert " ".join(compiled.strip().split()) == ( diff --git a/tests/unit/test__struct.py b/tests/unit/test__struct.py index 77577066..6e7c7a3d 100644 --- a/tests/unit/test__struct.py +++ b/tests/unit/test__struct.py @@ -84,7 +84,7 @@ def _col(): ) def test_struct_traversal_project(faux_conn, expr, sql): sql = f"SELECT {sql} AS `anon_1` \nFROM `t`" - assert str(sqlalchemy.select([expr]).compile(faux_conn.engine)) == sql + assert str(sqlalchemy.select(expr).compile(faux_conn.engine)) == sql @pytest.mark.parametrize( @@ -117,7 +117,7 @@ def test_struct_traversal_project(faux_conn, expr, sql): ) def test_struct_traversal_filter(faux_conn, expr, sql, param=1): want = f"SELECT `t`.`person` \nFROM `t`, `t` \nWHERE {sql}" - got = str(sqlalchemy.select([_col()]).where(expr).compile(faux_conn.engine)) + got = str(sqlalchemy.select(_col()).where(expr).compile(faux_conn.engine)) assert got == want diff --git a/tests/unit/test_compliance.py b/tests/unit/test_compliance.py index fd1fbb83..630d5058 100644 --- a/tests/unit/test_compliance.py +++ b/tests/unit/test_compliance.py @@ -52,8 +52,8 @@ def some_table(connection): def test_distinct_selectable_in_unions(faux_conn): table = some_table(faux_conn) - s1 = select([table]).where(table.c.id == 2).distinct() - s2 = select([table]).where(table.c.id == 3).distinct() + s1 = select(table).where(table.c.id == 2).distinct() + s2 = select(table).where(table.c.id == 3).distinct() u1 = union(s1, s2).limit(2) assert_result(faux_conn, u1.order_by(u1.c.id), [(2, 2, 3), (3, 3, 4)]) @@ -62,7 +62,7 @@ def test_distinct_selectable_in_unions(faux_conn): def test_limit_offset_aliased_selectable_in_unions(faux_conn): table = some_table(faux_conn) s1 = ( - select([table]) + select(table) .where(table.c.id == 2) .limit(1) .order_by(table.c.id) @@ -70,7 +70,7 @@ def test_limit_offset_aliased_selectable_in_unions(faux_conn): .select() ) s2 = ( - select([table]) + select(table) .where(table.c.id == 3) .limit(1) .order_by(table.c.id) @@ -93,16 +93,14 @@ def test_percent_sign_round_trip(faux_conn, metadata): faux_conn.execute(t.insert(), dict(data="some %% other value")) eq_( faux_conn.scalar( - select([t.c.data]).where(t.c.data == literal_column("'some % value'")) + select(t.c.data).where(t.c.data == literal_column("'some % value'")) ), "some % value", ) eq_( faux_conn.scalar( - select([t.c.data]).where( - t.c.data == literal_column("'some %% other value'") - ) + select(t.c.data).where(t.c.data == literal_column("'some %% other value'")) ), "some %% other value", ) @@ -113,7 +111,7 @@ def test_empty_set_against_integer(faux_conn): table = some_table(faux_conn) stmt = ( - select([table.c.id]) + select(table.c.id) .where(table.c.x.in_(sqlalchemy.bindparam("q", expanding=True))) .order_by(table.c.id) ) @@ -124,19 +122,15 @@ def test_empty_set_against_integer(faux_conn): @sqlalchemy_1_3_or_higher def test_null_in_empty_set_is_false(faux_conn): stmt = select( - [ - sqlalchemy.case( - [ - ( - sqlalchemy.null().in_( - sqlalchemy.bindparam("foo", value=(), expanding=True) - ), - sqlalchemy.true(), - ) - ], - else_=sqlalchemy.false(), - ) - ] + sqlalchemy.case( + ( + sqlalchemy.null().in_( + sqlalchemy.bindparam("foo", value=(), expanding=True) + ), + sqlalchemy.true(), + ), + else_=sqlalchemy.false(), + ) ) in_(faux_conn.execute(stmt).fetchone()[0], (False, 0)) @@ -170,12 +164,12 @@ def test_likish(faux_conn, meth, arg, expected): ], ) expr = getattr(table.c.data, meth)(arg) - rows = {value for value, in faux_conn.execute(select([table.c.id]).where(expr))} + rows = {value for value, in faux_conn.execute(select(table.c.id).where(expr))} eq_(rows, expected) all = {i for i in range(1, 11)} expr = sqlalchemy.not_(expr) - rows = {value for value, in faux_conn.execute(select([table.c.id]).where(expr))} + rows = {value for value, in faux_conn.execute(select(table.c.id).where(expr))} eq_(rows, all - expected) @@ -196,9 +190,7 @@ def test_group_by_composed(faux_conn): ) expr = (table.c.x + table.c.y).label("lx") - stmt = ( - select([sqlalchemy.func.count(table.c.id), expr]).group_by(expr).order_by(expr) - ) + stmt = select(sqlalchemy.func.count(table.c.id), expr).group_by(expr).order_by(expr) assert_result(faux_conn, stmt, [(1, 3), (1, 5), (1, 7)]) diff --git a/tests/unit/test_geography.py b/tests/unit/test_geography.py index 6924ade0..93b7eb37 100644 --- a/tests/unit/test_geography.py +++ b/tests/unit/test_geography.py @@ -76,7 +76,7 @@ def test_geoalchemy2_core(faux_conn, last_query): from sqlalchemy.sql import select try: - conn.execute(select([lake_table])) + conn.execute(select(lake_table)) except Exception: pass # sqlite had no special functions :) last_query( @@ -89,8 +89,8 @@ def test_geoalchemy2_core(faux_conn, last_query): try: conn.execute( - select( - [lake_table.c.name], func.ST_Contains(lake_table.c.geog, "POINT(4 1)") + select(lake_table.c.name).where( + func.ST_Contains(lake_table.c.geog, "POINT(4 1)") ) ) except Exception: @@ -104,7 +104,7 @@ def test_geoalchemy2_core(faux_conn, last_query): try: conn.execute( - select([lake_table.c.name, lake_table.c.geog.ST_Area().label("area")]) + select(lake_table.c.name, lake_table.c.geog.ST_Area().label("area")) ) except Exception: pass # sqlite had no special functions :) @@ -171,7 +171,7 @@ def test_calling_st_functions_that_dont_take_geographies(faux_conn, last_query): from sqlalchemy import select, func try: - faux_conn.execute(select([func.ST_GeogFromText("point(0 0)")])) + faux_conn.execute(select(func.ST_GeogFromText("point(0 0)"))) except Exception: pass # sqlite had no special functions :) diff --git a/tests/unit/test_select.py b/tests/unit/test_select.py index ee5e01cb..55acf4a0 100644 --- a/tests/unit/test_select.py +++ b/tests/unit/test_select.py @@ -38,7 +38,7 @@ def test_labels_not_forced(faux_conn): table = setup_table(faux_conn, "t", sqlalchemy.Column("id", sqlalchemy.Integer)) - result = faux_conn.execute(sqlalchemy.select([table.c.id])) + result = faux_conn.execute(sqlalchemy.select(table.c.id)) assert result.keys() == ["id"] # Look! Just the column name! @@ -154,14 +154,18 @@ def test_typed_parameters(faux_conn, type_, val, btype, vrep): {}, ) - assert list(map(list, faux_conn.execute(sqlalchemy.select([table])))) == [[val]] * 2 + assert list(map(list, faux_conn.execute(sqlalchemy.select(table)))) == [[val]] * 2 assert faux_conn.test_data["execute"][-1][0] == "SELECT `t`.`foo` \nFROM `t`" assert ( list( map( list, - faux_conn.execute(sqlalchemy.select([table.c.foo], use_labels=True)), + faux_conn.execute( + sqlalchemy.select(table.c.foo).set_label_style( + sqlalchemy.LABEL_STYLE_TABLENAME_PLUS_COL + ) + ), ) ) == [[val]] * 2 @@ -183,7 +187,7 @@ def test_select_struct(faux_conn, metadata): faux_conn.ex("create table t (x RECORD)") faux_conn.ex("""insert into t values ('{"y": 1}')""") - row = list(faux_conn.execute(sqlalchemy.select([table])))[0] + row = list(faux_conn.execute(sqlalchemy.select(table)))[0] # We expect the raw string, because sqlite3, unlike BigQuery # doesn't deserialize for us. assert row.x == '{"y": 1}' @@ -191,7 +195,7 @@ def test_select_struct(faux_conn, metadata): def test_select_label_starts_w_digit(faux_conn): # Make sure label names are legal identifiers - faux_conn.execute(sqlalchemy.select([sqlalchemy.literal(1).label("2foo")])) + faux_conn.execute(sqlalchemy.select(sqlalchemy.literal(1).label("2foo"))) assert ( faux_conn.test_data["execute"][-1][0] == "SELECT %(param_1:INT64)s AS `_2foo`" ) @@ -205,7 +209,7 @@ def test_force_quote(faux_conn): "t", sqlalchemy.Column(quoted_name("foo", True), sqlalchemy.Integer), ) - faux_conn.execute(sqlalchemy.select([table])) + faux_conn.execute(sqlalchemy.select(table)) assert faux_conn.test_data["execute"][-1][0] == ("SELECT `t`.`foo` \nFROM `t`") @@ -217,14 +221,14 @@ def test_disable_quote(faux_conn): "t", sqlalchemy.Column(quoted_name("foo", False), sqlalchemy.Integer), ) - faux_conn.execute(sqlalchemy.select([table])) + faux_conn.execute(sqlalchemy.select(table)) assert faux_conn.test_data["execute"][-1][0] == ("SELECT `t`.foo \nFROM `t`") @sqlalchemy_before_1_4 def test_select_in_lit_13(faux_conn): [[isin]] = faux_conn.execute( - sqlalchemy.select([sqlalchemy.literal(1).in_([1, 2, 3])]) + sqlalchemy.select(sqlalchemy.literal(1).in_([1, 2, 3])) ) assert isin assert faux_conn.test_data["execute"][-1] == ( @@ -236,7 +240,7 @@ def test_select_in_lit_13(faux_conn): @sqlalchemy_1_4_or_higher def test_select_in_lit(faux_conn, last_query): - faux_conn.execute(sqlalchemy.select([sqlalchemy.literal(1).in_([1, 2, 3])])) + faux_conn.execute(sqlalchemy.select(sqlalchemy.literal(1).in_([1, 2, 3]))) last_query( "SELECT %(param_1:INT64)s IN UNNEST(%(param_2:INT64)s) AS `anon_1`", {"param_1": 1, "param_2": [1, 2, 3]}, @@ -246,7 +250,7 @@ def test_select_in_lit(faux_conn, last_query): def test_select_in_param(faux_conn, last_query): [[isin]] = faux_conn.execute( sqlalchemy.select( - [sqlalchemy.literal(1).in_(sqlalchemy.bindparam("q", expanding=True))] + sqlalchemy.literal(1).in_(sqlalchemy.bindparam("q", expanding=True)) ), dict(q=[1, 2, 3]), ) @@ -268,7 +272,7 @@ def test_select_in_param(faux_conn, last_query): def test_select_in_param1(faux_conn, last_query): [[isin]] = faux_conn.execute( sqlalchemy.select( - [sqlalchemy.literal(1).in_(sqlalchemy.bindparam("q", expanding=True))] + sqlalchemy.literal(1).in_(sqlalchemy.bindparam("q", expanding=True)) ), dict(q=[1]), ) @@ -289,7 +293,7 @@ def test_select_in_param1(faux_conn, last_query): def test_select_in_param_empty(faux_conn, last_query): [[isin]] = faux_conn.execute( sqlalchemy.select( - [sqlalchemy.literal(1).in_(sqlalchemy.bindparam("q", expanding=True))] + sqlalchemy.literal(1).in_(sqlalchemy.bindparam("q", expanding=True)) ), dict(q=[]), ) @@ -308,7 +312,7 @@ def test_select_in_param_empty(faux_conn, last_query): @sqlalchemy_before_1_4 def test_select_notin_lit13(faux_conn): [[isnotin]] = faux_conn.execute( - sqlalchemy.select([sqlalchemy.literal(0).notin_([1, 2, 3])]) + sqlalchemy.select(sqlalchemy.literal(0).notin_([1, 2, 3])) ) assert isnotin assert faux_conn.test_data["execute"][-1] == ( @@ -320,7 +324,7 @@ def test_select_notin_lit13(faux_conn): @sqlalchemy_1_4_or_higher def test_select_notin_lit(faux_conn, last_query): - faux_conn.execute(sqlalchemy.select([sqlalchemy.literal(0).notin_([1, 2, 3])])) + faux_conn.execute(sqlalchemy.select(sqlalchemy.literal(0).notin_([1, 2, 3]))) last_query( "SELECT (%(param_1:INT64)s NOT IN UNNEST(%(param_2:INT64)s)) AS `anon_1`", {"param_1": 0, "param_2": [1, 2, 3]}, @@ -330,7 +334,7 @@ def test_select_notin_lit(faux_conn, last_query): def test_select_notin_param(faux_conn, last_query): [[isnotin]] = faux_conn.execute( sqlalchemy.select( - [sqlalchemy.literal(1).notin_(sqlalchemy.bindparam("q", expanding=True))] + sqlalchemy.literal(1).notin_(sqlalchemy.bindparam("q", expanding=True)) ), dict(q=[1, 2, 3]), ) @@ -353,7 +357,7 @@ def test_select_notin_param(faux_conn, last_query): def test_select_notin_param_empty(faux_conn, last_query): [[isnotin]] = faux_conn.execute( sqlalchemy.select( - [sqlalchemy.literal(1).notin_(sqlalchemy.bindparam("q", expanding=True))] + sqlalchemy.literal(1).notin_(sqlalchemy.bindparam("q", expanding=True)) ), dict(q=[]), ) @@ -376,7 +380,7 @@ def test_literal_binds_kwarg_with_an_IN_operator_252(faux_conn): sqlalchemy.Column("val", sqlalchemy.Integer), initial_data=[dict(val=i) for i in range(3)], ) - q = sqlalchemy.select([table.c.val]).where(table.c.val.in_([2])) + q = sqlalchemy.select(table.c.val).where(table.c.val.in_([2])) def nstr(q): return " ".join(str(q).strip().split()) @@ -444,7 +448,7 @@ def test_array_indexing(faux_conn, metadata): metadata, sqlalchemy.Column("a", sqlalchemy.ARRAY(sqlalchemy.String)), ) - got = str(sqlalchemy.select([t.c.a[0]]).compile(faux_conn.engine)) + got = str(sqlalchemy.select(t.c.a[0]).compile(faux_conn.engine)) assert got == "SELECT `t`.`a`[OFFSET(%(a_1:INT64)s)] AS `anon_1` \nFROM `t`" diff --git a/tests/unit/test_sqlalchemy_bigquery.py b/tests/unit/test_sqlalchemy_bigquery.py index 06ef79d2..d64e1b97 100644 --- a/tests/unit/test_sqlalchemy_bigquery.py +++ b/tests/unit/test_sqlalchemy_bigquery.py @@ -98,7 +98,7 @@ def test_get_table_names( ): mock_bigquery_client.list_datasets.return_value = datasets_list mock_bigquery_client.list_tables.side_effect = tables_lists - table_names = engine_under_test.table_names() + table_names = sqlalchemy.inspect(engine_under_test).get_table_names() mock_bigquery_client.list_datasets.assert_called_once() assert mock_bigquery_client.list_tables.call_count == len(datasets_list) assert list(sorted(table_names)) == list(sorted(expected)) @@ -231,7 +231,7 @@ def test_unnest_function(args, kw): "1.4" ): assert isinstance( - sqlalchemy.select([f]).subquery().c.unnest.type, sqlalchemy.String + sqlalchemy.select(f).subquery().c.unnest.type, sqlalchemy.String ) From c74a0b9a8a55f3478f73dbf68dbc8b6bbcc53fbc Mon Sep 17 00:00:00 2001 From: nayaknishant Date: Thu, 2 Nov 2023 15:24:01 -0400 Subject: [PATCH 02/62] constraints updated --- owlbot.py | 4 ++-- setup.py | 4 ++-- testing/constraints-3.7.txt | 12 ------------ testing/constraints-3.8.txt | 12 ++++++++++++ 4 files changed, 16 insertions(+), 16 deletions(-) delete mode 100644 testing/constraints-3.7.txt diff --git a/owlbot.py b/owlbot.py index 152d568f..bef241e9 100644 --- a/owlbot.py +++ b/owlbot.py @@ -90,7 +90,7 @@ '''"protobuf", # dependency of grpc''', '''"protobuf", - "sqlalchemy<2.0.0", + "sqlalchemy", # dependency of grpc''', ) @@ -162,7 +162,7 @@ def compliance(session): session.skip("Compliance tests were not found") session.install("--pre", "grpcio") - session.install("--pre", "--no-deps", "--upgrade", "sqlalchemy<2.0.0") + session.install("--pre", "--no-deps", "--upgrade", "sqlalchemy") session.install( "mock", "pytest", diff --git a/setup.py b/setup.py index 5f72cee6..420c2823 100644 --- a/setup.py +++ b/setup.py @@ -98,9 +98,9 @@ def readme(): # Until this issue is closed # https://github.com/googleapis/google-cloud-python/issues/10566 "google-auth>=1.25.0,<3.0.0dev", # Work around pip wack. - "google-cloud-bigquery>=2.25.2,<4.0.0dev", + "google-cloud-bigquery>=3.3.6,<4.0.0dev", "packaging", - "sqlalchemy>=2.0,<2.1", + "sqlalchemy>=2.0", ], extras_require=extras, python_requires=">=3.8, <3.12", diff --git a/testing/constraints-3.7.txt b/testing/constraints-3.7.txt deleted file mode 100644 index 1d0a1b72..00000000 --- a/testing/constraints-3.7.txt +++ /dev/null @@ -1,12 +0,0 @@ -# This constraints file is used to check that lower bounds -# are correct in setup.py -# List *all* library dependencies and extras in this file. -# Pin the version to the lower bound. -# -# e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev", -sqlalchemy==1.2.0 -google-auth==1.25.0 -google-cloud-bigquery==3.3.6 -google-cloud-bigquery-storage==2.0.0 -google-api-core==1.31.5 -pyarrow==3.0.0 diff --git a/testing/constraints-3.8.txt b/testing/constraints-3.8.txt index 4884f96a..351720c0 100644 --- a/testing/constraints-3.8.txt +++ b/testing/constraints-3.8.txt @@ -1 +1,13 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List *all* library dependencies and extras in this file. +# Pin the version to the lower bound. +# +# e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev", sqlalchemy==1.3.24 +google-auth==1.25.0 +google-cloud-bigquery==3.3.6 +google-cloud-bigquery-storage==2.0.0 +google-api-core==1.31.5 +grpcio==1.47.0 +pyarrow==3.0.0 From 001759e9535a3ea4d7be0fc8686282c046ac74ff Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Thu, 2 Nov 2023 20:48:16 +0000 Subject: [PATCH 03/62] =?UTF-8?q?=F0=9F=A6=89=20Updates=20from=20OwlBot=20?= =?UTF-8?q?post-processor?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --- CONTRIBUTING.rst | 4 ---- noxfile.py | 4 ++-- 2 files changed, 2 insertions(+), 6 deletions(-) diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index 25cf4de3..9d2ca02f 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -80,10 +80,6 @@ We use `nox `__ to instrument our tests. The unit tests and system tests are described in the ``noxfile.py`` files in each directory. -- System tests create a live BQ dataset and destroys it with tests:: - - $ nox -s system - .. nox: https://pypi.org/project/nox/ ***************************************** diff --git a/noxfile.py b/noxfile.py index c77e5da3..f3c4ffb8 100644 --- a/noxfile.py +++ b/noxfile.py @@ -359,7 +359,7 @@ def compliance(session): session.skip("Compliance tests were not found") session.install("--pre", "grpcio") - session.install("--pre", "--no-deps", "--upgrade", "sqlalchemy>=2.0.0,<2.1") + session.install("--pre", "--no-deps", "--upgrade", "sqlalchemy") session.install( "mock", "pytest", @@ -514,7 +514,7 @@ def prerelease_deps(session): prerel_deps = [ "protobuf", - "sqlalchemy>=2.0,<2.1", + "sqlalchemy", # dependency of grpc "six", "googleapis-common-protos", From bc5725f1355ff105c98c431eacff330927c4d684 Mon Sep 17 00:00:00 2001 From: nayaknishant Date: Tue, 7 Nov 2023 11:02:29 -0500 Subject: [PATCH 04/62] fixing README.rst --- README.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.rst b/README.rst index 995f9017..943ecc96 100644 --- a/README.rst +++ b/README.rst @@ -36,7 +36,7 @@ In order to use this library, you first need to go through the following steps: .. note:: This library is only compatible with SQLAlchemy versions >= 2.0.0 - For SQLAlchemy versions < 2.0.0, use `sqlalchemy-bigquery < 0.20.0`_. + For SQLAlchemy versions < 2.0.0, use `sqlalchemy-bigquery<=1.8.0`. Installation ------------ From 8d5c2cc2021411b35c1f7c0151cb925b23b35f43 Mon Sep 17 00:00:00 2001 From: nayaknishant Date: Tue, 7 Nov 2023 11:04:18 -0500 Subject: [PATCH 05/62] fixing README.rst --- README.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.rst b/README.rst index 943ecc96..123ccfb7 100644 --- a/README.rst +++ b/README.rst @@ -36,7 +36,7 @@ In order to use this library, you first need to go through the following steps: .. note:: This library is only compatible with SQLAlchemy versions >= 2.0.0 - For SQLAlchemy versions < 2.0.0, use `sqlalchemy-bigquery<=1.8.0`. + For SQLAlchemy versions < 2.0.0, use ``sqlalchemy-bigquery<=1.8.0``. Installation ------------ From 4a355c45c19d1126d7398ac2d6598e913b29453e Mon Sep 17 00:00:00 2001 From: nayaknishant Date: Thu, 9 Nov 2023 10:38:13 -0500 Subject: [PATCH 06/62] upping sqlalchemy version in constraints-3.8.txt --- testing/constraints-3.8.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/testing/constraints-3.8.txt b/testing/constraints-3.8.txt index 351720c0..0c1311d0 100644 --- a/testing/constraints-3.8.txt +++ b/testing/constraints-3.8.txt @@ -4,7 +4,7 @@ # Pin the version to the lower bound. # # e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev", -sqlalchemy==1.3.24 +sqlalchemy==2.0.0 google-auth==1.25.0 google-cloud-bigquery==3.3.6 google-cloud-bigquery-storage==2.0.0 From 0079d112ada04041b6c633b31d5929cfe8bda5f8 Mon Sep 17 00:00:00 2001 From: nayaknishant Date: Thu, 9 Nov 2023 10:41:37 -0500 Subject: [PATCH 07/62] adding 2.0 version restrictions to owlbot.py --- owlbot.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/owlbot.py b/owlbot.py index bef241e9..8249aed6 100644 --- a/owlbot.py +++ b/owlbot.py @@ -90,7 +90,7 @@ '''"protobuf", # dependency of grpc''', '''"protobuf", - "sqlalchemy", + "sqlalchemy>=2.0.0,<2.1", # dependency of grpc''', ) @@ -115,7 +115,7 @@ def place_before(path, text, *before_text, escape=None): "noxfile.py", "SYSTEM_TEST_PYTHON_VERSIONS=", "", - "# We're using two Python versions to test with sqlalchemy 1.3 and 1.4.", + "# We're using two Python versions to test with sqlalchemy>=2.0.0", ) place_before( @@ -162,7 +162,7 @@ def compliance(session): session.skip("Compliance tests were not found") session.install("--pre", "grpcio") - session.install("--pre", "--no-deps", "--upgrade", "sqlalchemy") + session.install("--pre", "--no-deps", "--upgrade", "sqlalchemy>=2.0.0,<2.1") session.install( "mock", "pytest", From 89ea48ee9c0963c43504d182052b14eb041063e8 Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Thu, 9 Nov 2023 15:44:03 +0000 Subject: [PATCH 08/62] =?UTF-8?q?=F0=9F=A6=89=20Updates=20from=20OwlBot=20?= =?UTF-8?q?post-processor?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --- noxfile.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/noxfile.py b/noxfile.py index f3c4ffb8..306fcde7 100644 --- a/noxfile.py +++ b/noxfile.py @@ -359,7 +359,7 @@ def compliance(session): session.skip("Compliance tests were not found") session.install("--pre", "grpcio") - session.install("--pre", "--no-deps", "--upgrade", "sqlalchemy") + session.install("--pre", "--no-deps", "--upgrade", "sqlalchemy>=2.0.0,<2.1") session.install( "mock", "pytest", @@ -514,7 +514,7 @@ def prerelease_deps(session): prerel_deps = [ "protobuf", - "sqlalchemy", + "sqlalchemy>=2.0.0,<2.1", # dependency of grpc "six", "googleapis-common-protos", From 39b0d668605cb241e08980f6d6fe4aedf5db7f52 Mon Sep 17 00:00:00 2001 From: nayaknishant Date: Wed, 15 Nov 2023 11:48:59 -0500 Subject: [PATCH 09/62] fix for --- sqlalchemy_bigquery/base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sqlalchemy_bigquery/base.py b/sqlalchemy_bigquery/base.py index 6229881b..65accf58 100644 --- a/sqlalchemy_bigquery/base.py +++ b/sqlalchemy_bigquery/base.py @@ -1070,7 +1070,7 @@ def __init__(self, *args, **kwargs): if isinstance(arg, sqlalchemy.sql.expression.ColumnElement): if not ( isinstance(arg.type, sqlalchemy.sql.sqltypes.ARRAY) - or isinstance(arg.type.impl, sqlalchemy.sql.sqltypes.ARRAY) + or (hasattr(arg.type, "impl") and isinstance(arg.type.impl, sqlalchemy.sql.sqltypes.ARRAY)) ): raise TypeError("The argument to unnest must have an ARRAY type.") self.type = arg.type.item_type From 1e813189a8275197cf6fe6acd442f31ce5a1398a Mon Sep 17 00:00:00 2001 From: kiraksi Date: Thu, 30 Nov 2023 19:37:42 -0800 Subject: [PATCH 10/62] Updated some compliance tests for sqla2 and bq --- noxfile.py | 4 +- sqlalchemy_bigquery/requirements.py | 6 + .../test_dialect_compliance.py | 162 ++++++++++++++---- 3 files changed, 141 insertions(+), 31 deletions(-) diff --git a/noxfile.py b/noxfile.py index 306fcde7..4c0d27f5 100644 --- a/noxfile.py +++ b/noxfile.py @@ -380,7 +380,9 @@ def compliance(session): session.run( "py.test", + "-s", #temporary for testing "-vv", + "-x", #temporary for testing f"--junitxml=compliance_{session.python}_sponge_log.xml", "--reruns=3", "--reruns-delay=60", @@ -388,7 +390,7 @@ def compliance(session): "--only-rerun=409 Already Exists", "--only-rerun=404 Not found", "--only-rerun=400 Cannot execute DML over a non-existent table", - system_test_folder_path, + #system_test_folder_path, - temporary comment for testing *session.posargs, # To suppress the "Deprecated API features detected!" warning when # features not compatible with 2.0 are detected, use a value of "1" diff --git a/sqlalchemy_bigquery/requirements.py b/sqlalchemy_bigquery/requirements.py index 90cc08db..854114d8 100644 --- a/sqlalchemy_bigquery/requirements.py +++ b/sqlalchemy_bigquery/requirements.py @@ -135,6 +135,12 @@ def schemas(self): named 'test_schema'.""" return unsupported() + + @property + def array_type(self): + """Target database must support array_type""" + return supported() + @property def implicit_default_schema(self): diff --git a/tests/sqlalchemy_dialect_compliance/test_dialect_compliance.py b/tests/sqlalchemy_dialect_compliance/test_dialect_compliance.py index 05db6eee..c8fc7555 100644 --- a/tests/sqlalchemy_dialect_compliance/test_dialect_compliance.py +++ b/tests/sqlalchemy_dialect_compliance/test_dialect_compliance.py @@ -40,10 +40,112 @@ QuotedNameArgumentTest, SimpleUpdateDeleteTest as _SimpleUpdateDeleteTest, TimestampMicrosecondsTest as _TimestampMicrosecondsTest, + TrueDivTest as _TrueDivTest, + NumericTest as _NumericTest ) +if packaging.version.parse(sqlalchemy.__version__) >= packaging.version.parse("2.0"): + + class TimestampMicrosecondsTest(_TimestampMicrosecondsTest): + data = datetime.datetime(2012, 10, 15, 12, 57, 18, 396, tzinfo=pytz.UTC) + #TimestampMicrosecondsTest literal() no literal_execute parameter? Go back and add to literal()" + @pytest.mark.skip("") + def test_literal(self, literal_round_trip): + pass + def test_select_direct(self, connection): + # This func added because this test was failing when passed the + # UTC timezone. + + def literal(value, type_=None): + assert value == self.data + + if type_ is not None: + assert type_ is self.datatype + + import sqlalchemy.sql.sqltypes + + return sqlalchemy.sql.elements.literal(value, self.datatype) + + with mock.patch("sqlalchemy.testing.suite.test_types.literal", literal): + super(TimestampMicrosecondsTest, self).test_select_direct(connection) + + def test_round_trip_executemany(self, connection): + unicode_table = self.tables.unicode_table + connection.execute( + unicode_table.insert(), + [{"id": i, "unicode_data": self.data} for i in range(3)], + ) + + rows = connection.execute(select(unicode_table.c.unicode_data)).fetchall() + eq_(rows, [(self.data,) for i in range(3)]) + for row in rows: + # 2.0 had no support for util.text_type + assert isinstance(row[0], str) + + sqlalchemy.testing.suite.test_types._UnicodeFixture.test_round_trip_executemany = ( + test_round_trip_executemany + ) + + # TrueDivTest issue because 1.4 always rounded down, but 2.0 rounds based on the data types. The assertion cannot reconcile 1.5==1 thusly + class TrueDivTest(_TrueDivTest): + @pytest.mark.skip("SQLAlchemy 2.0 rounds based on datatype") + def test_floordiv_integer(self): + #TODO: possibly compare rounded result instead? + pass + + @pytest.mark.skip("SQLAlchemy 2.0 rounds based on datatype") + def test_floordiv_integer_bound(self): + pass + + class SimpleUpdateDeleteTest(_SimpleUpdateDeleteTest): + """The base tests fail if operations return rows for some reason.""" + + def test_update(self): + t = self.tables.plain_pk + connection = config.db.connect() + # Had to pass in data as a dict object in 2.0 + r = connection.execute(t.update().where(t.c.id == 2), dict(data="d2_new")) + assert not r.is_insert + # assert not r.returns_rows + + eq_( + connection.execute(t.select().order_by(t.c.id)).fetchall(), + [(1, "d1"), (2, "d2_new"), (3, "d3")], + ) + + def test_delete(self): + t = self.tables.plain_pk + connection = config.db.connect() + r = connection.execute(t.delete().where(t.c.id == 2)) + assert not r.is_insert + # assert not r.returns_rows + eq_( + connection.execute(t.select().order_by(t.c.id)).fetchall(), + [(1, "d1"), (3, "d3")], + ) + + class InsertBehaviorTest(_InsertBehaviorTest): + @pytest.mark.skip( + "BQ has no autoinc and client-side defaults can't work for select." + ) + def test_insert_from_select_autoinc(cls): + pass + + # Another autoinc error? + @pytest.mark.skip("BQ has no autoinc, unless specified") + def test_no_results_for_non_returning_insert(cls): + pass -if packaging.version.parse(sqlalchemy.__version__) < packaging.version.parse("1.4"): + # BQ only supports a precision up to 38, have to delete tests with precision exceeding that + del _NumericTest.test_enotation_decimal + del _NumericTest.test_enotation_decimal_large + + # BQ cannot preserve the order when inserting multiple rows without a primary key. Filtering will lead to test failure, must modify the test. + # TODO: Modify test for non-determinsitic row ordering + del _NumericTest.test_float_as_decimal + del _NumericTest.test_float_as_float + +elif packaging.version.parse(sqlalchemy.__version__) < packaging.version.parse("1.4"): from sqlalchemy.testing.suite import LimitOffsetTest as _LimitOffsetTest class LimitOffsetTest(_LimitOffsetTest): @@ -200,12 +302,12 @@ def insert_data(cls, connection): del QuotedNameArgumentTest -class InsertBehaviorTest(_InsertBehaviorTest): - @pytest.mark.skip( - "BQ has no autoinc and client-side defaults can't work for select." - ) - def test_insert_from_select_autoinc(cls): - pass +# class InsertBehaviorTest(_InsertBehaviorTest): +# @pytest.mark.skip( +# "BQ has no autoinc and client-side defaults can't work for select." +# ) +# def test_insert_from_select_autoinc(cls): +# pass class ExistsTest(_ExistsTest): @@ -244,29 +346,29 @@ def test_select_exists_false(self, connection): del LongNameBlowoutTest -class SimpleUpdateDeleteTest(_SimpleUpdateDeleteTest): - """The base tests fail if operations return rows for some reason.""" - - def test_update(self): - t = self.tables.plain_pk - r = config.db.execute(t.update().where(t.c.id == 2), data="d2_new") - assert not r.is_insert - # assert not r.returns_rows - - eq_( - config.db.execute(t.select().order_by(t.c.id)).fetchall(), - [(1, "d1"), (2, "d2_new"), (3, "d3")], - ) - - def test_delete(self): - t = self.tables.plain_pk - r = config.db.execute(t.delete().where(t.c.id == 2)) - assert not r.is_insert - # assert not r.returns_rows - eq_( - config.db.execute(t.select().order_by(t.c.id)).fetchall(), - [(1, "d1"), (3, "d3")], - ) +# class SimpleUpdateDeleteTest(_SimpleUpdateDeleteTest): +# """The base tests fail if operations return rows for some reason.""" + +# def test_update(self): +# t = self.tables.plain_pk +# r = config.db.execute(t.update().where(t.c.id == 2), data="d2_new") +# assert not r.is_insert +# # assert not r.returns_rows + +# eq_( +# config.db.execute(t.select().order_by(t.c.id)).fetchall(), +# [(1, "d1"), (2, "d2_new"), (3, "d3")], +# ) + +# def test_delete(self): +# t = self.tables.plain_pk +# r = config.db.execute(t.delete().where(t.c.id == 2)) +# assert not r.is_insert +# # assert not r.returns_rows +# eq_( +# config.db.execute(t.select().order_by(t.c.id)).fetchall(), +# [(1, "d1"), (3, "d3")], +# ) class CTETest(_CTETest): From eeffbe688c98fc099af09fd494dafc5629d25b02 Mon Sep 17 00:00:00 2001 From: kiraksi Date: Fri, 1 Dec 2023 18:59:38 -0800 Subject: [PATCH 11/62] Addressed snippet errors --- samples/snippets/STRUCT.py | 2 +- samples/snippets/geography.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/samples/snippets/STRUCT.py b/samples/snippets/STRUCT.py index ce59f90b..5cd4beb7 100644 --- a/samples/snippets/STRUCT.py +++ b/samples/snippets/STRUCT.py @@ -25,7 +25,7 @@ def example(engine): from sqlalchemy import Column, String, Integer, Float from sqlalchemy_bigquery import STRUCT - Base = declarative_base() + Base = sqlalchemy.orm.declarative_base() class Car(Base): __tablename__ = "Cars" diff --git a/samples/snippets/geography.py b/samples/snippets/geography.py index d6adc115..45058a95 100644 --- a/samples/snippets/geography.py +++ b/samples/snippets/geography.py @@ -24,7 +24,7 @@ def example(engine): from sqlalchemy import Column, String from sqlalchemy_bigquery import GEOGRAPHY - Base = declarative_base() + Base = sqlalchemy.orm.declarative_base() class Lake(Base): __tablename__ = "lakes" From 02921c20b48c666003db218b6e2bd1080fc20dd9 Mon Sep 17 00:00:00 2001 From: kiraksi Date: Fri, 1 Dec 2023 19:05:41 -0800 Subject: [PATCH 12/62] revert bad commit --- samples/snippets/STRUCT.py | 2 +- samples/snippets/geography.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/samples/snippets/STRUCT.py b/samples/snippets/STRUCT.py index 5cd4beb7..ce59f90b 100644 --- a/samples/snippets/STRUCT.py +++ b/samples/snippets/STRUCT.py @@ -25,7 +25,7 @@ def example(engine): from sqlalchemy import Column, String, Integer, Float from sqlalchemy_bigquery import STRUCT - Base = sqlalchemy.orm.declarative_base() + Base = declarative_base() class Car(Base): __tablename__ = "Cars" diff --git a/samples/snippets/geography.py b/samples/snippets/geography.py index 45058a95..d6adc115 100644 --- a/samples/snippets/geography.py +++ b/samples/snippets/geography.py @@ -24,7 +24,7 @@ def example(engine): from sqlalchemy import Column, String from sqlalchemy_bigquery import GEOGRAPHY - Base = sqlalchemy.orm.declarative_base() + Base = declarative_base() class Lake(Base): __tablename__ = "lakes" From 1e9a72ed046b98bcacbf6ce9123951bc2d8189d2 Mon Sep 17 00:00:00 2001 From: kiraksi Date: Mon, 4 Dec 2023 16:32:56 -0800 Subject: [PATCH 13/62] More compliance tests checking --- noxfile.py | 4 +- .../test_dialect_compliance.py | 130 ++++++++++++++++-- 2 files changed, 119 insertions(+), 15 deletions(-) diff --git a/noxfile.py b/noxfile.py index 4c0d27f5..306fcde7 100644 --- a/noxfile.py +++ b/noxfile.py @@ -380,9 +380,7 @@ def compliance(session): session.run( "py.test", - "-s", #temporary for testing "-vv", - "-x", #temporary for testing f"--junitxml=compliance_{session.python}_sponge_log.xml", "--reruns=3", "--reruns-delay=60", @@ -390,7 +388,7 @@ def compliance(session): "--only-rerun=409 Already Exists", "--only-rerun=404 Not found", "--only-rerun=400 Cannot execute DML over a non-existent table", - #system_test_folder_path, - temporary comment for testing + system_test_folder_path, *session.posargs, # To suppress the "Deprecated API features detected!" warning when # features not compatible with 2.0 are detected, use a value of "1" diff --git a/tests/sqlalchemy_dialect_compliance/test_dialect_compliance.py b/tests/sqlalchemy_dialect_compliance/test_dialect_compliance.py index c8fc7555..1632a44d 100644 --- a/tests/sqlalchemy_dialect_compliance/test_dialect_compliance.py +++ b/tests/sqlalchemy_dialect_compliance/test_dialect_compliance.py @@ -18,6 +18,7 @@ # CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. import datetime +import decimal import mock import packaging.version import pytest @@ -41,10 +42,12 @@ SimpleUpdateDeleteTest as _SimpleUpdateDeleteTest, TimestampMicrosecondsTest as _TimestampMicrosecondsTest, TrueDivTest as _TrueDivTest, - NumericTest as _NumericTest + IntegerTest as _IntegerTest, + NumericTest as _NumericTest, ) if packaging.version.parse(sqlalchemy.__version__) >= packaging.version.parse("2.0"): + from sqlalchemy.sql import type_coerce class TimestampMicrosecondsTest(_TimestampMicrosecondsTest): data = datetime.datetime(2012, 10, 15, 12, 57, 18, 396, tzinfo=pytz.UTC) @@ -132,18 +135,57 @@ def test_insert_from_select_autoinc(cls): pass # Another autoinc error? - @pytest.mark.skip("BQ has no autoinc, unless specified") + @pytest.mark.skip("") def test_no_results_for_non_returning_insert(cls): pass - # BQ only supports a precision up to 38, have to delete tests with precision exceeding that - del _NumericTest.test_enotation_decimal - del _NumericTest.test_enotation_decimal_large - - # BQ cannot preserve the order when inserting multiple rows without a primary key. Filtering will lead to test failure, must modify the test. - # TODO: Modify test for non-determinsitic row ordering - del _NumericTest.test_float_as_decimal - del _NumericTest.test_float_as_float + # BQ has no autoinc and client-side defaults can't work for select + del _IntegerTest.test_huge_int_auto_accommodation + + + class NumericTest(_NumericTest): + @testing.fixture + def do_numeric_test(self, metadata, connection): + def run(type_, input_, output, filter_=None, check_scale=False): + t = Table("t", metadata, Column("x", type_)) + t.create(connection) + connection.execute(t.insert(), [{"x": x} for x in input_]) + + result = {row[0] for row in connection.execute(t.select())} + output = set(output) + if filter_: + result = {filter_(x) for x in result} + output = {filter_(x) for x in output} + eq_(result, output) + if check_scale: + eq_([str(x) for x in result], [str(x) for x in output]) + + where_expr = True + + # Adding where clause + connection.execute(t.delete().where(where_expr)) + + # test that this is actually a number! + # note we have tiny scale here as we have tests with very + # small scale Numeric types. PostgreSQL will raise an error + # if you use values outside the available scale. + if type_.asdecimal: + test_value = decimal.Decimal("2.9") + add_value = decimal.Decimal("37.12") + else: + test_value = 2.9 + add_value = 37.12 + + connection.execute(t.insert(), {"x": test_value}) + assert_we_are_a_number = connection.scalar( + select(type_coerce(t.c.x + add_value, type_)) + ) + eq_( + round(assert_we_are_a_number, 3), + round(test_value + add_value, 3), + ) + + return run elif packaging.version.parse(sqlalchemy.__version__) < packaging.version.parse("1.4"): from sqlalchemy.testing.suite import LimitOffsetTest as _LimitOffsetTest @@ -183,6 +225,38 @@ def literal(value, type_=None): with mock.patch("sqlalchemy.testing.suite.test_types.literal", literal): super(TimestampMicrosecondsTest, self).test_select_direct(connection) + + class InsertBehaviorTest(_InsertBehaviorTest): + @pytest.mark.skip( + "BQ has no autoinc and client-side defaults can't work for select." + ) + def test_insert_from_select_autoinc(cls): + pass + + class SimpleUpdateDeleteTest(_SimpleUpdateDeleteTest): + """The base tests fail if operations return rows for some reason.""" + + def test_update(self): + t = self.tables.plain_pk + r = config.db.execute(t.update().where(t.c.id == 2), data="d2_new") + assert not r.is_insert + # assert not r.returns_rows + + eq_( + config.db.execute(t.select().order_by(t.c.id)).fetchall(), + [(1, "d1"), (2, "d2_new"), (3, "d3")], + ) + + def test_delete(self): + t = self.tables.plain_pk + r = config.db.execute(t.delete().where(t.c.id == 2)) + assert not r.is_insert + # assert not r.returns_rows + eq_( + config.db.execute(t.select().order_by(t.c.id)).fetchall(), + [(1, "d1"), (3, "d3")], + ) + else: from sqlalchemy.testing.suite import ( @@ -297,9 +371,29 @@ def insert_data(cls, connection): ], ) + class SimpleUpdateDeleteTest(_SimpleUpdateDeleteTest): + """The base tests fail if operations return rows for some reason.""" -# Quotes aren't allowed in BigQuery table names. -del QuotedNameArgumentTest + def test_update(self): + t = self.tables.plain_pk + r = config.db.execute(t.update().where(t.c.id == 2), data="d2_new") + assert not r.is_insert + # assert not r.returns_rows + + eq_( + config.db.execute(t.select().order_by(t.c.id)).fetchall(), + [(1, "d1"), (2, "d2_new"), (3, "d3")], + ) + + def test_delete(self): + t = self.tables.plain_pk + r = config.db.execute(t.delete().where(t.c.id == 2)) + assert not r.is_insert + # assert not r.returns_rows + eq_( + config.db.execute(t.select().order_by(t.c.id)).fetchall(), + [(1, "d1"), (3, "d3")], + ) # class InsertBehaviorTest(_InsertBehaviorTest): @@ -310,6 +404,18 @@ def insert_data(cls, connection): # pass +# Quotes aren't allowed in BigQuery table names. +del QuotedNameArgumentTest + + +class InsertBehaviorTest(_InsertBehaviorTest): + @pytest.mark.skip( + "BQ has no autoinc and client-side defaults can't work for select." + ) + def test_insert_from_select_autoinc(cls): + pass + + class ExistsTest(_ExistsTest): """ Override From a84af4b4f6b608794b7f8402f0acbfb593d408bb Mon Sep 17 00:00:00 2001 From: kiraksi Date: Mon, 4 Dec 2023 16:51:20 -0800 Subject: [PATCH 14/62] reformatted with black --- .../test_dialect_compliance.py | 36 ++++++++----------- 1 file changed, 14 insertions(+), 22 deletions(-) diff --git a/tests/sqlalchemy_dialect_compliance/test_dialect_compliance.py b/tests/sqlalchemy_dialect_compliance/test_dialect_compliance.py index 1632a44d..30678b02 100644 --- a/tests/sqlalchemy_dialect_compliance/test_dialect_compliance.py +++ b/tests/sqlalchemy_dialect_compliance/test_dialect_compliance.py @@ -48,13 +48,15 @@ if packaging.version.parse(sqlalchemy.__version__) >= packaging.version.parse("2.0"): from sqlalchemy.sql import type_coerce - + class TimestampMicrosecondsTest(_TimestampMicrosecondsTest): data = datetime.datetime(2012, 10, 15, 12, 57, 18, 396, tzinfo=pytz.UTC) - #TimestampMicrosecondsTest literal() no literal_execute parameter? Go back and add to literal()" + + # TimestampMicrosecondsTest literal() no literal_execute parameter? Go back and add to literal()" @pytest.mark.skip("") def test_literal(self, literal_round_trip): pass + def test_select_direct(self, connection): # This func added because this test was failing when passed the # UTC timezone. @@ -93,13 +95,13 @@ def test_round_trip_executemany(self, connection): class TrueDivTest(_TrueDivTest): @pytest.mark.skip("SQLAlchemy 2.0 rounds based on datatype") def test_floordiv_integer(self): - #TODO: possibly compare rounded result instead? + # TODO: possibly compare rounded result instead? pass @pytest.mark.skip("SQLAlchemy 2.0 rounds based on datatype") def test_floordiv_integer_bound(self): pass - + class SimpleUpdateDeleteTest(_SimpleUpdateDeleteTest): """The base tests fail if operations return rows for some reason.""" @@ -126,7 +128,7 @@ def test_delete(self): connection.execute(t.select().order_by(t.c.id)).fetchall(), [(1, "d1"), (3, "d3")], ) - + class InsertBehaviorTest(_InsertBehaviorTest): @pytest.mark.skip( "BQ has no autoinc and client-side defaults can't work for select." @@ -134,7 +136,7 @@ class InsertBehaviorTest(_InsertBehaviorTest): def test_insert_from_select_autoinc(cls): pass - # Another autoinc error? + # TODO: Find cause of error @pytest.mark.skip("") def test_no_results_for_non_returning_insert(cls): pass @@ -142,7 +144,6 @@ def test_no_results_for_non_returning_insert(cls): # BQ has no autoinc and client-side defaults can't work for select del _IntegerTest.test_huge_int_auto_accommodation - class NumericTest(_NumericTest): @testing.fixture def do_numeric_test(self, metadata, connection): @@ -225,14 +226,14 @@ def literal(value, type_=None): with mock.patch("sqlalchemy.testing.suite.test_types.literal", literal): super(TimestampMicrosecondsTest, self).test_select_direct(connection) - + class InsertBehaviorTest(_InsertBehaviorTest): @pytest.mark.skip( "BQ has no autoinc and client-side defaults can't work for select." ) def test_insert_from_select_autoinc(cls): pass - + class SimpleUpdateDeleteTest(_SimpleUpdateDeleteTest): """The base tests fail if operations return rows for some reason.""" @@ -257,7 +258,6 @@ def test_delete(self): [(1, "d1"), (3, "d3")], ) - else: from sqlalchemy.testing.suite import ( FetchLimitOffsetTest as _FetchLimitOffsetTest, @@ -396,6 +396,10 @@ def test_delete(self): ) +# Quotes aren't allowed in BigQuery table names. +del QuotedNameArgumentTest + + # class InsertBehaviorTest(_InsertBehaviorTest): # @pytest.mark.skip( # "BQ has no autoinc and client-side defaults can't work for select." @@ -404,18 +408,6 @@ def test_delete(self): # pass -# Quotes aren't allowed in BigQuery table names. -del QuotedNameArgumentTest - - -class InsertBehaviorTest(_InsertBehaviorTest): - @pytest.mark.skip( - "BQ has no autoinc and client-side defaults can't work for select." - ) - def test_insert_from_select_autoinc(cls): - pass - - class ExistsTest(_ExistsTest): """ Override From c09a6764726f4a799cd2739a519d2874d4fb206d Mon Sep 17 00:00:00 2001 From: kiraksi Date: Thu, 7 Dec 2023 20:12:19 -0800 Subject: [PATCH 15/62] Changed more compliance tests, updated requirements for testing --- samples/snippets/requirements.txt | 2 +- setup.py | 2 +- .../test_dialect_compliance.py | 148 +++++++++++++++++- 3 files changed, 146 insertions(+), 6 deletions(-) diff --git a/samples/snippets/requirements.txt b/samples/snippets/requirements.txt index 12e0c0dc..35e9534f 100644 --- a/samples/snippets/requirements.txt +++ b/samples/snippets/requirements.txt @@ -28,6 +28,6 @@ requests==2.31.0 rsa==4.9 shapely==2.0.1 six==1.16.0 -sqlalchemy===1.4.27 +sqlalchemy===2.0.22 typing-extensions==4.7.1 urllib3==1.26.18 diff --git a/setup.py b/setup.py index 420c2823..b3e548f7 100644 --- a/setup.py +++ b/setup.py @@ -100,7 +100,7 @@ def readme(): "google-auth>=1.25.0,<3.0.0dev", # Work around pip wack. "google-cloud-bigquery>=3.3.6,<4.0.0dev", "packaging", - "sqlalchemy>=2.0", + "sqlalchemy>=2.0,<2.0.23", ], extras_require=extras, python_requires=">=3.8, <3.12", diff --git a/tests/sqlalchemy_dialect_compliance/test_dialect_compliance.py b/tests/sqlalchemy_dialect_compliance/test_dialect_compliance.py index 30678b02..676cab4c 100644 --- a/tests/sqlalchemy_dialect_compliance/test_dialect_compliance.py +++ b/tests/sqlalchemy_dialect_compliance/test_dialect_compliance.py @@ -44,6 +44,8 @@ TrueDivTest as _TrueDivTest, IntegerTest as _IntegerTest, NumericTest as _NumericTest, + DifficultParametersTest as _DifficultParametersTest, + FetchLimitOffsetTest as _FetchLimitOffsetTest, ) if packaging.version.parse(sqlalchemy.__version__) >= packaging.version.parse("2.0"): @@ -91,14 +93,14 @@ def test_round_trip_executemany(self, connection): test_round_trip_executemany ) - # TrueDivTest issue because 1.4 always rounded down, but 2.0 rounds based on the data types. The assertion cannot reconcile 1.5==1 thusly + # TrueDivTest issue because SQLAlchemy always rounded down. The assertion cannot reconcile 1.5==1 thusly class TrueDivTest(_TrueDivTest): - @pytest.mark.skip("SQLAlchemy 2.0 rounds based on datatype") + @pytest.mark.skip("Bigquery rounds based on datatype") def test_floordiv_integer(self): # TODO: possibly compare rounded result instead? pass - @pytest.mark.skip("SQLAlchemy 2.0 rounds based on datatype") + @pytest.mark.skip("Bigquery rounds based on datatype") def test_floordiv_integer_bound(self): pass @@ -137,7 +139,7 @@ def test_insert_from_select_autoinc(cls): pass # TODO: Find cause of error - @pytest.mark.skip("") + @pytest.mark.skip("BQ has no autoinc and client-side defaults can't work for select.") def test_no_results_for_non_returning_insert(cls): pass @@ -187,6 +189,144 @@ def run(type_, input_, output, filter_=None, check_scale=False): ) return run + class DifficultParametersTest(_DifficultParametersTest): + #removed parameters that dont work with bigquery + tough_parameters = testing.combinations( + ("boring",), + ("per cent",), + ("per % cent",), + ("%percent",), + ("col:ons",), + ("_starts_with_underscore",), + ("more :: %colons%",), + ("_name",), + ("___name",), + ("42numbers",), + ("percent%signs",), + ("has spaces",), + ("1param",), + ("1col:on",), + argnames="paramname", + ) + + @tough_parameters + @config.requirements.unusual_column_name_characters + def test_round_trip_same_named_column( + self, paramname, connection, metadata + ): + name = paramname + + t = Table( + "t", + metadata, + Column("id", Integer, primary_key=True), + Column(name, String(50), nullable=False), + ) + + # table is created + t.create(connection) + + # automatic param generated by insert + connection.execute(t.insert().values({"id": 1, name: "some name"})) + + # automatic param generated by criteria, plus selecting the column + stmt = select(t.c[name]).where(t.c[name] == "some name") + + eq_(connection.scalar(stmt), "some name") + + # use the name in a param explicitly + stmt = select(t.c[name]).where(t.c[name] == bindparam(name)) + + row = connection.execute(stmt, {name: "some name"}).first() + + # name works as the key from cursor.description + eq_(row._mapping[name], "some name") + + # use expanding IN + stmt = select(t.c[name]).where( + t.c[name].in_(["some name", "some other_name"]) + ) + + row = connection.execute(stmt).first() + + @testing.fixture + def multirow_fixture(self, metadata, connection): + mytable = Table( + "mytable", + metadata, + Column("myid", Integer), + Column("name", String(50)), + Column("desc", String(50)), + ) + + mytable.create(connection) + + connection.execute( + mytable.insert(), + [ + {"myid": 1, "name": "a", "desc": "a_desc"}, + {"myid": 2, "name": "b", "desc": "b_desc"}, + {"myid": 3, "name": "c", "desc": "c_desc"}, + {"myid": 4, "name": "d", "desc": "d_desc"}, + ], + ) + yield mytable + + @tough_parameters + def test_standalone_bindparam_escape( + self, paramname, connection, multirow_fixture + ): + tbl1 = multirow_fixture + stmt = select(tbl1.c.myid).where( + tbl1.c.name == bindparam(paramname, value="x") + ) + res = connection.scalar(stmt, {paramname: "c"}) + eq_(res, 3) + + @tough_parameters + def test_standalone_bindparam_escape_expanding( + self, paramname, connection, multirow_fixture + ): + tbl1 = multirow_fixture + stmt = ( + select(tbl1.c.myid) + .where(tbl1.c.name.in_(bindparam(paramname, value=["a", "b"]))) + .order_by(tbl1.c.myid) + ) + + res = connection.scalars(stmt, {paramname: ["d", "a"]}).all() + eq_(res, [1, 4]) + + + class FetchLimitOffsetTest(_FetchLimitOffsetTest): + @pytest.mark.skip("BigQuery doesn't allow an offset without a limit.") + def test_simple_offset(self): + pass + + test_bound_offset = test_simple_offset + test_expr_offset = test_simple_offset_zero = test_simple_offset + + # The original test is missing an order by. + + # Also, note that sqlalchemy union is a union distinct, not a + # union all. This test caught that were were getting that wrong. + def test_limit_render_multiple_times(self, connection): + table = self.tables.some_table + stmt = select(table.c.id).order_by(table.c.id).limit(1).scalar_subquery() + + u = sqlalchemy.union(select(stmt), select(stmt)).subquery().select() + + self._assert_result( + connection, + u, + [(1,)], + ) + + # from else statement .... + del DistinctOnTest # expects unquoted table names. + del HasIndexTest # BQ doesn't do the indexes that SQLA is loooking for. + del IdentityAutoincrementTest # BQ doesn't do autoincrement + elif packaging.version.parse(sqlalchemy.__version__) < packaging.version.parse("1.4"): from sqlalchemy.testing.suite import LimitOffsetTest as _LimitOffsetTest From 00274e051a01d7f3a8910278a4012b6819305fd1 Mon Sep 17 00:00:00 2001 From: kiraksi Date: Fri, 8 Dec 2023 09:14:29 -0800 Subject: [PATCH 16/62] Fixed attributeerror in failing sample test --- sqlalchemy_bigquery/_struct.py | 2 ++ sqlalchemy_bigquery/base.py | 5 ++++- sqlalchemy_bigquery/requirements.py | 3 +-- .../test_dialect_compliance.py | 14 +++++++------- 4 files changed, 14 insertions(+), 10 deletions(-) diff --git a/sqlalchemy_bigquery/_struct.py b/sqlalchemy_bigquery/_struct.py index fc551c12..7c084c98 100644 --- a/sqlalchemy_bigquery/_struct.py +++ b/sqlalchemy_bigquery/_struct.py @@ -103,6 +103,8 @@ def _setup_getitem(self, name): def __getattr__(self, name): if name.lower() in self.expr.type._STRUCT_byname: return self[name] + else: + raise AttributeError(name) comparator_factory = Comparator diff --git a/sqlalchemy_bigquery/base.py b/sqlalchemy_bigquery/base.py index 65accf58..1d65d166 100644 --- a/sqlalchemy_bigquery/base.py +++ b/sqlalchemy_bigquery/base.py @@ -1070,7 +1070,10 @@ def __init__(self, *args, **kwargs): if isinstance(arg, sqlalchemy.sql.expression.ColumnElement): if not ( isinstance(arg.type, sqlalchemy.sql.sqltypes.ARRAY) - or (hasattr(arg.type, "impl") and isinstance(arg.type.impl, sqlalchemy.sql.sqltypes.ARRAY)) + or ( + hasattr(arg.type, "impl") + and isinstance(arg.type.impl, sqlalchemy.sql.sqltypes.ARRAY) + ) ): raise TypeError("The argument to unnest must have an ARRAY type.") self.type = arg.type.item_type diff --git a/sqlalchemy_bigquery/requirements.py b/sqlalchemy_bigquery/requirements.py index 854114d8..118e3946 100644 --- a/sqlalchemy_bigquery/requirements.py +++ b/sqlalchemy_bigquery/requirements.py @@ -135,13 +135,12 @@ def schemas(self): named 'test_schema'.""" return unsupported() - + @property def array_type(self): """Target database must support array_type""" return supported() - @property def implicit_default_schema(self): """target system has a strong concept of 'default' schema that can diff --git a/tests/sqlalchemy_dialect_compliance/test_dialect_compliance.py b/tests/sqlalchemy_dialect_compliance/test_dialect_compliance.py index 676cab4c..1b3412cc 100644 --- a/tests/sqlalchemy_dialect_compliance/test_dialect_compliance.py +++ b/tests/sqlalchemy_dialect_compliance/test_dialect_compliance.py @@ -139,7 +139,9 @@ def test_insert_from_select_autoinc(cls): pass # TODO: Find cause of error - @pytest.mark.skip("BQ has no autoinc and client-side defaults can't work for select.") + @pytest.mark.skip( + "BQ has no autoinc and client-side defaults can't work for select." + ) def test_no_results_for_non_returning_insert(cls): pass @@ -189,8 +191,9 @@ def run(type_, input_, output, filter_=None, check_scale=False): ) return run + class DifficultParametersTest(_DifficultParametersTest): - #removed parameters that dont work with bigquery + # removed parameters that dont work with bigquery tough_parameters = testing.combinations( ("boring",), ("per cent",), @@ -211,9 +214,7 @@ class DifficultParametersTest(_DifficultParametersTest): @tough_parameters @config.requirements.unusual_column_name_characters - def test_round_trip_same_named_column( - self, paramname, connection, metadata - ): + def test_round_trip_same_named_column(self, paramname, connection, metadata): name = paramname t = Table( @@ -297,7 +298,6 @@ def test_standalone_bindparam_escape_expanding( res = connection.scalars(stmt, {paramname: ["d", "a"]}).all() eq_(res, [1, 4]) - class FetchLimitOffsetTest(_FetchLimitOffsetTest): @pytest.mark.skip("BigQuery doesn't allow an offset without a limit.") def test_simple_offset(self): @@ -321,7 +321,7 @@ def test_limit_render_multiple_times(self, connection): u, [(1,)], ) - + # from else statement .... del DistinctOnTest # expects unquoted table names. del HasIndexTest # BQ doesn't do the indexes that SQLA is loooking for. From cb34e6bf406c7ee139a20fb232c69abbce11fd9d Mon Sep 17 00:00:00 2001 From: kiraksi Date: Sun, 10 Dec 2023 22:59:39 -0800 Subject: [PATCH 17/62] Fixed geolography test failing issue --- sqlalchemy_bigquery/base.py | 15 +++++++++-- .../test_dialect_compliance.py | 26 +++++++------------ tests/system/test_geography.py | 4 ++- 3 files changed, 26 insertions(+), 19 deletions(-) diff --git a/sqlalchemy_bigquery/base.py b/sqlalchemy_bigquery/base.py index 1d65d166..9891aada 100644 --- a/sqlalchemy_bigquery/base.py +++ b/sqlalchemy_bigquery/base.py @@ -972,8 +972,19 @@ def _get_table(self, connection, table_name, schema=None): return table def has_table(self, connection, table_name, schema=None, **kw): - """ - No kw are supported + """Checks whether a table exists in BigQuery. + + Args: + connection (google.cloud.bigquery.client.Client): The client + object used to interact with BigQuery. + table_name (str): The name of the table to check for. + schema (str, optional): The name of the schema to which the table + belongs. Defaults to the default schema. + **kw (dict): Any extra keyword arguments will be ignored. + + Returns: + bool: True if the table exists, False otherwise. + """ try: self._get_table(connection, table_name, schema) diff --git a/tests/sqlalchemy_dialect_compliance/test_dialect_compliance.py b/tests/sqlalchemy_dialect_compliance/test_dialect_compliance.py index 1b3412cc..3094cf69 100644 --- a/tests/sqlalchemy_dialect_compliance/test_dialect_compliance.py +++ b/tests/sqlalchemy_dialect_compliance/test_dialect_compliance.py @@ -41,24 +41,21 @@ QuotedNameArgumentTest, SimpleUpdateDeleteTest as _SimpleUpdateDeleteTest, TimestampMicrosecondsTest as _TimestampMicrosecondsTest, - TrueDivTest as _TrueDivTest, - IntegerTest as _IntegerTest, - NumericTest as _NumericTest, - DifficultParametersTest as _DifficultParametersTest, - FetchLimitOffsetTest as _FetchLimitOffsetTest, ) if packaging.version.parse(sqlalchemy.__version__) >= packaging.version.parse("2.0"): from sqlalchemy.sql import type_coerce + from sqlalchemy.testing.suite import ( + TrueDivTest as _TrueDivTest, + IntegerTest as _IntegerTest, + NumericTest as _NumericTest, + DifficultParametersTest as _DifficultParametersTest, + FetchLimitOffsetTest as _FetchLimitOffsetTest, + ) class TimestampMicrosecondsTest(_TimestampMicrosecondsTest): data = datetime.datetime(2012, 10, 15, 12, 57, 18, 396, tzinfo=pytz.UTC) - # TimestampMicrosecondsTest literal() no literal_execute parameter? Go back and add to literal()" - @pytest.mark.skip("") - def test_literal(self, literal_round_trip): - pass - def test_select_direct(self, connection): # This func added because this test was failing when passed the # UTC timezone. @@ -93,14 +90,12 @@ def test_round_trip_executemany(self, connection): test_round_trip_executemany ) - # TrueDivTest issue because SQLAlchemy always rounded down. The assertion cannot reconcile 1.5==1 thusly class TrueDivTest(_TrueDivTest): - @pytest.mark.skip("Bigquery rounds based on datatype") + @pytest.mark.skip("BQ rounds based on datatype") def test_floordiv_integer(self): - # TODO: possibly compare rounded result instead? pass - @pytest.mark.skip("Bigquery rounds based on datatype") + @pytest.mark.skip("BQ rounds based on datatype") def test_floordiv_integer_bound(self): pass @@ -138,7 +133,6 @@ class InsertBehaviorTest(_InsertBehaviorTest): def test_insert_from_select_autoinc(cls): pass - # TODO: Find cause of error @pytest.mark.skip( "BQ has no autoinc and client-side defaults can't work for select." ) @@ -167,7 +161,7 @@ def run(type_, input_, output, filter_=None, check_scale=False): where_expr = True - # Adding where clause + # Adding where clause for 2.0 compatibility connection.execute(t.delete().where(where_expr)) # test that this is actually a number! diff --git a/tests/system/test_geography.py b/tests/system/test_geography.py index 50939513..c04748af 100644 --- a/tests/system/test_geography.py +++ b/tests/system/test_geography.py @@ -128,7 +128,9 @@ def test_geoalchemy2_core(bigquery_dataset): int( list( conn.execute( - select(lake_table.c.geog.st_area(), lake_table.c.name == "test2") + select(lake_table.c.geog.st_area()).where( + lake_table.c.name == "test2" + ) ) )[0][0] ) From 08d93b04d4ae5e05f7530564fbf672e1e21ee174 Mon Sep 17 00:00:00 2001 From: Chalmer Lowe Date: Mon, 11 Dec 2023 14:12:56 +0000 Subject: [PATCH 18/62] Minor tweaks to tests and code --- README.rst | 16 +++--- noxfile.py | 2 +- setup.cfg | 3 + setup.py | 2 +- sqlalchemy_bigquery/base.py | 15 ++--- sqlalchemy_bigquery/requirements.py | 2 +- .../test_dialect_compliance.py | 55 +++++++++++++------ 7 files changed, 61 insertions(+), 34 deletions(-) diff --git a/README.rst b/README.rst index 123ccfb7..61930823 100644 --- a/README.rst +++ b/README.rst @@ -35,8 +35,8 @@ In order to use this library, you first need to go through the following steps: .. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html .. note:: - This library is only compatible with SQLAlchemy versions >= 2.0.0 - For SQLAlchemy versions < 2.0.0, use ``sqlalchemy-bigquery<=1.8.0``. + This library is a prerelease to gauge compatiblity with SQLAlchemy + versions >= 2.0.0 Installation ------------ @@ -105,11 +105,12 @@ SQLAlchemy .. code-block:: python from sqlalchemy import * + from sqlalchemy.engine import create_engine + from sqlalchemy.schema import * engine = create_engine('bigquery://project') - metadata_obj = MetaData() - table = Table('dataset.table', metadata_obj, autoload_with=engine) - with engine.connect() as conn: - print(conn.execute(select(func.count("*")).select_from(table)).scalar()) + table = Table('dataset.table', MetaData(bind=engine), autoload=True) + print(select([func.count('*')], from_obj=table().scalar()) + Project ^^^^^^^ @@ -205,8 +206,7 @@ Note that specifying a default dataset doesn't restrict execution of queries to engine = create_engine('bigquery://project/dataset_a') # This will still execute and return rows from dataset_b - with engine.connect() as conn: - conn.execute(sqlalchemy.text('SELECT * FROM dataset_b.table')).fetchall() + engine.execute('SELECT * FROM dataset_b.table').fetchall() Connection String Parameters diff --git a/noxfile.py b/noxfile.py index 1128e855..3b9855c4 100644 --- a/noxfile.py +++ b/noxfile.py @@ -212,7 +212,7 @@ def default(session, install_extras=True): else: install_target = "." session.install("-e", install_target, "-c", constraints_path) - + session.run("python", "-m", "pip", "freeze") # Run py.test against the unit tests. session.run( "py.test", diff --git a/setup.cfg b/setup.cfg index 53ed8f6f..ce76697b 100644 --- a/setup.cfg +++ b/setup.cfg @@ -25,3 +25,6 @@ profile_file=.sqlalchemy_dialect_compliance-profiles.txt [tool:pytest] addopts= --tb native -v -r fxX -p no:warnings python_files=tests/*test_*.py +markers = + mypy: marks tests related to mypy (deselect with '-m "not mypy"') + backend diff --git a/setup.py b/setup.py index 22894dd8..444a6fdc 100644 --- a/setup.py +++ b/setup.py @@ -101,7 +101,7 @@ def readme(): "google-auth>=1.25.0,<3.0.0dev", # Work around pip wack. "google-cloud-bigquery>=3.3.6,<4.0.0dev", "packaging", - "sqlalchemy>=2.0", + "sqlalchemy>=1.4", ], extras_require=extras, python_requires=">=3.8, <3.13", diff --git a/sqlalchemy_bigquery/base.py b/sqlalchemy_bigquery/base.py index 65accf58..712f0663 100644 --- a/sqlalchemy_bigquery/base.py +++ b/sqlalchemy_bigquery/base.py @@ -375,7 +375,7 @@ def visit_in_op_binary(self, binary, operator_, **kw): self._generate_generic_binary(binary, " IN ", **kw) ) - def visit_empty_set_expr(self, element_types): + def visit_empty_set_expr(self, element_types, **kw): return "" def visit_not_in_op_binary(self, binary, operator, **kw): @@ -624,15 +624,15 @@ def visit_NUMERIC(self, type_, **kw): class BigQueryDDLCompiler(DDLCompiler): # BigQuery has no support for foreign keys. - def visit_foreign_key_constraint(self, constraint): + def visit_foreign_key_constraint(self, constraint, **kw): return None # BigQuery has no support for primary keys. - def visit_primary_key_constraint(self, constraint): + def visit_primary_key_constraint(self, constraint, **kw): return None # BigQuery has no support for unique constraints. - def visit_unique_constraint(self, constraint): + def visit_unique_constraint(self, constraint, **kw): return None def get_column_specification(self, column, **kwargs): @@ -667,14 +667,14 @@ def post_create_table(self, table): return "" - def visit_set_table_comment(self, create): + def visit_set_table_comment(self, create, **kw): table_name = self.preparer.format_table(create.element) description = self.sql_compiler.render_literal_value( create.element.comment, sqlalchemy.sql.sqltypes.String() ) return f"ALTER TABLE {table_name} SET OPTIONS(description={description})" - def visit_drop_table_comment(self, drop): + def visit_drop_table_comment(self, drop, **kw): table_name = self.preparer.format_table(drop.element) return f"ALTER TABLE {table_name} SET OPTIONS(description=null)" @@ -1070,7 +1070,8 @@ def __init__(self, *args, **kwargs): if isinstance(arg, sqlalchemy.sql.expression.ColumnElement): if not ( isinstance(arg.type, sqlalchemy.sql.sqltypes.ARRAY) - or (hasattr(arg.type, "impl") and isinstance(arg.type.impl, sqlalchemy.sql.sqltypes.ARRAY)) + or (hasattr(arg.type, "impl") + and isinstance(arg.type.impl, sqlalchemy.sql.sqltypes.ARRAY)) ): raise TypeError("The argument to unnest must have an ARRAY type.") self.type = arg.type.item_type diff --git a/sqlalchemy_bigquery/requirements.py b/sqlalchemy_bigquery/requirements.py index 854114d8..49d645f9 100644 --- a/sqlalchemy_bigquery/requirements.py +++ b/sqlalchemy_bigquery/requirements.py @@ -24,7 +24,7 @@ import sqlalchemy.testing.requirements import sqlalchemy.testing.exclusions - +from sqlalchemy.testing.exclusions import against, only_on supported = sqlalchemy.testing.exclusions.open unsupported = sqlalchemy.testing.exclusions.closed diff --git a/tests/sqlalchemy_dialect_compliance/test_dialect_compliance.py b/tests/sqlalchemy_dialect_compliance/test_dialect_compliance.py index 30678b02..206f8167 100644 --- a/tests/sqlalchemy_dialect_compliance/test_dialect_compliance.py +++ b/tests/sqlalchemy_dialect_compliance/test_dialect_compliance.py @@ -28,7 +28,10 @@ import sqlalchemy.testing.suite.test_types import sqlalchemy.sql.sqltypes -from sqlalchemy.testing import util +from sqlalchemy.testing import util, config +from sqlalchemy.testing import is_false +from sqlalchemy.testing import is_true +from sqlalchemy.testing import is_ from sqlalchemy.testing.assertions import eq_ from sqlalchemy.testing.suite import config, select, exists from sqlalchemy.testing.suite import * # noqa @@ -46,6 +49,18 @@ NumericTest as _NumericTest, ) +from sqlalchemy.testing.suite.test_types import ( + ArrayTest, + NumericTest, +) + +from sqlalchemy.testing.suite.test_reflection import ( + BizarroCharacterFKResolutionTest, + ComponentReflectionTest, + OneConnectionTablesTest, + HasTableTest as _HasTableTest, +) + if packaging.version.parse(sqlalchemy.__version__) >= packaging.version.parse("2.0"): from sqlalchemy.sql import type_coerce @@ -271,7 +286,8 @@ def test_simple_offset(self): test_bound_offset = test_simple_offset test_expr_offset = test_simple_offset_zero = test_simple_offset - + test_limit_offset_nobinds = test_simple_offset # TODO figure out + # how to prevent this from failing # The original test is missing an order by. # Also, note that sqlalchemy union is a union distinct, not a @@ -400,12 +416,12 @@ def test_delete(self): del QuotedNameArgumentTest -# class InsertBehaviorTest(_InsertBehaviorTest): -# @pytest.mark.skip( -# "BQ has no autoinc and client-side defaults can't work for select." -# ) -# def test_insert_from_select_autoinc(cls): -# pass +class InsertBehaviorTest(_InsertBehaviorTest): + @pytest.mark.skip( + "BQ has no autoinc and client-side defaults can't work for select." + ) + def test_insert_from_select_autoinc(cls): + pass class ExistsTest(_ExistsTest): @@ -478,14 +494,21 @@ def test_insert_from_select_round_trip(self): def test_select_recursive_round_trip(self): pass +del ComponentReflectionTest # Multiple tests re: CHECK CONSTRAINTS, etc which + # BQ does not support +# class ComponentReflectionTest(_ComponentReflectionTest): +# @pytest.mark.skip("Big query types don't track precision, length, etc.") +# def course_grained_types(): +# pass -class ComponentReflectionTest(_ComponentReflectionTest): - @pytest.mark.skip("Big query types don't track precision, length, etc.") - def course_grained_types(): - pass +# test_numeric_reflection = test_varchar_reflection = course_grained_types - test_numeric_reflection = test_varchar_reflection = course_grained_types +# @pytest.mark.skip("BQ doesn't have indexes (in the way these tests expect).") +# def test_get_indexes(self): +# pass - @pytest.mark.skip("BQ doesn't have indexes (in the way these tests expect).") - def test_get_indexes(self): - pass +del ArrayTest # only appears to apply to postgresql +del BizarroCharacterFKResolutionTest +del NumericTest.test_float_as_float +del NumericTest.test_float_as_decimal +del HasTableTest.test_has_table_cache # TODO confirm whether BQ has table caching \ No newline at end of file From 4a5c291049cef0e8e39697823fb26c2ebeaee548 Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Mon, 11 Dec 2023 16:40:33 +0000 Subject: [PATCH 19/62] =?UTF-8?q?=F0=9F=A6=89=20Updates=20from=20OwlBot=20?= =?UTF-8?q?post-processor?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --- noxfile.py | 2 +- setup.cfg | 3 --- sqlalchemy_bigquery/requirements.py | 1 + .../test_dialect_compliance.py | 15 ++++++++------- 4 files changed, 10 insertions(+), 11 deletions(-) diff --git a/noxfile.py b/noxfile.py index 3b9855c4..1128e855 100644 --- a/noxfile.py +++ b/noxfile.py @@ -212,7 +212,7 @@ def default(session, install_extras=True): else: install_target = "." session.install("-e", install_target, "-c", constraints_path) - session.run("python", "-m", "pip", "freeze") + # Run py.test against the unit tests. session.run( "py.test", diff --git a/setup.cfg b/setup.cfg index ce76697b..53ed8f6f 100644 --- a/setup.cfg +++ b/setup.cfg @@ -25,6 +25,3 @@ profile_file=.sqlalchemy_dialect_compliance-profiles.txt [tool:pytest] addopts= --tb native -v -r fxX -p no:warnings python_files=tests/*test_*.py -markers = - mypy: marks tests related to mypy (deselect with '-m "not mypy"') - backend diff --git a/sqlalchemy_bigquery/requirements.py b/sqlalchemy_bigquery/requirements.py index c97896ba..af6dec75 100644 --- a/sqlalchemy_bigquery/requirements.py +++ b/sqlalchemy_bigquery/requirements.py @@ -25,6 +25,7 @@ import sqlalchemy.testing.requirements import sqlalchemy.testing.exclusions from sqlalchemy.testing.exclusions import against, only_on + supported = sqlalchemy.testing.exclusions.open unsupported = sqlalchemy.testing.exclusions.closed diff --git a/tests/sqlalchemy_dialect_compliance/test_dialect_compliance.py b/tests/sqlalchemy_dialect_compliance/test_dialect_compliance.py index d76f12ee..0b7198e9 100644 --- a/tests/sqlalchemy_dialect_compliance/test_dialect_compliance.py +++ b/tests/sqlalchemy_dialect_compliance/test_dialect_compliance.py @@ -420,8 +420,8 @@ def test_simple_offset(self): test_bound_offset = test_simple_offset test_expr_offset = test_simple_offset_zero = test_simple_offset - test_limit_offset_nobinds = test_simple_offset # TODO figure out - # how to prevent this from failing + test_limit_offset_nobinds = test_simple_offset # TODO figure out + # how to prevent this from failing # The original test is missing an order by. # Also, note that sqlalchemy union is a union distinct, not a @@ -628,8 +628,9 @@ def test_insert_from_select_round_trip(self): def test_select_recursive_round_trip(self): pass -del ComponentReflectionTest # Multiple tests re: CHECK CONSTRAINTS, etc which - # BQ does not support + +del ComponentReflectionTest # Multiple tests re: CHECK CONSTRAINTS, etc which +# BQ does not support # class ComponentReflectionTest(_ComponentReflectionTest): # @pytest.mark.skip("Big query types don't track precision, length, etc.") # def course_grained_types(): @@ -641,8 +642,8 @@ def test_select_recursive_round_trip(self): # def test_get_indexes(self): # pass -del ArrayTest # only appears to apply to postgresql -del BizarroCharacterFKResolutionTest +del ArrayTest # only appears to apply to postgresql +del BizarroCharacterFKResolutionTest del NumericTest.test_float_as_float del NumericTest.test_float_as_decimal -del HasTableTest.test_has_table_cache # TODO confirm whether BQ has table caching \ No newline at end of file +del HasTableTest.test_has_table_cache # TODO confirm whether BQ has table caching From c4c99763b15ba33e5ff5b2acfe575e3d3fec1516 Mon Sep 17 00:00:00 2001 From: kiraksi Date: Mon, 11 Dec 2023 15:15:22 -0800 Subject: [PATCH 20/62] Fixed small error in compliance tests, added pip freeze for owl bot testing --- noxfile.py | 3 +++ .../sqlalchemy_dialect_compliance/test_dialect_compliance.py | 5 +---- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/noxfile.py b/noxfile.py index 3b9855c4..bcce27a8 100644 --- a/noxfile.py +++ b/noxfile.py @@ -416,6 +416,9 @@ def cover(session): test runs (not system test runs), and then erases coverage data. """ session.install("coverage", "pytest-cov") + + session.run("python", "-m", "pip", "freeze") + session.run("coverage", "report", "--show-missing", "--fail-under=100") session.run("coverage", "erase") diff --git a/tests/sqlalchemy_dialect_compliance/test_dialect_compliance.py b/tests/sqlalchemy_dialect_compliance/test_dialect_compliance.py index d76f12ee..41b23ea1 100644 --- a/tests/sqlalchemy_dialect_compliance/test_dialect_compliance.py +++ b/tests/sqlalchemy_dialect_compliance/test_dialect_compliance.py @@ -48,14 +48,13 @@ from sqlalchemy.testing.suite.test_types import ( ArrayTest, - NumericTest, ) from sqlalchemy.testing.suite.test_reflection import ( BizarroCharacterFKResolutionTest, ComponentReflectionTest, OneConnectionTablesTest, - HasTableTest as _HasTableTest, + HasTableTest, ) if packaging.version.parse(sqlalchemy.__version__) >= packaging.version.parse("2.0"): @@ -643,6 +642,4 @@ def test_select_recursive_round_trip(self): del ArrayTest # only appears to apply to postgresql del BizarroCharacterFKResolutionTest -del NumericTest.test_float_as_float -del NumericTest.test_float_as_decimal del HasTableTest.test_has_table_cache # TODO confirm whether BQ has table caching \ No newline at end of file From 2612a7047bfcd47c0ad02a6b8b43926b1c657ac1 Mon Sep 17 00:00:00 2001 From: kiraksi Date: Tue, 12 Dec 2023 01:59:48 -0800 Subject: [PATCH 21/62] Fixed some failing compliance tests by reformatting --- .../test_dialect_compliance.py | 25 +++++++++++-------- 1 file changed, 15 insertions(+), 10 deletions(-) diff --git a/tests/sqlalchemy_dialect_compliance/test_dialect_compliance.py b/tests/sqlalchemy_dialect_compliance/test_dialect_compliance.py index 3d29f9b6..e4c1d1cc 100644 --- a/tests/sqlalchemy_dialect_compliance/test_dialect_compliance.py +++ b/tests/sqlalchemy_dialect_compliance/test_dialect_compliance.py @@ -59,12 +59,14 @@ if packaging.version.parse(sqlalchemy.__version__) >= packaging.version.parse("2.0"): from sqlalchemy.sql import type_coerce + from sqlalchemy import create_engine from sqlalchemy.testing.suite import ( TrueDivTest as _TrueDivTest, IntegerTest as _IntegerTest, NumericTest as _NumericTest, DifficultParametersTest as _DifficultParametersTest, FetchLimitOffsetTest as _FetchLimitOffsetTest, + PostCompileParamsTest as _PostCompileParamsTest, ) class TimestampMicrosecondsTest(_TimestampMicrosecondsTest): @@ -313,6 +315,9 @@ def test_simple_offset(self): test_bound_offset = test_simple_offset test_expr_offset = test_simple_offset_zero = test_simple_offset + test_limit_offset_nobinds = test_simple_offset # TODO figure out + # how to prevent this from failing + # The original test is missing an order by. # The original test is missing an order by. @@ -334,7 +339,7 @@ def test_limit_render_multiple_times(self, connection): del DistinctOnTest # expects unquoted table names. del HasIndexTest # BQ doesn't do the indexes that SQLA is loooking for. del IdentityAutoincrementTest # BQ doesn't do autoincrement - + del PostCompileParamsTest # BQ adds backticks to bind parameters, causing failure of tests TODO: fix this? elif packaging.version.parse(sqlalchemy.__version__) < packaging.version.parse("1.4"): from sqlalchemy.testing.suite import LimitOffsetTest as _LimitOffsetTest @@ -549,12 +554,12 @@ def test_delete(self): del QuotedNameArgumentTest -class InsertBehaviorTest(_InsertBehaviorTest): - @pytest.mark.skip( - "BQ has no autoinc and client-side defaults can't work for select." - ) - def test_insert_from_select_autoinc(cls): - pass +# class InsertBehaviorTest(_InsertBehaviorTest): +# @pytest.mark.skip( +# "BQ has no autoinc and client-side defaults can't work for select." +# ) +# def test_insert_from_select_autoinc(cls): +# pass class ExistsTest(_ExistsTest): @@ -641,6 +646,6 @@ def test_select_recursive_round_trip(self): # def test_get_indexes(self): # pass -del ArrayTest # only appears to apply to postgresql -del BizarroCharacterFKResolutionTest -del HasTableTest.test_has_table_cache # TODO confirm whether BQ has table caching \ No newline at end of file +del ArrayTest # only appears to apply to postgresql +del BizarroCharacterFKResolutionTest +del HasTableTest.test_has_table_cache # TODO confirm whether BQ has table caching From 5d3d850ab9ada6a6f7d2ba6320dd08c555cb65c7 Mon Sep 17 00:00:00 2001 From: kiraksi Date: Tue, 12 Dec 2023 23:35:31 -0800 Subject: [PATCH 22/62] Added UuidTest to compliance tests --- .../test_dialect_compliance.py | 118 +++++++++++++++++- 1 file changed, 116 insertions(+), 2 deletions(-) diff --git a/tests/sqlalchemy_dialect_compliance/test_dialect_compliance.py b/tests/sqlalchemy_dialect_compliance/test_dialect_compliance.py index e4c1d1cc..50638cd4 100644 --- a/tests/sqlalchemy_dialect_compliance/test_dialect_compliance.py +++ b/tests/sqlalchemy_dialect_compliance/test_dialect_compliance.py @@ -58,15 +58,17 @@ ) if packaging.version.parse(sqlalchemy.__version__) >= packaging.version.parse("2.0"): + import uuid from sqlalchemy.sql import type_coerce - from sqlalchemy import create_engine + from sqlalchemy import Uuid from sqlalchemy.testing.suite import ( TrueDivTest as _TrueDivTest, IntegerTest as _IntegerTest, NumericTest as _NumericTest, DifficultParametersTest as _DifficultParametersTest, FetchLimitOffsetTest as _FetchLimitOffsetTest, - PostCompileParamsTest as _PostCompileParamsTest, + PostCompileParamsTest, + UuidTest as _UuidTest, ) class TimestampMicrosecondsTest(_TimestampMicrosecondsTest): @@ -335,6 +337,111 @@ def test_limit_render_multiple_times(self, connection): [(1,)], ) + class UuidTest(_UuidTest): + @classmethod + def define_tables(cls, metadata): + Table( + "uuid_table", + metadata, + Column("id", Integer, primary_key=True, test_needs_autoincrement=True), + Column("uuid_data", String), # Use native UUID for primary data + Column( + "uuid_text_data", String, nullable=True + ), # Optional text representation + Column("uuid_data_nonnative", String), + Column("uuid_text_data_nonnative", String), + ) + + def test_uuid_round_trip(self, connection): + data = str(uuid.uuid4()) + uuid_table = self.tables.uuid_table + + connection.execute( + uuid_table.insert(), + {"id": 1, "uuid_data": data, "uuid_data_nonnative": data}, + ) + row = connection.execute( + select(uuid_table.c.uuid_data, uuid_table.c.uuid_data_nonnative).where( + uuid_table.c.uuid_data == data, + uuid_table.c.uuid_data_nonnative == data, + ) + ).first() + eq_(row, (data, data)) + + def test_uuid_text_round_trip(self, connection): + data = str(uuid.uuid4()) + uuid_table = self.tables.uuid_table + + connection.execute( + uuid_table.insert(), + { + "id": 1, + "uuid_text_data": data, + "uuid_text_data_nonnative": data, + }, + ) + row = connection.execute( + select( + uuid_table.c.uuid_text_data, + uuid_table.c.uuid_text_data_nonnative, + ).where( + uuid_table.c.uuid_text_data == data, + uuid_table.c.uuid_text_data_nonnative == data, + ) + ).first() + eq_((row[0].lower(), row[1].lower()), (data, data)) + + def test_literal_uuid(self, literal_round_trip): + data = str(uuid.uuid4()) + literal_round_trip(String(), [data], [data]) + + def test_literal_text(self, literal_round_trip): + data = str(uuid.uuid4()) + literal_round_trip( + String(), + [data], + [data], + filter_=lambda x: x.lower(), + ) + + def test_literal_nonnative_uuid(self, literal_round_trip): + data = str(uuid.uuid4()) + literal_round_trip(String(), [data], [data]) + + def test_literal_nonnative_text(self, literal_round_trip): + data = str(uuid.uuid4()) + literal_round_trip( + String(), + [data], + [data], + filter_=lambda x: x.lower(), + ) + + @testing.requires.insert_returning + def test_uuid_returning(self, connection): + data = str(uuid.uuid4()) + str_data = str(data) + uuid_table = self.tables.uuid_table + + result = connection.execute( + uuid_table.insert().returning( + uuid_table.c.uuid_data, + uuid_table.c.uuid_text_data, + uuid_table.c.uuid_data_nonnative, + uuid_table.c.uuid_text_data_nonnative, + ), + { + "id": 1, + "uuid_data": data, + "uuid_text_data": str_data, + "uuid_data_nonnative": data, + "uuid_text_data_nonnative": str_data, + }, + ) + row = result.first() + + eq_(row, (data, str_data, data, str_data)) + # from else statement .... del DistinctOnTest # expects unquoted table names. del HasIndexTest # BQ doesn't do the indexes that SQLA is loooking for. @@ -525,6 +632,13 @@ def insert_data(cls, connection): ], ) + class InsertBehaviorTest(_InsertBehaviorTest): + @pytest.mark.skip( + "BQ has no autoinc and client-side defaults can't work for select." + ) + def test_insert_from_select_autoinc(cls): + pass + class SimpleUpdateDeleteTest(_SimpleUpdateDeleteTest): """The base tests fail if operations return rows for some reason.""" From a3a4ee6e12b38e3ea9a0ac4cabb890bd77c7a9cb Mon Sep 17 00:00:00 2001 From: kiraksi Date: Fri, 15 Dec 2023 09:58:41 -0800 Subject: [PATCH 23/62] Moved back sqlalchemy constraints to 1.4 --- noxfile.py | 6 ++-- owlbot.py | 72 +++++++++++++++++++------------------ setup.py | 2 +- testing/constraints-3.8.txt | 2 +- 4 files changed, 42 insertions(+), 40 deletions(-) diff --git a/noxfile.py b/noxfile.py index a6884868..93b52232 100644 --- a/noxfile.py +++ b/noxfile.py @@ -369,7 +369,7 @@ def compliance(session): session.skip("Compliance tests were not found") session.install("--pre", "grpcio") - session.install("--pre", "--no-deps", "--upgrade", "sqlalchemy>=2.0.0,<2.1") + session.install("--pre", "--no-deps", "--upgrade", "sqlalchemy>=1.4,<2.1") session.install( "mock", "pytest", @@ -418,7 +418,7 @@ def cover(session): session.install("coverage", "pytest-cov") session.run("python", "-m", "pip", "freeze") - + session.run("coverage", "report", "--show-missing", "--fail-under=100") session.run("coverage", "erase") @@ -527,7 +527,7 @@ def prerelease_deps(session): prerel_deps = [ "protobuf", - "sqlalchemy>=2.0.0,<2.1", + "sqlalchemy>=1.4,<2.1", # dependency of grpc "six", "googleapis-common-protos", diff --git a/owlbot.py b/owlbot.py index 8fbda63e..06e9ada4 100644 --- a/owlbot.py +++ b/owlbot.py @@ -42,14 +42,17 @@ system_test_extras=extras, system_test_extras_by_python=extras_by_python, ) -s.move(templated_files, excludes=[ - # sqlalchemy-bigquery was originally licensed MIT - "LICENSE", - "docs/multiprocessing.rst", - # exclude gh actions as credentials are needed for tests - ".github/workflows", - "README.rst", -]) +s.move( + templated_files, + excludes=[ + # sqlalchemy-bigquery was originally licensed MIT + "LICENSE", + "docs/multiprocessing.rst", + # exclude gh actions as credentials are needed for tests + ".github/workflows", + "README.rst", + ], +) # ---------------------------------------------------------------------------- # Fixup files @@ -59,7 +62,7 @@ [".coveragerc"], "google/cloud/__init__.py", "sqlalchemy_bigquery/requirements.py", - ) +) s.replace( ["noxfile.py"], @@ -75,12 +78,14 @@ s.replace( - ["noxfile.py"], "--cov=google", "--cov=sqlalchemy_bigquery", + ["noxfile.py"], + "--cov=google", + "--cov=sqlalchemy_bigquery", ) s.replace( - ["noxfile.py"], + ["noxfile.py"], "\+ SYSTEM_TEST_EXTRAS", "", ) @@ -88,35 +93,34 @@ s.replace( ["noxfile.py"], - '''"protobuf", - # dependency of grpc''', - '''"protobuf", - "sqlalchemy>=2.0.0,<2.1", - # dependency of grpc''', + """"protobuf", + # dependency of grpc""", + """"protobuf", + "sqlalchemy>=1.4,<2.1", + # dependency of grpc""", ) s.replace( ["noxfile.py"], r"def default\(session\)", - "def default(session, install_extras=True)", + "def default(session, install_extras=True)", ) - - def place_before(path, text, *before_text, escape=None): replacement = "\n".join(before_text) + "\n" + text if escape: for c in escape: - text = text.replace(c, '\\' + c) + text = text.replace(c, "\\" + c) s.replace([path], text, replacement) + place_before( "noxfile.py", "SYSTEM_TEST_PYTHON_VERSIONS=", "", - "# We're using two Python versions to test with sqlalchemy>=2.0.0", + "# We're using two Python versions to test with sqlalchemy>=1.4", ) place_before( @@ -126,7 +130,7 @@ def place_before(path, text, *before_text, escape=None): ) -install_logic = ''' +install_logic = """ if install_extras and session.python in ["3.11", "3.12"]: install_target = ".[geography,alembic,tests,bqstorage]" elif install_extras: @@ -134,7 +138,7 @@ def place_before(path, text, *before_text, escape=None): else: install_target = "." session.install("-e", install_target, "-c", constraints_path) -''' +""" place_before( "noxfile.py", @@ -163,7 +167,7 @@ def compliance(session): session.skip("Compliance tests were not found") session.install("--pre", "grpcio") - session.install("--pre", "--no-deps", "--upgrade", "sqlalchemy>=2.0.0,<2.1") + session.install("--pre", "--no-deps", "--upgrade", "sqlalchemy>=1.4,<2.1") session.install( "mock", "pytest", @@ -205,12 +209,11 @@ def compliance(session): ''' place_before( - "noxfile.py", - "@nox.session(python=DEFAULT_PYTHON_VERSION)\n" - "def cover(session):", - compliance, - escape="()", - ) + "noxfile.py", + "@nox.session(python=DEFAULT_PYTHON_VERSION)\n" "def cover(session):", + compliance, + escape="()", +) s.replace(["noxfile.py"], '"alabaster"', '"alabaster", "geoalchemy2", "shapely"') @@ -266,11 +269,10 @@ def system_noextras(session): place_before( "noxfile.py", - "@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS[-1])\n" - "def compliance(session):", + "@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS[-1])\n" "def compliance(session):", system_noextras, escape="()[]", - ) +) # Add DB config for SQLAlchemy dialect test suite. @@ -287,7 +289,7 @@ def system_noextras(session): [tool:pytest] addopts= --tb native -v -r fxX -p no:warnings python_files=tests/*test_*.py -""" +""", ) # ---------------------------------------------------------------------------- @@ -298,7 +300,7 @@ def system_noextras(session): python.py_samples(skip_readmes=True) s.replace( - ["./samples/snippets/noxfile.py"], + ["./samples/snippets/noxfile.py"], """session.install\("-e", _get_repo_root\(\)\)""", """session.install("-e", _get_repo_root()) else: diff --git a/setup.py b/setup.py index 1c27f33a..588b7f33 100644 --- a/setup.py +++ b/setup.py @@ -101,7 +101,7 @@ def readme(): "google-auth>=1.25.0,<3.0.0dev", # Work around pip wack. "google-cloud-bigquery>=3.3.6,<4.0.0dev", "packaging", - "sqlalchemy>=2.0,<2.0.23", + "sqlalchemy>=1.4,<2.0.23", ], extras_require=extras, python_requires=">=3.8, <3.13", diff --git a/testing/constraints-3.8.txt b/testing/constraints-3.8.txt index 0c1311d0..0f6d97ae 100644 --- a/testing/constraints-3.8.txt +++ b/testing/constraints-3.8.txt @@ -4,7 +4,7 @@ # Pin the version to the lower bound. # # e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev", -sqlalchemy==2.0.0 +sqlalchemy>=1.4.0 google-auth==1.25.0 google-cloud-bigquery==3.3.6 google-cloud-bigquery-storage==2.0.0 From a71e1ee370710ad15379507688a43cac083b11ee Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Fri, 15 Dec 2023 12:33:42 -0600 Subject: [PATCH 24/62] Update testing/constraints-3.8.txt --- testing/constraints-3.8.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/testing/constraints-3.8.txt b/testing/constraints-3.8.txt index 0f6d97ae..03a23807 100644 --- a/testing/constraints-3.8.txt +++ b/testing/constraints-3.8.txt @@ -4,7 +4,7 @@ # Pin the version to the lower bound. # # e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev", -sqlalchemy>=1.4.0 +sqlalchemy==1.4.0 google-auth==1.25.0 google-cloud-bigquery==3.3.6 google-cloud-bigquery-storage==2.0.0 From 708ec59ca57af466f792be7816fcde8f883159ef Mon Sep 17 00:00:00 2001 From: kiraksi Date: Mon, 18 Dec 2023 13:49:00 -0800 Subject: [PATCH 25/62] Fixed minimum version of sqlalchemy for 1.4 backwards compatibility --- noxfile.py | 4 ++-- owlbot.py | 6 +++--- samples/snippets/requirements.txt | 2 +- setup.py | 2 +- testing/constraints-3.8.txt | 2 +- 5 files changed, 8 insertions(+), 8 deletions(-) diff --git a/noxfile.py b/noxfile.py index 93b52232..0e0520a3 100644 --- a/noxfile.py +++ b/noxfile.py @@ -369,7 +369,7 @@ def compliance(session): session.skip("Compliance tests were not found") session.install("--pre", "grpcio") - session.install("--pre", "--no-deps", "--upgrade", "sqlalchemy>=1.4,<2.1") + session.install("--pre", "--no-deps", "--upgrade", "sqlalchemy>=1.4.15,<2.1") session.install( "mock", "pytest", @@ -527,7 +527,7 @@ def prerelease_deps(session): prerel_deps = [ "protobuf", - "sqlalchemy>=1.4,<2.1", + "sqlalchemy>=1.4.15,<2.1", # dependency of grpc "six", "googleapis-common-protos", diff --git a/owlbot.py b/owlbot.py index 06e9ada4..50c574e9 100644 --- a/owlbot.py +++ b/owlbot.py @@ -96,7 +96,7 @@ """"protobuf", # dependency of grpc""", """"protobuf", - "sqlalchemy>=1.4,<2.1", + "sqlalchemy>=1.4.15,<2.1", # dependency of grpc""", ) @@ -120,7 +120,7 @@ def place_before(path, text, *before_text, escape=None): "noxfile.py", "SYSTEM_TEST_PYTHON_VERSIONS=", "", - "# We're using two Python versions to test with sqlalchemy>=1.4", + "# We're using two Python versions to test with sqlalchemy>=1.4.15", ) place_before( @@ -167,7 +167,7 @@ def compliance(session): session.skip("Compliance tests were not found") session.install("--pre", "grpcio") - session.install("--pre", "--no-deps", "--upgrade", "sqlalchemy>=1.4,<2.1") + session.install("--pre", "--no-deps", "--upgrade", "sqlalchemy>=1.4.15,<2.1") session.install( "mock", "pytest", diff --git a/samples/snippets/requirements.txt b/samples/snippets/requirements.txt index 447d2966..ffb51928 100644 --- a/samples/snippets/requirements.txt +++ b/samples/snippets/requirements.txt @@ -28,6 +28,6 @@ requests==2.31.0 rsa==4.9 shapely==2.0.2 six==1.16.0 -sqlalchemy===2.0.22 +sqlalchemy===1.4.15 typing-extensions==4.9.0 urllib3==2.1.0 diff --git a/setup.py b/setup.py index 588b7f33..80432fed 100644 --- a/setup.py +++ b/setup.py @@ -101,7 +101,7 @@ def readme(): "google-auth>=1.25.0,<3.0.0dev", # Work around pip wack. "google-cloud-bigquery>=3.3.6,<4.0.0dev", "packaging", - "sqlalchemy>=1.4,<2.0.23", + "sqlalchemy>=1.4.15,<2.1", ], extras_require=extras, python_requires=">=3.8, <3.13", diff --git a/testing/constraints-3.8.txt b/testing/constraints-3.8.txt index 03a23807..8fc6fe4e 100644 --- a/testing/constraints-3.8.txt +++ b/testing/constraints-3.8.txt @@ -4,7 +4,7 @@ # Pin the version to the lower bound. # # e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev", -sqlalchemy==1.4.0 +sqlalchemy==1.4.15 google-auth==1.25.0 google-cloud-bigquery==3.3.6 google-cloud-bigquery-storage==2.0.0 From 9a9bc6b020e8a83ce2053e6272d8bb2fa06a0ff4 Mon Sep 17 00:00:00 2001 From: kiraksi Date: Mon, 18 Dec 2023 14:01:13 -0800 Subject: [PATCH 26/62] Bumping support for sqlalchemy 1.4.16 for sample tests --- noxfile.py | 2 +- owlbot.py | 6 +++--- testing/constraints-3.8.txt | 2 +- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/noxfile.py b/noxfile.py index 0e0520a3..a6b9eed4 100644 --- a/noxfile.py +++ b/noxfile.py @@ -527,7 +527,7 @@ def prerelease_deps(session): prerel_deps = [ "protobuf", - "sqlalchemy>=1.4.15,<2.1", + "sqlalchemy>=1.4.16,<2.1", # dependency of grpc "six", "googleapis-common-protos", diff --git a/owlbot.py b/owlbot.py index 50c574e9..d928ac59 100644 --- a/owlbot.py +++ b/owlbot.py @@ -96,7 +96,7 @@ """"protobuf", # dependency of grpc""", """"protobuf", - "sqlalchemy>=1.4.15,<2.1", + "sqlalchemy>=1.4.16,<2.1", # dependency of grpc""", ) @@ -120,7 +120,7 @@ def place_before(path, text, *before_text, escape=None): "noxfile.py", "SYSTEM_TEST_PYTHON_VERSIONS=", "", - "# We're using two Python versions to test with sqlalchemy>=1.4.15", + "# We're using two Python versions to test with sqlalchemy>=1.4.16", ) place_before( @@ -167,7 +167,7 @@ def compliance(session): session.skip("Compliance tests were not found") session.install("--pre", "grpcio") - session.install("--pre", "--no-deps", "--upgrade", "sqlalchemy>=1.4.15,<2.1") + session.install("--pre", "--no-deps", "--upgrade", "sqlalchemy>=1.4.16,<2.1") session.install( "mock", "pytest", diff --git a/testing/constraints-3.8.txt b/testing/constraints-3.8.txt index 8fc6fe4e..667a747d 100644 --- a/testing/constraints-3.8.txt +++ b/testing/constraints-3.8.txt @@ -4,7 +4,7 @@ # Pin the version to the lower bound. # # e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev", -sqlalchemy==1.4.15 +sqlalchemy==1.4.16 google-auth==1.25.0 google-cloud-bigquery==3.3.6 google-cloud-bigquery-storage==2.0.0 From 740bfcd23410b208344faa65aa279cd231b7934e Mon Sep 17 00:00:00 2001 From: kiraksi Date: Mon, 18 Dec 2023 14:07:04 -0800 Subject: [PATCH 27/62] Bump setup.py sqlalchemy to 1.4.16 --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 80432fed..31565afa 100644 --- a/setup.py +++ b/setup.py @@ -101,7 +101,7 @@ def readme(): "google-auth>=1.25.0,<3.0.0dev", # Work around pip wack. "google-cloud-bigquery>=3.3.6,<4.0.0dev", "packaging", - "sqlalchemy>=1.4.15,<2.1", + "sqlalchemy>=1.4.16,<2.1", ], extras_require=extras, python_requires=">=3.8, <3.13", From 75038b0f42f34dc7c685b25a39da0b43f431bcb6 Mon Sep 17 00:00:00 2001 From: kiraksi Date: Mon, 18 Dec 2023 15:44:56 -0800 Subject: [PATCH 28/62] Updated compliance sqlalchemy to 1.4.16 --- noxfile.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/noxfile.py b/noxfile.py index a6b9eed4..f89f181b 100644 --- a/noxfile.py +++ b/noxfile.py @@ -369,7 +369,7 @@ def compliance(session): session.skip("Compliance tests were not found") session.install("--pre", "grpcio") - session.install("--pre", "--no-deps", "--upgrade", "sqlalchemy>=1.4.15,<2.1") + session.install("--pre", "--no-deps", "--upgrade", "sqlalchemy>=1.4.16,<2.1") session.install( "mock", "pytest", From 455dbeb7067d6b5e96a91f208285f72b4a27da86 Mon Sep 17 00:00:00 2001 From: kiraksi Date: Tue, 19 Dec 2023 14:00:55 -0800 Subject: [PATCH 29/62] Fixed broken code in last merged main, as we need to avoid duplicate entries and potential implicit joins due to identical table names --- sqlalchemy_bigquery/base.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/sqlalchemy_bigquery/base.py b/sqlalchemy_bigquery/base.py index dcbb2293..a44cd35d 100644 --- a/sqlalchemy_bigquery/base.py +++ b/sqlalchemy_bigquery/base.py @@ -275,7 +275,8 @@ def _known_tables(self): asfrom_froms = self.stack[-1].get("asfrom_froms", []) for from_ in asfrom_froms: if isinstance(from_, Table): - known_tables.add(from_.name) + if from_.name not in known_tables: + known_tables.add(from_.name) return known_tables From da9f228dd32466039137b54772030f55797a590a Mon Sep 17 00:00:00 2001 From: kiraksi Date: Tue, 19 Dec 2023 22:51:32 -0800 Subject: [PATCH 30/62] modified tests for join order variation in 1.4 vs 2.0 --- noxfile.py | 3 +++ sqlalchemy_bigquery/base.py | 3 +-- tests/unit/conftest.py | 9 +++++++- tests/unit/test_compiler.py | 41 ++++++++++++++++++++++--------------- 4 files changed, 36 insertions(+), 20 deletions(-) diff --git a/noxfile.py b/noxfile.py index f89f181b..5a5ed906 100644 --- a/noxfile.py +++ b/noxfile.py @@ -213,6 +213,8 @@ def default(session, install_extras=True): install_target = "." session.install("-e", install_target, "-c", constraints_path) + session.run("python", "-m", "pip", "freeze") + # Run py.test against the unit tests. session.run( "py.test", @@ -550,6 +552,7 @@ def prerelease_deps(session): "requests", ] session.install(*other_deps) + session.run("python", "-m", "pip", "freeze") # Print out prerelease package versions session.run( diff --git a/sqlalchemy_bigquery/base.py b/sqlalchemy_bigquery/base.py index a44cd35d..dcbb2293 100644 --- a/sqlalchemy_bigquery/base.py +++ b/sqlalchemy_bigquery/base.py @@ -275,8 +275,7 @@ def _known_tables(self): asfrom_froms = self.stack[-1].get("asfrom_froms", []) for from_ in asfrom_froms: if isinstance(from_, Table): - if from_.name not in known_tables: - known_tables.add(from_.name) + known_tables.add(from_.name) return known_tables diff --git a/tests/unit/conftest.py b/tests/unit/conftest.py index f808b380..88d3b44d 100644 --- a/tests/unit/conftest.py +++ b/tests/unit/conftest.py @@ -40,7 +40,14 @@ sqlalchemy_version >= packaging.version.parse("1.4"), reason="requires sqlalchemy 1.3 or lower", ) - +sqlalchemy_before_2_0 = pytest.mark.skipif( + sqlalchemy_version >= packaging.version.parse("2.0"), + reason="requires sqlalchemy 1.3 or lower", +) +sqlalchemy_2_0_or_higher = pytest.mark.skipif( + sqlalchemy_version < packaging.version.parse("2.0"), + reason="requires sqlalchemy 1.4 or higher", +) @pytest.fixture() def faux_conn(): diff --git a/tests/unit/test_compiler.py b/tests/unit/test_compiler.py index 139b6cbc..78843a01 100644 --- a/tests/unit/test_compiler.py +++ b/tests/unit/test_compiler.py @@ -21,7 +21,12 @@ import sqlalchemy.exc from .conftest import setup_table -from .conftest import sqlalchemy_1_4_or_higher, sqlalchemy_before_1_4 +from .conftest import ( + sqlalchemy_1_4_or_higher, + sqlalchemy_before_1_4, + sqlalchemy_2_0_or_higher, + sqlalchemy_before_2_0, +) def test_constraints_are_ignored(faux_conn, metadata): @@ -142,10 +147,10 @@ def prepare_implicit_join_base_query( return q -@sqlalchemy_before_1_4 -def test_no_implicit_join_asterix_for_inner_unnest_before_1_4(faux_conn, metadata): +@sqlalchemy_before_2_0 +def test_no_implicit_join_asterix_for_inner_unnest_before_2_0(faux_conn, metadata): # See: https://github.com/googleapis/python-bigquery-sqlalchemy/issues/368 - q = prepare_implicit_join_base_query(faux_conn, metadata, True, True) + q = prepare_implicit_join_base_query(faux_conn, metadata, True, False) expected_initial_sql = ( "SELECT `table1`.`foo`, `table2`.`bar` \n" "FROM `table2`, unnest(`table2`.`foos`) AS `unnested_foos` JOIN `table1` ON `table1`.`foo` = `unnested_foos`" @@ -153,24 +158,25 @@ def test_no_implicit_join_asterix_for_inner_unnest_before_1_4(faux_conn, metadat found_initial_sql = q.compile(faux_conn).string assert found_initial_sql == expected_initial_sql - q = sqlalchemy.select(["*"]).select_from(q) + q = q.subquery() + q = sqlalchemy.select("*").select_from(q) expected_outer_sql = ( "SELECT * \n" "FROM (SELECT `table1`.`foo` AS `foo`, `table2`.`bar` AS `bar` \n" - "FROM `table2`, unnest(`table2`.`foos`) AS `unnested_foos` JOIN `table1` ON `table1`.`foo` = `unnested_foos`)" + "FROM `table2`, unnest(`table2`.`foos`) AS `unnested_foos` JOIN `table1` ON `table1`.`foo` = `unnested_foos`) AS `anon_1`" ) found_outer_sql = q.compile(faux_conn).string assert found_outer_sql == expected_outer_sql -@sqlalchemy_1_4_or_higher +@sqlalchemy_2_0_or_higher def test_no_implicit_join_asterix_for_inner_unnest(faux_conn, metadata): # See: https://github.com/googleapis/python-bigquery-sqlalchemy/issues/368 q = prepare_implicit_join_base_query(faux_conn, metadata, True, False) expected_initial_sql = ( "SELECT `table1`.`foo`, `table2`.`bar` \n" - "FROM `table2`, unnest(`table2`.`foos`) AS `unnested_foos` JOIN `table1` ON `table1`.`foo` = `unnested_foos`" + "FROM unnest(`table2`.`foos`) AS `unnested_foos` JOIN `table1` ON `table1`.`foo` = `unnested_foos`, `table2`" ) found_initial_sql = q.compile(faux_conn).string assert found_initial_sql == expected_initial_sql @@ -181,16 +187,16 @@ def test_no_implicit_join_asterix_for_inner_unnest(faux_conn, metadata): expected_outer_sql = ( "SELECT * \n" "FROM (SELECT `table1`.`foo` AS `foo`, `table2`.`bar` AS `bar` \n" - "FROM `table2`, unnest(`table2`.`foos`) AS `unnested_foos` JOIN `table1` ON `table1`.`foo` = `unnested_foos`) AS `anon_1`" + "FROM unnest(`table2`.`foos`) AS `unnested_foos` JOIN `table1` ON `table1`.`foo` = `unnested_foos`, `table2`) AS `anon_1`" ) found_outer_sql = q.compile(faux_conn).string assert found_outer_sql == expected_outer_sql -@sqlalchemy_before_1_4 +@sqlalchemy_before_2_0 def test_no_implicit_join_for_inner_unnest_before_1_4(faux_conn, metadata): # See: https://github.com/googleapis/python-bigquery-sqlalchemy/issues/368 - q = prepare_implicit_join_base_query(faux_conn, metadata, True, True) + q = prepare_implicit_join_base_query(faux_conn, metadata, True, False) expected_initial_sql = ( "SELECT `table1`.`foo`, `table2`.`bar` \n" "FROM `table2`, unnest(`table2`.`foos`) AS `unnested_foos` JOIN `table1` ON `table1`.`foo` = `unnested_foos`" @@ -198,24 +204,25 @@ def test_no_implicit_join_for_inner_unnest_before_1_4(faux_conn, metadata): found_initial_sql = q.compile(faux_conn).string assert found_initial_sql == expected_initial_sql - q = sqlalchemy.select([q.c.foo]).select_from(q) + q = q.subquery() + q = sqlalchemy.select(q.c.foo).select_from(q) expected_outer_sql = ( - "SELECT `foo` \n" + "SELECT `anon_1`.`foo` \n" "FROM (SELECT `table1`.`foo` AS `foo`, `table2`.`bar` AS `bar` \n" - "FROM `table2`, unnest(`table2`.`foos`) AS `unnested_foos` JOIN `table1` ON `table1`.`foo` = `unnested_foos`)" + "FROM `table2`, unnest(`table2`.`foos`) AS `unnested_foos` JOIN `table1` ON `table1`.`foo` = `unnested_foos`) AS `anon_1`" ) found_outer_sql = q.compile(faux_conn).string assert found_outer_sql == expected_outer_sql -@sqlalchemy_1_4_or_higher +@sqlalchemy_2_0_or_higher def test_no_implicit_join_for_inner_unnest(faux_conn, metadata): # See: https://github.com/googleapis/python-bigquery-sqlalchemy/issues/368 q = prepare_implicit_join_base_query(faux_conn, metadata, True, False) expected_initial_sql = ( "SELECT `table1`.`foo`, `table2`.`bar` \n" - "FROM `table2`, unnest(`table2`.`foos`) AS `unnested_foos` JOIN `table1` ON `table1`.`foo` = `unnested_foos`" + "FROM unnest(`table2`.`foos`) AS `unnested_foos` JOIN `table1` ON `table1`.`foo` = `unnested_foos`, `table2`" ) found_initial_sql = q.compile(faux_conn).string assert found_initial_sql == expected_initial_sql @@ -226,7 +233,7 @@ def test_no_implicit_join_for_inner_unnest(faux_conn, metadata): expected_outer_sql = ( "SELECT `anon_1`.`foo` \n" "FROM (SELECT `table1`.`foo` AS `foo`, `table2`.`bar` AS `bar` \n" - "FROM `table2`, unnest(`table2`.`foos`) AS `unnested_foos` JOIN `table1` ON `table1`.`foo` = `unnested_foos`) AS `anon_1`" + "FROM unnest(`table2`.`foos`) AS `unnested_foos` JOIN `table1` ON `table1`.`foo` = `unnested_foos`, `table2`) AS `anon_1`" ) found_outer_sql = q.compile(faux_conn).string assert found_outer_sql == expected_outer_sql From 59f0a4e8855eb8173c4c1abaf5d7224c2497fb03 Mon Sep 17 00:00:00 2001 From: kiraksi Date: Tue, 19 Dec 2023 22:54:01 -0800 Subject: [PATCH 31/62] typo --- tests/unit/conftest.py | 1 + tests/unit/test_compiler.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/tests/unit/conftest.py b/tests/unit/conftest.py index 88d3b44d..7d790ffb 100644 --- a/tests/unit/conftest.py +++ b/tests/unit/conftest.py @@ -49,6 +49,7 @@ reason="requires sqlalchemy 1.4 or higher", ) + @pytest.fixture() def faux_conn(): test_data = dict(execute=[]) diff --git a/tests/unit/test_compiler.py b/tests/unit/test_compiler.py index 78843a01..19993761 100644 --- a/tests/unit/test_compiler.py +++ b/tests/unit/test_compiler.py @@ -194,7 +194,7 @@ def test_no_implicit_join_asterix_for_inner_unnest(faux_conn, metadata): @sqlalchemy_before_2_0 -def test_no_implicit_join_for_inner_unnest_before_1_4(faux_conn, metadata): +def test_no_implicit_join_for_inner_unnest_before_2_0(faux_conn, metadata): # See: https://github.com/googleapis/python-bigquery-sqlalchemy/issues/368 q = prepare_implicit_join_base_query(faux_conn, metadata, True, False) expected_initial_sql = ( From 9e8d4e7a80136f8f3418ce6deb1633848bcb25b0 Mon Sep 17 00:00:00 2001 From: kiraksi Date: Wed, 20 Dec 2023 08:32:03 -0800 Subject: [PATCH 32/62] Modified one compliance StringTest thats been flaky --- .../test_dialect_compliance.py | 25 +++++++++++++++++++ 1 file changed, 25 insertions(+) diff --git a/tests/sqlalchemy_dialect_compliance/test_dialect_compliance.py b/tests/sqlalchemy_dialect_compliance/test_dialect_compliance.py index 50638cd4..a8ba194c 100644 --- a/tests/sqlalchemy_dialect_compliance/test_dialect_compliance.py +++ b/tests/sqlalchemy_dialect_compliance/test_dialect_compliance.py @@ -68,6 +68,7 @@ DifficultParametersTest as _DifficultParametersTest, FetchLimitOffsetTest as _FetchLimitOffsetTest, PostCompileParamsTest, + StringTest as _StringTest, UuidTest as _UuidTest, ) @@ -442,6 +443,30 @@ def test_uuid_returning(self, connection): eq_(row, (data, str_data, data, str_data)) + class StringTest(_StringTest): + def test_dont_truncate_rightside( + self, metadata, connection, expr=None, expected=None + ): + t = Table( + "t", + metadata, + Column("x", String(2)), + Column("id", Integer, primary_key=True), + ) + t.create(connection) + connection.connection.commit() + connection.execute( + t.insert(), + [{"x": "AB", "id": 1}, {"x": "BC", "id": 2}, {"x": "AC", "id": 3}], + ) + combinations = [("%B%", ["AB", "BC"]), ("A%C", ["AC"]), ("A%C%Z", [])] + + for args in combinations: + eq_( + connection.scalars(select(t.c.x).where(t.c.x.like(args[0]))).all(), + args[1], + ) + # from else statement .... del DistinctOnTest # expects unquoted table names. del HasIndexTest # BQ doesn't do the indexes that SQLA is loooking for. From 5503877901f86a7a52cd1057c13ef8fa5ba5c575 Mon Sep 17 00:00:00 2001 From: kiraksi Date: Wed, 10 Jan 2024 14:19:56 -0800 Subject: [PATCH 33/62] Updated docs --- README.rst | 2 +- sqlalchemy_bigquery/test.py | 6 ++++++ 2 files changed, 7 insertions(+), 1 deletion(-) create mode 100644 sqlalchemy_bigquery/test.py diff --git a/README.rst b/README.rst index 123f2106..286821d2 100644 --- a/README.rst +++ b/README.rst @@ -36,7 +36,7 @@ In order to use this library, you first need to go through the following steps: .. note:: This library is a prerelease to gauge compatiblity with SQLAlchemy - versions >= 2.0.0 + versions >= 1.4.16 < 2.1 Installation ------------ diff --git a/sqlalchemy_bigquery/test.py b/sqlalchemy_bigquery/test.py new file mode 100644 index 00000000..c7a3ed6a --- /dev/null +++ b/sqlalchemy_bigquery/test.py @@ -0,0 +1,6 @@ +from sqlalchemy.sql.expression import cast +from sqlalchemy import String, func +code_coding = db.query(func.unnest(self.model.code_coding)).subquery() +conditions.append( + and_(cast(code_coding.c.code, String) == "H", cast(code_coding.c.display, String) == "BLAST") +) \ No newline at end of file From b4010e11304af97a7293b12dfacbdeeab43fc6e6 Mon Sep 17 00:00:00 2001 From: kiraksi Date: Fri, 12 Jan 2024 01:42:56 -0800 Subject: [PATCH 34/62] minor fixes to noxfile and README --- README.rst | 2 +- noxfile.py | 5 ----- samples/snippets/requirements.txt | 2 +- 3 files changed, 2 insertions(+), 7 deletions(-) diff --git a/README.rst b/README.rst index 286821d2..b6693abb 100644 --- a/README.rst +++ b/README.rst @@ -36,7 +36,7 @@ In order to use this library, you first need to go through the following steps: .. note:: This library is a prerelease to gauge compatiblity with SQLAlchemy - versions >= 1.4.16 < 2.1 + versions >= 1.4.16 and < 2.1 Installation ------------ diff --git a/noxfile.py b/noxfile.py index 5a5ed906..c2600f52 100644 --- a/noxfile.py +++ b/noxfile.py @@ -213,8 +213,6 @@ def default(session, install_extras=True): install_target = "." session.install("-e", install_target, "-c", constraints_path) - session.run("python", "-m", "pip", "freeze") - # Run py.test against the unit tests. session.run( "py.test", @@ -419,8 +417,6 @@ def cover(session): """ session.install("coverage", "pytest-cov") - session.run("python", "-m", "pip", "freeze") - session.run("coverage", "report", "--show-missing", "--fail-under=100") session.run("coverage", "erase") @@ -552,7 +548,6 @@ def prerelease_deps(session): "requests", ] session.install(*other_deps) - session.run("python", "-m", "pip", "freeze") # Print out prerelease package versions session.run( diff --git a/samples/snippets/requirements.txt b/samples/snippets/requirements.txt index ffb51928..f011f19c 100644 --- a/samples/snippets/requirements.txt +++ b/samples/snippets/requirements.txt @@ -28,6 +28,6 @@ requests==2.31.0 rsa==4.9 shapely==2.0.2 six==1.16.0 -sqlalchemy===1.4.15 +sqlalchemy==1.4.16 typing-extensions==4.9.0 urllib3==2.1.0 From 1e54e77869a8584f95cc0266853ceadbd20a6443 Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Tue, 16 Jan 2024 15:38:57 +0000 Subject: [PATCH 35/62] =?UTF-8?q?=F0=9F=A6=89=20Updates=20from=20OwlBot=20?= =?UTF-8?q?post-processor?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --- noxfile.py | 1 - sqlalchemy_bigquery/test.py | 8 ++++++-- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/noxfile.py b/noxfile.py index 1e1808ab..41666055 100644 --- a/noxfile.py +++ b/noxfile.py @@ -417,7 +417,6 @@ def cover(session): test runs (not system test runs), and then erases coverage data. """ session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") session.run("coverage", "erase") diff --git a/sqlalchemy_bigquery/test.py b/sqlalchemy_bigquery/test.py index c7a3ed6a..6c7156fb 100644 --- a/sqlalchemy_bigquery/test.py +++ b/sqlalchemy_bigquery/test.py @@ -1,6 +1,10 @@ from sqlalchemy.sql.expression import cast from sqlalchemy import String, func + code_coding = db.query(func.unnest(self.model.code_coding)).subquery() conditions.append( - and_(cast(code_coding.c.code, String) == "H", cast(code_coding.c.display, String) == "BLAST") -) \ No newline at end of file + and_( + cast(code_coding.c.code, String) == "H", + cast(code_coding.c.display, String) == "BLAST", + ) +) From e95aee1d0ea45f6b0d98de268d30abba3eb9073f Mon Sep 17 00:00:00 2001 From: kiraksi Date: Wed, 17 Jan 2024 03:39:18 -0800 Subject: [PATCH 36/62] cleaned up code from review, removed unnecessary code and files --- sqlalchemy_bigquery/test.py | 10 ---------- .../test_dialect_compliance.py | 8 -------- tests/system/test_sqlalchemy_bigquery.py | 5 ----- 3 files changed, 23 deletions(-) delete mode 100644 sqlalchemy_bigquery/test.py diff --git a/sqlalchemy_bigquery/test.py b/sqlalchemy_bigquery/test.py deleted file mode 100644 index 6c7156fb..00000000 --- a/sqlalchemy_bigquery/test.py +++ /dev/null @@ -1,10 +0,0 @@ -from sqlalchemy.sql.expression import cast -from sqlalchemy import String, func - -code_coding = db.query(func.unnest(self.model.code_coding)).subquery() -conditions.append( - and_( - cast(code_coding.c.code, String) == "H", - cast(code_coding.c.display, String) == "BLAST", - ) -) diff --git a/tests/sqlalchemy_dialect_compliance/test_dialect_compliance.py b/tests/sqlalchemy_dialect_compliance/test_dialect_compliance.py index a8ba194c..822ad1f8 100644 --- a/tests/sqlalchemy_dialect_compliance/test_dialect_compliance.py +++ b/tests/sqlalchemy_dialect_compliance/test_dialect_compliance.py @@ -85,8 +85,6 @@ def literal(value, type_=None): if type_ is not None: assert type_ is self.datatype - import sqlalchemy.sql.sqltypes - return sqlalchemy.sql.elements.literal(value, self.datatype) with mock.patch("sqlalchemy.testing.suite.test_types.literal", literal): @@ -127,7 +125,6 @@ def test_update(self): # Had to pass in data as a dict object in 2.0 r = connection.execute(t.update().where(t.c.id == 2), dict(data="d2_new")) assert not r.is_insert - # assert not r.returns_rows eq_( connection.execute(t.select().order_by(t.c.id)).fetchall(), @@ -139,7 +136,6 @@ def test_delete(self): connection = config.db.connect() r = connection.execute(t.delete().where(t.c.id == 2)) assert not r.is_insert - # assert not r.returns_rows eq_( connection.execute(t.select().order_by(t.c.id)).fetchall(), [(1, "d1"), (3, "d3")], @@ -526,7 +522,6 @@ def test_update(self): t = self.tables.plain_pk r = config.db.execute(t.update().where(t.c.id == 2), data="d2_new") assert not r.is_insert - # assert not r.returns_rows eq_( config.db.execute(t.select().order_by(t.c.id)).fetchall(), @@ -537,7 +532,6 @@ def test_delete(self): t = self.tables.plain_pk r = config.db.execute(t.delete().where(t.c.id == 2)) assert not r.is_insert - # assert not r.returns_rows eq_( config.db.execute(t.select().order_by(t.c.id)).fetchall(), [(1, "d1"), (3, "d3")], @@ -671,7 +665,6 @@ def test_update(self): t = self.tables.plain_pk r = config.db.execute(t.update().where(t.c.id == 2), data="d2_new") assert not r.is_insert - # assert not r.returns_rows eq_( config.db.execute(t.select().order_by(t.c.id)).fetchall(), @@ -682,7 +675,6 @@ def test_delete(self): t = self.tables.plain_pk r = config.db.execute(t.delete().where(t.c.id == 2)) assert not r.is_insert - # assert not r.returns_rows eq_( config.db.execute(t.select().order_by(t.c.id)).fetchall(), [(1, "d1"), (3, "d3")], diff --git a/tests/system/test_sqlalchemy_bigquery.py b/tests/system/test_sqlalchemy_bigquery.py index 906f4326..457a8ea8 100644 --- a/tests/system/test_sqlalchemy_bigquery.py +++ b/tests/system/test_sqlalchemy_bigquery.py @@ -286,11 +286,6 @@ def test_reflect_select(table, engine_using_test_dataset, table_using_test_datas assert isinstance(table.c["nested_record.record.name"].type, types.String) assert isinstance(table.c.array.type, types.ARRAY) - # Force unique column labels using `use_labels` below to deal - # with BQ sometimes complaining about duplicate column names - # when a destination table is specified, even though no - # destination table is specified. When this test was written, - # `use_labels` was forced by the dialect. with engine_using_test_dataset.connect() as conn: rows = conn.execute( table.select().set_label_style( From b8f8f2133becb3cb43f373939c86dbd03e9a5975 Mon Sep 17 00:00:00 2001 From: Chalmer Lowe Date: Wed, 17 Jan 2024 11:47:07 -0500 Subject: [PATCH 37/62] Update tests/sqlalchemy_dialect_compliance/test_dialect_compliance.py --- tests/sqlalchemy_dialect_compliance/test_dialect_compliance.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/sqlalchemy_dialect_compliance/test_dialect_compliance.py b/tests/sqlalchemy_dialect_compliance/test_dialect_compliance.py index 822ad1f8..7677510e 100644 --- a/tests/sqlalchemy_dialect_compliance/test_dialect_compliance.py +++ b/tests/sqlalchemy_dialect_compliance/test_dialect_compliance.py @@ -122,7 +122,7 @@ class SimpleUpdateDeleteTest(_SimpleUpdateDeleteTest): def test_update(self): t = self.tables.plain_pk connection = config.db.connect() - # Had to pass in data as a dict object in 2.0 + # In SQLAlchemy 2.0, the datatype changed to dict in the following function. r = connection.execute(t.update().where(t.c.id == 2), dict(data="d2_new")) assert not r.is_insert From dcc89f75ba37f84ea6f5b3c2076c704ed2174390 Mon Sep 17 00:00:00 2001 From: kiraksi Date: Thu, 25 Jan 2024 11:45:15 -0800 Subject: [PATCH 38/62] create development release 1.11.0.dev0 branch --- CHANGELOG.md | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 7c3b7ca7..275f5fed 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -13,6 +13,13 @@ Older versions of this project were distributed as [pybigquery][0]. [2]: https://pypi.org/project/pybigquery/#history +## [1.11.0.dev0](https://github.com/googleapis/python-bigquery-sqlalchemy/compare/v1.9.0...v1.11.0.dev0) (2024-01-25) + + +### Features + +* Drop support for SQLAlchemy versions 1.2 and 1.3, maintain support for 1.4 and add support for 2.0 ([#920](https://github.com/googleapis/python-bigquery-sqlalchemy/pull/920)) +* Fix coverage test issues in SQLAlchemy migration ([#987](https://github.com/googleapis/python-bigquery-sqlalchemy/pull/987)) ## [1.9.0](https://github.com/googleapis/python-bigquery-sqlalchemy/compare/v1.8.0...v1.9.0) (2023-12-10) From 67cb7bd542c220702274dedfa56cf538d051a66c Mon Sep 17 00:00:00 2001 From: Kira Date: Mon, 29 Jan 2024 13:31:58 -0800 Subject: [PATCH 39/62] chore: cleanup compliance tests for sqlalchemy migration (#1013) * chore: remove code for sqlalchemy before 1_4 * reformatted with black: * Removed sqlalchemy compliance tests from versions before 1.4 * removed code in base.py for sqlalchemy < 1.4 * fix coverage issues in base.py * temporarily commented out code lines not passing coverage for testing purposes * replaced functions previously removed for not passing cover * testing removing functions for coverage * add no cover tag to untested code and clean up commented out functions * fix lint issues * black * chore: cleanup compliance file tests after migration * lint * fixed small import error --------- Co-authored-by: Sharoon Thomas --- sqlalchemy_bigquery/_struct.py | 42 +- sqlalchemy_bigquery/base.py | 35 +- sqlalchemy_bigquery/requirements.py | 1 - .../test_dialect_compliance.py | 574 +++++++----------- tests/unit/conftest.py | 12 - tests/unit/test_compiler.py | 6 - tests/unit/test_compliance.py | 4 +- tests/unit/test_select.py | 136 +---- tests/unit/test_sqlalchemy_bigquery.py | 8 +- 9 files changed, 258 insertions(+), 560 deletions(-) diff --git a/sqlalchemy_bigquery/_struct.py b/sqlalchemy_bigquery/_struct.py index 7c084c98..309d1080 100644 --- a/sqlalchemy_bigquery/_struct.py +++ b/sqlalchemy_bigquery/_struct.py @@ -17,20 +17,14 @@ # IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN # CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -import packaging.version import sqlalchemy.sql.default_comparator import sqlalchemy.sql.sqltypes import sqlalchemy.types from . import base -sqlalchemy_1_4_or_more = packaging.version.parse( - sqlalchemy.__version__ -) >= packaging.version.parse("1.4") - -if sqlalchemy_1_4_or_more: - import sqlalchemy.sql.coercions - import sqlalchemy.sql.roles +import sqlalchemy.sql.coercions +import sqlalchemy.sql.roles def _get_subtype_col_spec(type_): @@ -109,30 +103,14 @@ def __getattr__(self, name): comparator_factory = Comparator -# In the implementations of _field_index below, we're stealing from -# the JSON type implementation, but the code to steal changed in -# 1.4. :/ - -if sqlalchemy_1_4_or_more: - - def _field_index(self, name, operator): - return sqlalchemy.sql.coercions.expect( - sqlalchemy.sql.roles.BinaryElementRole, - name, - expr=self.expr, - operator=operator, - bindparam_type=sqlalchemy.types.String(), - ) - -else: - - def _field_index(self, name, operator): - return sqlalchemy.sql.default_comparator._check_literal( - self.expr, - operator, - name, - bindparam_type=sqlalchemy.types.String(), - ) +def _field_index(self, name, operator): + return sqlalchemy.sql.coercions.expect( + sqlalchemy.sql.roles.BinaryElementRole, + name, + expr=self.expr, + operator=operator, + bindparam_type=sqlalchemy.types.String(), + ) def struct_getitem_op(a, b): diff --git a/sqlalchemy_bigquery/base.py b/sqlalchemy_bigquery/base.py index da4f18fc..a3d88674 100644 --- a/sqlalchemy_bigquery/base.py +++ b/sqlalchemy_bigquery/base.py @@ -163,7 +163,7 @@ def get_insert_default(self, column): # pragma: NO COVER """, flags=re.IGNORECASE | re.VERBOSE, ) - def __distribute_types_to_expanded_placeholders(self, m): + def __distribute_types_to_expanded_placeholders(self, m): # pragma: NO COVER # If we have an in parameter, it sometimes gets expaned to 0 or more # parameters and we need to move the type marker to each # parameter. @@ -174,6 +174,8 @@ def __distribute_types_to_expanded_placeholders(self, m): # suffixes refect that when an array parameter is expanded, # numeric suffixes are added. For example, a placeholder like # `%(foo)s` gets expaneded to `%(foo_0)s, `%(foo_1)s, ...`. + + # Coverage: despite our best efforts, never recognized this segment of code as being tested. placeholders, type_ = m.groups() if placeholders: placeholders = placeholders.replace(")", f":{type_})") @@ -356,11 +358,7 @@ def group_by_clause(self, select, **kw): __sqlalchemy_version_info = packaging.version.parse(sqlalchemy.__version__) - __expanding_text = ( - "EXPANDING" - if __sqlalchemy_version_info < packaging.version.parse("1.4") - else "POSTCOMPILE" - ) + __expanding_text = "POSTCOMPILE" # https://github.com/sqlalchemy/sqlalchemy/commit/f79df12bd6d99b8f6f09d4bf07722638c4b4c159 __expanding_conflict = ( @@ -388,9 +386,6 @@ def visit_in_op_binary(self, binary, operator_, **kw): self._generate_generic_binary(binary, " IN ", **kw) ) - def visit_empty_set_expr(self, element_types, **kw): - return "" - def visit_not_in_op_binary(self, binary, operator, **kw): return ( "(" @@ -424,31 +419,16 @@ def visit_contains_op_binary(self, binary, operator, **kw): self._maybe_reescape(binary), operator, **kw ) - def visit_notcontains_op_binary(self, binary, operator, **kw): - return super(BigQueryCompiler, self).visit_notcontains_op_binary( - self._maybe_reescape(binary), operator, **kw - ) - def visit_startswith_op_binary(self, binary, operator, **kw): return super(BigQueryCompiler, self).visit_startswith_op_binary( self._maybe_reescape(binary), operator, **kw ) - def visit_notstartswith_op_binary(self, binary, operator, **kw): - return super(BigQueryCompiler, self).visit_notstartswith_op_binary( - self._maybe_reescape(binary), operator, **kw - ) - def visit_endswith_op_binary(self, binary, operator, **kw): return super(BigQueryCompiler, self).visit_endswith_op_binary( self._maybe_reescape(binary), operator, **kw ) - def visit_notendswith_op_binary(self, binary, operator, **kw): - return super(BigQueryCompiler, self).visit_notendswith_op_binary( - self._maybe_reescape(binary), operator, **kw - ) - ############################################################################ __placeholder = re.compile(r"%\(([^\]:]+)(:[^\]:]+)?\)s$").match @@ -510,7 +490,8 @@ def visit_bindparam( # here, because then we can't do a recompile later (e.g., first # print the statment, then execute it). See issue #357. # - if getattr(bindparam, "expand_op", None) is not None: + # Coverage: despite our best efforts, never recognized this segment of code as being tested. + if getattr(bindparam, "expand_op", None) is not None: # pragma: NO COVER assert bindparam.expand_op.__name__.endswith("in_op") # in in bindparam = bindparam._clone(maintain_key=True) bindparam.expanding = False @@ -1278,10 +1259,6 @@ def do_rollback(self, dbapi_connection): # BigQuery has no support for transactions. pass - def _check_unicode_returns(self, connection, additional_tests=None): - # requests gives back Unicode strings - return True - def get_view_definition(self, connection, view_name, schema=None, **kw): if isinstance(connection, Engine): connection = connection.connect() diff --git a/sqlalchemy_bigquery/requirements.py b/sqlalchemy_bigquery/requirements.py index af6dec75..118e3946 100644 --- a/sqlalchemy_bigquery/requirements.py +++ b/sqlalchemy_bigquery/requirements.py @@ -24,7 +24,6 @@ import sqlalchemy.testing.requirements import sqlalchemy.testing.exclusions -from sqlalchemy.testing.exclusions import against, only_on supported = sqlalchemy.testing.exclusions.open unsupported = sqlalchemy.testing.exclusions.closed diff --git a/tests/sqlalchemy_dialect_compliance/test_dialect_compliance.py b/tests/sqlalchemy_dialect_compliance/test_dialect_compliance.py index 7677510e..5420bf32 100644 --- a/tests/sqlalchemy_dialect_compliance/test_dialect_compliance.py +++ b/tests/sqlalchemy_dialect_compliance/test_dialect_compliance.py @@ -29,18 +29,21 @@ import sqlalchemy.testing.suite.test_types import sqlalchemy.sql.sqltypes from sqlalchemy.testing import util, config -from sqlalchemy.testing import is_false -from sqlalchemy.testing import is_true -from sqlalchemy.testing import is_ from sqlalchemy.testing.assertions import eq_ -from sqlalchemy.testing.suite import config, select, exists +from sqlalchemy.testing.suite import select, exists from sqlalchemy.testing.suite import * # noqa +from sqlalchemy.testing.suite import Integer, Table, Column, String, bindparam, testing from sqlalchemy.testing.suite import ( - ComponentReflectionTest as _ComponentReflectionTest, CTETest as _CTETest, ExistsTest as _ExistsTest, + FetchLimitOffsetTest as _FetchLimitOffsetTest, + DifficultParametersTest as _DifficultParametersTest, + DistinctOnTest, + HasIndexTest, + IdentityAutoincrementTest, InsertBehaviorTest as _InsertBehaviorTest, LongNameBlowoutTest, + PostCompileParamsTest, QuotedNameArgumentTest, SimpleUpdateDeleteTest as _SimpleUpdateDeleteTest, TimestampMicrosecondsTest as _TimestampMicrosecondsTest, @@ -53,156 +56,23 @@ from sqlalchemy.testing.suite.test_reflection import ( BizarroCharacterFKResolutionTest, ComponentReflectionTest, - OneConnectionTablesTest, HasTableTest, ) if packaging.version.parse(sqlalchemy.__version__) >= packaging.version.parse("2.0"): import uuid from sqlalchemy.sql import type_coerce - from sqlalchemy import Uuid from sqlalchemy.testing.suite import ( TrueDivTest as _TrueDivTest, IntegerTest as _IntegerTest, NumericTest as _NumericTest, - DifficultParametersTest as _DifficultParametersTest, - FetchLimitOffsetTest as _FetchLimitOffsetTest, - PostCompileParamsTest, StringTest as _StringTest, UuidTest as _UuidTest, ) - class TimestampMicrosecondsTest(_TimestampMicrosecondsTest): - data = datetime.datetime(2012, 10, 15, 12, 57, 18, 396, tzinfo=pytz.UTC) - - def test_select_direct(self, connection): - # This func added because this test was failing when passed the - # UTC timezone. - - def literal(value, type_=None): - assert value == self.data - - if type_ is not None: - assert type_ is self.datatype - - return sqlalchemy.sql.elements.literal(value, self.datatype) - - with mock.patch("sqlalchemy.testing.suite.test_types.literal", literal): - super(TimestampMicrosecondsTest, self).test_select_direct(connection) - - def test_round_trip_executemany(self, connection): - unicode_table = self.tables.unicode_table - connection.execute( - unicode_table.insert(), - [{"id": i, "unicode_data": self.data} for i in range(3)], - ) - - rows = connection.execute(select(unicode_table.c.unicode_data)).fetchall() - eq_(rows, [(self.data,) for i in range(3)]) - for row in rows: - # 2.0 had no support for util.text_type - assert isinstance(row[0], str) - - sqlalchemy.testing.suite.test_types._UnicodeFixture.test_round_trip_executemany = ( - test_round_trip_executemany - ) - - class TrueDivTest(_TrueDivTest): - @pytest.mark.skip("BQ rounds based on datatype") - def test_floordiv_integer(self): - pass - - @pytest.mark.skip("BQ rounds based on datatype") - def test_floordiv_integer_bound(self): - pass - - class SimpleUpdateDeleteTest(_SimpleUpdateDeleteTest): - """The base tests fail if operations return rows for some reason.""" - - def test_update(self): - t = self.tables.plain_pk - connection = config.db.connect() - # In SQLAlchemy 2.0, the datatype changed to dict in the following function. - r = connection.execute(t.update().where(t.c.id == 2), dict(data="d2_new")) - assert not r.is_insert - - eq_( - connection.execute(t.select().order_by(t.c.id)).fetchall(), - [(1, "d1"), (2, "d2_new"), (3, "d3")], - ) - - def test_delete(self): - t = self.tables.plain_pk - connection = config.db.connect() - r = connection.execute(t.delete().where(t.c.id == 2)) - assert not r.is_insert - eq_( - connection.execute(t.select().order_by(t.c.id)).fetchall(), - [(1, "d1"), (3, "d3")], - ) - - class InsertBehaviorTest(_InsertBehaviorTest): - @pytest.mark.skip( - "BQ has no autoinc and client-side defaults can't work for select." - ) - def test_insert_from_select_autoinc(cls): - pass - - @pytest.mark.skip( - "BQ has no autoinc and client-side defaults can't work for select." - ) - def test_no_results_for_non_returning_insert(cls): - pass - - # BQ has no autoinc and client-side defaults can't work for select - del _IntegerTest.test_huge_int_auto_accommodation - - class NumericTest(_NumericTest): - @testing.fixture - def do_numeric_test(self, metadata, connection): - def run(type_, input_, output, filter_=None, check_scale=False): - t = Table("t", metadata, Column("x", type_)) - t.create(connection) - connection.execute(t.insert(), [{"x": x} for x in input_]) - - result = {row[0] for row in connection.execute(t.select())} - output = set(output) - if filter_: - result = {filter_(x) for x in result} - output = {filter_(x) for x in output} - eq_(result, output) - if check_scale: - eq_([str(x) for x in result], [str(x) for x in output]) - - where_expr = True - - # Adding where clause for 2.0 compatibility - connection.execute(t.delete().where(where_expr)) - - # test that this is actually a number! - # note we have tiny scale here as we have tests with very - # small scale Numeric types. PostgreSQL will raise an error - # if you use values outside the available scale. - if type_.asdecimal: - test_value = decimal.Decimal("2.9") - add_value = decimal.Decimal("37.12") - else: - test_value = 2.9 - add_value = 37.12 - - connection.execute(t.insert(), {"x": test_value}) - assert_we_are_a_number = connection.scalar( - select(type_coerce(t.c.x + add_value, type_)) - ) - eq_( - round(assert_we_are_a_number, 3), - round(test_value + add_value, 3), - ) - - return run - class DifficultParametersTest(_DifficultParametersTest): - # removed parameters that dont work with bigquery + """There are some parameters that don't work with bigquery that were removed from this test""" + tough_parameters = testing.combinations( ("boring",), ("per cent",), @@ -307,34 +177,149 @@ def test_standalone_bindparam_escape_expanding( res = connection.scalars(stmt, {paramname: ["d", "a"]}).all() eq_(res, [1, 4]) - class FetchLimitOffsetTest(_FetchLimitOffsetTest): - @pytest.mark.skip("BigQuery doesn't allow an offset without a limit.") - def test_simple_offset(self): + # BQ has no autoinc and client-side defaults can't work for select + del _IntegerTest.test_huge_int_auto_accommodation + + class NumericTest(_NumericTest): + """Added a where clause for BQ compatibility.""" + + @testing.fixture + def do_numeric_test(self, metadata, connection): + def run(type_, input_, output, filter_=None, check_scale=False): + t = Table("t", metadata, Column("x", type_)) + t.create(connection) + connection.execute(t.insert(), [{"x": x} for x in input_]) + + result = {row[0] for row in connection.execute(t.select())} + output = set(output) + if filter_: + result = {filter_(x) for x in result} + output = {filter_(x) for x in output} + eq_(result, output) + if check_scale: + eq_([str(x) for x in result], [str(x) for x in output]) + + where_expr = True + + connection.execute(t.delete().where(where_expr)) + + if type_.asdecimal: + test_value = decimal.Decimal("2.9") + add_value = decimal.Decimal("37.12") + else: + test_value = 2.9 + add_value = 37.12 + + connection.execute(t.insert(), {"x": test_value}) + assert_we_are_a_number = connection.scalar( + select(type_coerce(t.c.x + add_value, type_)) + ) + eq_( + round(assert_we_are_a_number, 3), + round(test_value + add_value, 3), + ) + + return run + + class TimestampMicrosecondsTest(_TimestampMicrosecondsTest): + """BQ has no support for BQ util.text_type""" + + data = datetime.datetime(2012, 10, 15, 12, 57, 18, 396, tzinfo=pytz.UTC) + + def test_select_direct(self, connection): + # This func added because this test was failing when passed the + # UTC timezone. + + def literal(value, type_=None): + assert value == self.data + + if type_ is not None: + assert type_ is self.datatype + + return sqlalchemy.sql.elements.literal(value, self.datatype) + + with mock.patch("sqlalchemy.testing.suite.test_types.literal", literal): + super(TimestampMicrosecondsTest, self).test_select_direct(connection) + + def test_round_trip_executemany(self, connection): + unicode_table = self.tables.unicode_table + connection.execute( + unicode_table.insert(), + [{"id": i, "unicode_data": self.data} for i in range(3)], + ) + + rows = connection.execute(select(unicode_table.c.unicode_data)).fetchall() + eq_(rows, [(self.data,) for i in range(3)]) + for row in rows: + assert isinstance(row[0], str) + + sqlalchemy.testing.suite.test_types._UnicodeFixture.test_round_trip_executemany = ( + test_round_trip_executemany + ) + + class TrueDivTest(_TrueDivTest): + @pytest.mark.skip("BQ rounds based on datatype") + def test_floordiv_integer(self): pass - test_bound_offset = test_simple_offset - test_expr_offset = test_simple_offset_zero = test_simple_offset - test_limit_offset_nobinds = test_simple_offset # TODO figure out - # how to prevent this from failing - # The original test is missing an order by. + @pytest.mark.skip("BQ rounds based on datatype") + def test_floordiv_integer_bound(self): + pass - # The original test is missing an order by. + class SimpleUpdateDeleteTest(_SimpleUpdateDeleteTest): + """The base tests fail if operations return rows for some reason.""" - # Also, note that sqlalchemy union is a union distinct, not a - # union all. This test caught that were were getting that wrong. - def test_limit_render_multiple_times(self, connection): - table = self.tables.some_table - stmt = select(table.c.id).order_by(table.c.id).limit(1).scalar_subquery() + def test_update(self): + t = self.tables.plain_pk + connection = config.db.connect() + # In SQLAlchemy 2.0, the datatype changed to dict in the following function. + r = connection.execute(t.update().where(t.c.id == 2), dict(data="d2_new")) + assert not r.is_insert + + eq_( + connection.execute(t.select().order_by(t.c.id)).fetchall(), + [(1, "d1"), (2, "d2_new"), (3, "d3")], + ) + + def test_delete(self): + t = self.tables.plain_pk + connection = config.db.connect() + r = connection.execute(t.delete().where(t.c.id == 2)) + assert not r.is_insert + eq_( + connection.execute(t.select().order_by(t.c.id)).fetchall(), + [(1, "d1"), (3, "d3")], + ) - u = sqlalchemy.union(select(stmt), select(stmt)).subquery().select() + class StringTest(_StringTest): + """Added a where clause for BQ compatibility""" - self._assert_result( - connection, - u, - [(1,)], + def test_dont_truncate_rightside( + self, metadata, connection, expr=None, expected=None + ): + t = Table( + "t", + metadata, + Column("x", String(2)), + Column("id", Integer, primary_key=True), + ) + t.create(connection) + connection.connection.commit() + connection.execute( + t.insert(), + [{"x": "AB", "id": 1}, {"x": "BC", "id": 2}, {"x": "AC", "id": 3}], ) + combinations = [("%B%", ["AB", "BC"]), ("A%C", ["AC"]), ("A%C%Z", [])] + + for args in combinations: + eq_( + connection.scalars(select(t.c.x).where(t.c.x.like(args[0]))).all(), + args[1], + ) class UuidTest(_UuidTest): + """BQ needs to pass in UUID as a string""" + @classmethod def define_tables(cls, metadata): Table( @@ -439,81 +424,38 @@ def test_uuid_returning(self, connection): eq_(row, (data, str_data, data, str_data)) - class StringTest(_StringTest): - def test_dont_truncate_rightside( - self, metadata, connection, expr=None, expected=None - ): - t = Table( - "t", - metadata, - Column("x", String(2)), - Column("id", Integer, primary_key=True), - ) - t.create(connection) - connection.connection.commit() - connection.execute( - t.insert(), - [{"x": "AB", "id": 1}, {"x": "BC", "id": 2}, {"x": "AC", "id": 3}], - ) - combinations = [("%B%", ["AB", "BC"]), ("A%C", ["AC"]), ("A%C%Z", [])] - - for args in combinations: - eq_( - connection.scalars(select(t.c.x).where(t.c.x.like(args[0]))).all(), - args[1], - ) - - # from else statement .... - del DistinctOnTest # expects unquoted table names. - del HasIndexTest # BQ doesn't do the indexes that SQLA is loooking for. - del IdentityAutoincrementTest # BQ doesn't do autoincrement - del PostCompileParamsTest # BQ adds backticks to bind parameters, causing failure of tests TODO: fix this? - -elif packaging.version.parse(sqlalchemy.__version__) < packaging.version.parse("1.4"): - from sqlalchemy.testing.suite import LimitOffsetTest as _LimitOffsetTest - - class LimitOffsetTest(_LimitOffsetTest): - @pytest.mark.skip("BigQuery doesn't allow an offset without a limit.") - def test_simple_offset(self): - pass - - test_bound_offset = test_simple_offset - - class TimestampMicrosecondsTest(_TimestampMicrosecondsTest): - data = datetime.datetime(2012, 10, 15, 12, 57, 18, 396, tzinfo=pytz.UTC) - - def test_literal(self): - # The base tests doesn't set up the literal properly, because - # it doesn't pass its datatype to `literal`. - - def literal(value): - assert value == self.data - return sqlalchemy.sql.elements.literal(value, self.datatype) - - with mock.patch("sqlalchemy.testing.suite.test_types.literal", literal): - super(TimestampMicrosecondsTest, self).test_literal() - - def test_select_direct(self, connection): - # This func added because this test was failing when passed the - # UTC timezone. - - def literal(value, type_=None): - assert value == self.data +else: + from sqlalchemy.testing.suite import ( + RowCountTest as _RowCountTest, + ) - if type_ is not None: - assert type_ is self.datatype + del DifficultParametersTest # exercises column names illegal in BQ - return sqlalchemy.sql.elements.literal(value, self.datatype) + class RowCountTest(_RowCountTest): + """""" - with mock.patch("sqlalchemy.testing.suite.test_types.literal", literal): - super(TimestampMicrosecondsTest, self).test_select_direct(connection) + @classmethod + def insert_data(cls, connection): + cls.data = data = [ + ("Angela", "A"), + ("Andrew", "A"), + ("Anand", "A"), + ("Bob", "B"), + ("Bobette", "B"), + ("Buffy", "B"), + ("Charlie", "C"), + ("Cynthia", "C"), + ("Chris", "C"), + ] - class InsertBehaviorTest(_InsertBehaviorTest): - @pytest.mark.skip( - "BQ has no autoinc and client-side defaults can't work for select." - ) - def test_insert_from_select_autoinc(cls): - pass + employees_table = cls.tables.employees + connection.execute( + employees_table.insert(), + [ + {"employee_id": i, "name": n, "department": d} + for i, (n, d) in enumerate(data) + ], + ) class SimpleUpdateDeleteTest(_SimpleUpdateDeleteTest): """The base tests fail if operations return rows for some reason.""" @@ -537,46 +479,6 @@ def test_delete(self): [(1, "d1"), (3, "d3")], ) -else: - from sqlalchemy.testing.suite import ( - FetchLimitOffsetTest as _FetchLimitOffsetTest, - RowCountTest as _RowCountTest, - ) - - class FetchLimitOffsetTest(_FetchLimitOffsetTest): - @pytest.mark.skip("BigQuery doesn't allow an offset without a limit.") - def test_simple_offset(self): - pass - - test_bound_offset = test_simple_offset - test_expr_offset = test_simple_offset_zero = test_simple_offset - test_limit_offset_nobinds = test_simple_offset # TODO figure out - # how to prevent this from failing - # The original test is missing an order by. - - # Also, note that sqlalchemy union is a union distinct, not a - # union all. This test caught that were were getting that wrong. - def test_limit_render_multiple_times(self, connection): - table = self.tables.some_table - stmt = select(table.c.id).order_by(table.c.id).limit(1).scalar_subquery() - - u = sqlalchemy.union(select(stmt), select(stmt)).subquery().select() - - self._assert_result( - connection, - u, - [(1,)], - ) - - del DifficultParametersTest # exercises column names illegal in BQ - del DistinctOnTest # expects unquoted table names. - del HasIndexTest # BQ doesn't do the indexes that SQLA is loooking for. - del IdentityAutoincrementTest # BQ doesn't do autoincrement - - # This test makes makes assertions about generated sql and trips - # over the backquotes that we add everywhere. XXX Why do we do that? - del PostCompileParamsTest - class TimestampMicrosecondsTest(_TimestampMicrosecondsTest): data = datetime.datetime(2012, 10, 15, 12, 57, 18, 396, tzinfo=pytz.UTC) @@ -627,70 +529,15 @@ def test_round_trip_executemany(self, connection): test_round_trip_executemany ) - class RowCountTest(_RowCountTest): - @classmethod - def insert_data(cls, connection): - cls.data = data = [ - ("Angela", "A"), - ("Andrew", "A"), - ("Anand", "A"), - ("Bob", "B"), - ("Bobette", "B"), - ("Buffy", "B"), - ("Charlie", "C"), - ("Cynthia", "C"), - ("Chris", "C"), - ] - - employees_table = cls.tables.employees - connection.execute( - employees_table.insert(), - [ - {"employee_id": i, "name": n, "department": d} - for i, (n, d) in enumerate(data) - ], - ) - - class InsertBehaviorTest(_InsertBehaviorTest): - @pytest.mark.skip( - "BQ has no autoinc and client-side defaults can't work for select." - ) - def test_insert_from_select_autoinc(cls): - pass - - class SimpleUpdateDeleteTest(_SimpleUpdateDeleteTest): - """The base tests fail if operations return rows for some reason.""" - - def test_update(self): - t = self.tables.plain_pk - r = config.db.execute(t.update().where(t.c.id == 2), data="d2_new") - assert not r.is_insert - - eq_( - config.db.execute(t.select().order_by(t.c.id)).fetchall(), - [(1, "d1"), (2, "d2_new"), (3, "d3")], - ) - - def test_delete(self): - t = self.tables.plain_pk - r = config.db.execute(t.delete().where(t.c.id == 2)) - assert not r.is_insert - eq_( - config.db.execute(t.select().order_by(t.c.id)).fetchall(), - [(1, "d1"), (3, "d3")], - ) - - -# Quotes aren't allowed in BigQuery table names. -del QuotedNameArgumentTest +class CTETest(_CTETest): + @pytest.mark.skip("Can't use CTEs with insert") + def test_insert_from_select_round_trip(self): + pass -# class InsertBehaviorTest(_InsertBehaviorTest): -# @pytest.mark.skip( -# "BQ has no autoinc and client-side defaults can't work for select." -# ) -# def test_insert_from_select_autoinc(cls): -# pass + @pytest.mark.skip("Recusive CTEs aren't supported.") + def test_select_recursive_round_trip(self): + pass class ExistsTest(_ExistsTest): @@ -725,42 +572,43 @@ def test_select_exists_false(self, connection): ) -# This test requires features (indexes, primary keys, etc., that BigQuery doesn't have. -del LongNameBlowoutTest - +class FetchLimitOffsetTest(_FetchLimitOffsetTest): + @pytest.mark.skip("BigQuery doesn't allow an offset without a limit.") + def test_simple_offset(self): + pass -# class SimpleUpdateDeleteTest(_SimpleUpdateDeleteTest): -# """The base tests fail if operations return rows for some reason.""" + test_bound_offset = test_simple_offset + test_expr_offset = test_simple_offset_zero = test_simple_offset + test_limit_offset_nobinds = test_simple_offset # TODO figure out + # how to prevent this from failing + # The original test is missing an order by. -# def test_update(self): -# t = self.tables.plain_pk -# r = config.db.execute(t.update().where(t.c.id == 2), data="d2_new") -# assert not r.is_insert -# # assert not r.returns_rows + # Also, note that sqlalchemy union is a union distinct, not a + # union all. This test caught that were were getting that wrong. + def test_limit_render_multiple_times(self, connection): + table = self.tables.some_table + stmt = select(table.c.id).order_by(table.c.id).limit(1).scalar_subquery() -# eq_( -# config.db.execute(t.select().order_by(t.c.id)).fetchall(), -# [(1, "d1"), (2, "d2_new"), (3, "d3")], -# ) + u = sqlalchemy.union(select(stmt), select(stmt)).subquery().select() -# def test_delete(self): -# t = self.tables.plain_pk -# r = config.db.execute(t.delete().where(t.c.id == 2)) -# assert not r.is_insert -# # assert not r.returns_rows -# eq_( -# config.db.execute(t.select().order_by(t.c.id)).fetchall(), -# [(1, "d1"), (3, "d3")], -# ) + self._assert_result( + connection, + u, + [(1,)], + ) -class CTETest(_CTETest): - @pytest.mark.skip("Can't use CTEs with insert") - def test_insert_from_select_round_trip(self): +class InsertBehaviorTest(_InsertBehaviorTest): + @pytest.mark.skip( + "BQ has no autoinc and client-side defaults can't work for select." + ) + def test_insert_from_select_autoinc(cls): pass - @pytest.mark.skip("Recusive CTEs aren't supported.") - def test_select_recursive_round_trip(self): + @pytest.mark.skip( + "BQ has no autoinc and client-side defaults can't work for select." + ) + def test_no_results_for_non_returning_insert(cls): pass @@ -780,3 +628,9 @@ def test_select_recursive_round_trip(self): del ArrayTest # only appears to apply to postgresql del BizarroCharacterFKResolutionTest del HasTableTest.test_has_table_cache # TODO confirm whether BQ has table caching +del DistinctOnTest # expects unquoted table names. +del HasIndexTest # BQ doesn't do the indexes that SQLA is loooking for. +del IdentityAutoincrementTest # BQ doesn't do autoincrement +del LongNameBlowoutTest # Requires features (indexes, primary keys, etc., that BigQuery doesn't have. +del PostCompileParamsTest # BQ adds backticks to bind parameters, causing failure of tests TODO: fix this? +del QuotedNameArgumentTest # Quotes aren't allowed in BigQuery table names. diff --git a/tests/unit/conftest.py b/tests/unit/conftest.py index 2371b80b..c75113a9 100644 --- a/tests/unit/conftest.py +++ b/tests/unit/conftest.py @@ -30,18 +30,6 @@ from . import fauxdbi sqlalchemy_version = packaging.version.parse(sqlalchemy.__version__) -sqlalchemy_1_3_or_higher = pytest.mark.skipif( - sqlalchemy_version < packaging.version.parse("1.3"), - reason="requires sqlalchemy 1.3 or higher", -) -sqlalchemy_1_4_or_higher = pytest.mark.skipif( - sqlalchemy_version < packaging.version.parse("1.4"), - reason="requires sqlalchemy 1.4 or higher", -) -sqlalchemy_before_1_4 = pytest.mark.skipif( - sqlalchemy_version >= packaging.version.parse("1.4"), - reason="requires sqlalchemy 1.3 or lower", -) sqlalchemy_before_2_0 = pytest.mark.skipif( sqlalchemy_version >= packaging.version.parse("2.0"), reason="requires sqlalchemy 1.3 or lower", diff --git a/tests/unit/test_compiler.py b/tests/unit/test_compiler.py index 19993761..5ac71485 100644 --- a/tests/unit/test_compiler.py +++ b/tests/unit/test_compiler.py @@ -22,8 +22,6 @@ from .conftest import setup_table from .conftest import ( - sqlalchemy_1_4_or_higher, - sqlalchemy_before_1_4, sqlalchemy_2_0_or_higher, sqlalchemy_before_2_0, ) @@ -63,7 +61,6 @@ def test_cant_compile_unnamed_column(faux_conn, metadata): sqlalchemy.Column(sqlalchemy.Integer).compile(faux_conn) -@sqlalchemy_1_4_or_higher def test_no_alias_for_known_tables(faux_conn, metadata): # See: https://github.com/googleapis/python-bigquery-sqlalchemy/issues/353 table = setup_table( @@ -85,7 +82,6 @@ def test_no_alias_for_known_tables(faux_conn, metadata): assert found_sql == expected_sql -@sqlalchemy_1_4_or_higher def test_no_alias_for_known_tables_cte(faux_conn, metadata): # See: https://github.com/googleapis/python-bigquery-sqlalchemy/issues/368 table = setup_table( @@ -239,7 +235,6 @@ def test_no_implicit_join_for_inner_unnest(faux_conn, metadata): assert found_outer_sql == expected_outer_sql -@sqlalchemy_1_4_or_higher def test_no_implicit_join_asterix_for_inner_unnest_no_table2_column( faux_conn, metadata ): @@ -264,7 +259,6 @@ def test_no_implicit_join_asterix_for_inner_unnest_no_table2_column( assert found_outer_sql == expected_outer_sql -@sqlalchemy_1_4_or_higher def test_no_implicit_join_for_inner_unnest_no_table2_column(faux_conn, metadata): # See: https://github.com/googleapis/python-bigquery-sqlalchemy/issues/368 q = prepare_implicit_join_base_query(faux_conn, metadata, False, False) diff --git a/tests/unit/test_compliance.py b/tests/unit/test_compliance.py index 630d5058..bd90d936 100644 --- a/tests/unit/test_compliance.py +++ b/tests/unit/test_compliance.py @@ -27,7 +27,7 @@ from sqlalchemy import Column, Integer, literal_column, select, String, Table, union from sqlalchemy.testing.assertions import eq_, in_ -from .conftest import setup_table, sqlalchemy_1_3_or_higher +from .conftest import setup_table def assert_result(connection, sel, expected, params=()): @@ -106,7 +106,6 @@ def test_percent_sign_round_trip(faux_conn, metadata): ) -@sqlalchemy_1_3_or_higher def test_empty_set_against_integer(faux_conn): table = some_table(faux_conn) @@ -119,7 +118,6 @@ def test_empty_set_against_integer(faux_conn): assert_result(faux_conn, stmt, [], params={"q": []}) -@sqlalchemy_1_3_or_higher def test_null_in_empty_set_is_false(faux_conn): stmt = select( sqlalchemy.case( diff --git a/tests/unit/test_select.py b/tests/unit/test_select.py index 55acf4a0..ad80047a 100644 --- a/tests/unit/test_select.py +++ b/tests/unit/test_select.py @@ -20,20 +20,13 @@ import datetime from decimal import Decimal -import packaging.version import pytest import sqlalchemy from sqlalchemy import not_ import sqlalchemy_bigquery -from .conftest import ( - setup_table, - sqlalchemy_version, - sqlalchemy_1_3_or_higher, - sqlalchemy_1_4_or_higher, - sqlalchemy_before_1_4, -) +from .conftest import setup_table def test_labels_not_forced(faux_conn): @@ -225,20 +218,6 @@ def test_disable_quote(faux_conn): assert faux_conn.test_data["execute"][-1][0] == ("SELECT `t`.foo \nFROM `t`") -@sqlalchemy_before_1_4 -def test_select_in_lit_13(faux_conn): - [[isin]] = faux_conn.execute( - sqlalchemy.select(sqlalchemy.literal(1).in_([1, 2, 3])) - ) - assert isin - assert faux_conn.test_data["execute"][-1] == ( - "SELECT %(param_1:INT64)s IN " - "(%(param_2:INT64)s, %(param_3:INT64)s, %(param_4:INT64)s) AS `anon_1`", - {"param_1": 1, "param_2": 1, "param_3": 2, "param_4": 3}, - ) - - -@sqlalchemy_1_4_or_higher def test_select_in_lit(faux_conn, last_query): faux_conn.execute(sqlalchemy.select(sqlalchemy.literal(1).in_([1, 2, 3]))) last_query( @@ -248,81 +227,45 @@ def test_select_in_lit(faux_conn, last_query): def test_select_in_param(faux_conn, last_query): - [[isin]] = faux_conn.execute( + faux_conn.execute( sqlalchemy.select( sqlalchemy.literal(1).in_(sqlalchemy.bindparam("q", expanding=True)) ), dict(q=[1, 2, 3]), ) - if sqlalchemy_version >= packaging.version.parse("1.4"): - last_query( - "SELECT %(param_1:INT64)s IN UNNEST(%(q:INT64)s) AS `anon_1`", - {"param_1": 1, "q": [1, 2, 3]}, - ) - else: - assert isin - last_query( - "SELECT %(param_1:INT64)s IN UNNEST(" - "[ %(q_1:INT64)s, %(q_2:INT64)s, %(q_3:INT64)s ]" - ") AS `anon_1`", - {"param_1": 1, "q_1": 1, "q_2": 2, "q_3": 3}, - ) + + last_query( + "SELECT %(param_1:INT64)s IN UNNEST(%(q:INT64)s) AS `anon_1`", + {"param_1": 1, "q": [1, 2, 3]}, + ) def test_select_in_param1(faux_conn, last_query): - [[isin]] = faux_conn.execute( + faux_conn.execute( sqlalchemy.select( sqlalchemy.literal(1).in_(sqlalchemy.bindparam("q", expanding=True)) ), dict(q=[1]), ) - if sqlalchemy_version >= packaging.version.parse("1.4"): - last_query( - "SELECT %(param_1:INT64)s IN UNNEST(%(q:INT64)s) AS `anon_1`", - {"param_1": 1, "q": [1]}, - ) - else: - assert isin - last_query( - "SELECT %(param_1:INT64)s IN UNNEST(" "[ %(q_1:INT64)s ]" ") AS `anon_1`", - {"param_1": 1, "q_1": 1}, - ) + last_query( + "SELECT %(param_1:INT64)s IN UNNEST(%(q:INT64)s) AS `anon_1`", + {"param_1": 1, "q": [1]}, + ) -@sqlalchemy_1_3_or_higher def test_select_in_param_empty(faux_conn, last_query): - [[isin]] = faux_conn.execute( + faux_conn.execute( sqlalchemy.select( sqlalchemy.literal(1).in_(sqlalchemy.bindparam("q", expanding=True)) ), dict(q=[]), ) - if sqlalchemy_version >= packaging.version.parse("1.4"): - last_query( - "SELECT %(param_1:INT64)s IN UNNEST(%(q:INT64)s) AS `anon_1`", - {"param_1": 1, "q": []}, - ) - else: - assert not isin - last_query( - "SELECT %(param_1:INT64)s IN UNNEST([ ]) AS `anon_1`", {"param_1": 1} - ) - - -@sqlalchemy_before_1_4 -def test_select_notin_lit13(faux_conn): - [[isnotin]] = faux_conn.execute( - sqlalchemy.select(sqlalchemy.literal(0).notin_([1, 2, 3])) - ) - assert isnotin - assert faux_conn.test_data["execute"][-1] == ( - "SELECT (%(param_1:INT64)s NOT IN " - "(%(param_2:INT64)s, %(param_3:INT64)s, %(param_4:INT64)s)) AS `anon_1`", - {"param_1": 0, "param_2": 1, "param_3": 2, "param_4": 3}, + last_query( + "SELECT %(param_1:INT64)s IN UNNEST(%(q:INT64)s) AS `anon_1`", + {"param_1": 1, "q": []}, ) -@sqlalchemy_1_4_or_higher def test_select_notin_lit(faux_conn, last_query): faux_conn.execute(sqlalchemy.select(sqlalchemy.literal(0).notin_([1, 2, 3]))) last_query( @@ -332,45 +275,29 @@ def test_select_notin_lit(faux_conn, last_query): def test_select_notin_param(faux_conn, last_query): - [[isnotin]] = faux_conn.execute( + faux_conn.execute( sqlalchemy.select( sqlalchemy.literal(1).notin_(sqlalchemy.bindparam("q", expanding=True)) ), dict(q=[1, 2, 3]), ) - if sqlalchemy_version >= packaging.version.parse("1.4"): - last_query( - "SELECT (%(param_1:INT64)s NOT IN UNNEST(%(q:INT64)s)) AS `anon_1`", - {"param_1": 1, "q": [1, 2, 3]}, - ) - else: - assert not isnotin - last_query( - "SELECT (%(param_1:INT64)s NOT IN UNNEST(" - "[ %(q_1:INT64)s, %(q_2:INT64)s, %(q_3:INT64)s ]" - ")) AS `anon_1`", - {"param_1": 1, "q_1": 1, "q_2": 2, "q_3": 3}, - ) + last_query( + "SELECT (%(param_1:INT64)s NOT IN UNNEST(%(q:INT64)s)) AS `anon_1`", + {"param_1": 1, "q": [1, 2, 3]}, + ) -@sqlalchemy_1_3_or_higher def test_select_notin_param_empty(faux_conn, last_query): - [[isnotin]] = faux_conn.execute( + faux_conn.execute( sqlalchemy.select( sqlalchemy.literal(1).notin_(sqlalchemy.bindparam("q", expanding=True)) ), dict(q=[]), ) - if sqlalchemy_version >= packaging.version.parse("1.4"): - last_query( - "SELECT (%(param_1:INT64)s NOT IN UNNEST(%(q:INT64)s)) AS `anon_1`", - {"param_1": 1, "q": []}, - ) - else: - assert isnotin - last_query( - "SELECT (%(param_1:INT64)s NOT IN UNNEST([ ])) AS `anon_1`", {"param_1": 1} - ) + last_query( + "SELECT (%(param_1:INT64)s NOT IN UNNEST(%(q:INT64)s)) AS `anon_1`", + {"param_1": 1, "q": []}, + ) def test_literal_binds_kwarg_with_an_IN_operator_252(faux_conn): @@ -391,7 +318,6 @@ def nstr(q): ) -@sqlalchemy_1_4_or_higher @pytest.mark.parametrize("alias", [True, False]) def test_unnest(faux_conn, alias): from sqlalchemy import String @@ -409,7 +335,6 @@ def test_unnest(faux_conn, alias): ) -@sqlalchemy_1_4_or_higher @pytest.mark.parametrize("alias", [True, False]) def test_table_valued_alias_w_multiple_references_to_the_same_table(faux_conn, alias): from sqlalchemy import String @@ -428,7 +353,6 @@ def test_table_valued_alias_w_multiple_references_to_the_same_table(faux_conn, a ) -@sqlalchemy_1_4_or_higher @pytest.mark.parametrize("alias", [True, False]) def test_unnest_w_no_table_references(faux_conn, alias): fcall = sqlalchemy.func.unnest([1, 2, 3]) @@ -452,10 +376,6 @@ def test_array_indexing(faux_conn, metadata): assert got == "SELECT `t`.`a`[OFFSET(%(a_1:INT64)s)] AS `anon_1` \nFROM `t`" -@pytest.mark.skipif( - packaging.version.parse(sqlalchemy.__version__) < packaging.version.parse("1.4"), - reason="regexp_match support requires version 1.4 or higher", -) def test_visit_regexp_match_op_binary(faux_conn): table = setup_table( faux_conn, @@ -472,10 +392,6 @@ def test_visit_regexp_match_op_binary(faux_conn): assert result == expected -@pytest.mark.skipif( - packaging.version.parse(sqlalchemy.__version__) < packaging.version.parse("1.4"), - reason="regexp_match support requires version 1.4 or higher", -) def test_visit_not_regexp_match_op_binary(faux_conn): table = setup_table( faux_conn, diff --git a/tests/unit/test_sqlalchemy_bigquery.py b/tests/unit/test_sqlalchemy_bigquery.py index d64e1b97..db20e2f0 100644 --- a/tests/unit/test_sqlalchemy_bigquery.py +++ b/tests/unit/test_sqlalchemy_bigquery.py @@ -10,7 +10,6 @@ from google.cloud import bigquery from google.cloud.bigquery.dataset import DatasetListItem from google.cloud.bigquery.table import TableListItem -import packaging.version import pytest import sqlalchemy @@ -227,12 +226,7 @@ def test_unnest_function(args, kw): f = sqlalchemy.func.unnest(*args, **kw) assert isinstance(f.type, sqlalchemy.String) - if packaging.version.parse(sqlalchemy.__version__) >= packaging.version.parse( - "1.4" - ): - assert isinstance( - sqlalchemy.select(f).subquery().c.unnest.type, sqlalchemy.String - ) + assert isinstance(sqlalchemy.select(f).subquery().c.unnest.type, sqlalchemy.String) @mock.patch("sqlalchemy_bigquery._helpers.create_bigquery_client") From f3cb2d1f93473e5c5aa509367a7a7c3eb5433611 Mon Sep 17 00:00:00 2001 From: Kira Date: Mon, 29 Jan 2024 14:41:32 -0800 Subject: [PATCH 40/62] Revert "chore: cleanup compliance tests for sqlalchemy migration" (#1015) --- sqlalchemy_bigquery/_struct.py | 42 +- sqlalchemy_bigquery/base.py | 35 +- sqlalchemy_bigquery/requirements.py | 1 + .../test_dialect_compliance.py | 574 +++++++++++------- tests/unit/conftest.py | 12 + tests/unit/test_compiler.py | 6 + tests/unit/test_compliance.py | 4 +- tests/unit/test_select.py | 136 ++++- tests/unit/test_sqlalchemy_bigquery.py | 8 +- 9 files changed, 560 insertions(+), 258 deletions(-) diff --git a/sqlalchemy_bigquery/_struct.py b/sqlalchemy_bigquery/_struct.py index 309d1080..7c084c98 100644 --- a/sqlalchemy_bigquery/_struct.py +++ b/sqlalchemy_bigquery/_struct.py @@ -17,14 +17,20 @@ # IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN # CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +import packaging.version import sqlalchemy.sql.default_comparator import sqlalchemy.sql.sqltypes import sqlalchemy.types from . import base -import sqlalchemy.sql.coercions -import sqlalchemy.sql.roles +sqlalchemy_1_4_or_more = packaging.version.parse( + sqlalchemy.__version__ +) >= packaging.version.parse("1.4") + +if sqlalchemy_1_4_or_more: + import sqlalchemy.sql.coercions + import sqlalchemy.sql.roles def _get_subtype_col_spec(type_): @@ -103,14 +109,30 @@ def __getattr__(self, name): comparator_factory = Comparator -def _field_index(self, name, operator): - return sqlalchemy.sql.coercions.expect( - sqlalchemy.sql.roles.BinaryElementRole, - name, - expr=self.expr, - operator=operator, - bindparam_type=sqlalchemy.types.String(), - ) +# In the implementations of _field_index below, we're stealing from +# the JSON type implementation, but the code to steal changed in +# 1.4. :/ + +if sqlalchemy_1_4_or_more: + + def _field_index(self, name, operator): + return sqlalchemy.sql.coercions.expect( + sqlalchemy.sql.roles.BinaryElementRole, + name, + expr=self.expr, + operator=operator, + bindparam_type=sqlalchemy.types.String(), + ) + +else: + + def _field_index(self, name, operator): + return sqlalchemy.sql.default_comparator._check_literal( + self.expr, + operator, + name, + bindparam_type=sqlalchemy.types.String(), + ) def struct_getitem_op(a, b): diff --git a/sqlalchemy_bigquery/base.py b/sqlalchemy_bigquery/base.py index a3d88674..da4f18fc 100644 --- a/sqlalchemy_bigquery/base.py +++ b/sqlalchemy_bigquery/base.py @@ -163,7 +163,7 @@ def get_insert_default(self, column): # pragma: NO COVER """, flags=re.IGNORECASE | re.VERBOSE, ) - def __distribute_types_to_expanded_placeholders(self, m): # pragma: NO COVER + def __distribute_types_to_expanded_placeholders(self, m): # If we have an in parameter, it sometimes gets expaned to 0 or more # parameters and we need to move the type marker to each # parameter. @@ -174,8 +174,6 @@ def __distribute_types_to_expanded_placeholders(self, m): # pragma: NO COVER # suffixes refect that when an array parameter is expanded, # numeric suffixes are added. For example, a placeholder like # `%(foo)s` gets expaneded to `%(foo_0)s, `%(foo_1)s, ...`. - - # Coverage: despite our best efforts, never recognized this segment of code as being tested. placeholders, type_ = m.groups() if placeholders: placeholders = placeholders.replace(")", f":{type_})") @@ -358,7 +356,11 @@ def group_by_clause(self, select, **kw): __sqlalchemy_version_info = packaging.version.parse(sqlalchemy.__version__) - __expanding_text = "POSTCOMPILE" + __expanding_text = ( + "EXPANDING" + if __sqlalchemy_version_info < packaging.version.parse("1.4") + else "POSTCOMPILE" + ) # https://github.com/sqlalchemy/sqlalchemy/commit/f79df12bd6d99b8f6f09d4bf07722638c4b4c159 __expanding_conflict = ( @@ -386,6 +388,9 @@ def visit_in_op_binary(self, binary, operator_, **kw): self._generate_generic_binary(binary, " IN ", **kw) ) + def visit_empty_set_expr(self, element_types, **kw): + return "" + def visit_not_in_op_binary(self, binary, operator, **kw): return ( "(" @@ -419,16 +424,31 @@ def visit_contains_op_binary(self, binary, operator, **kw): self._maybe_reescape(binary), operator, **kw ) + def visit_notcontains_op_binary(self, binary, operator, **kw): + return super(BigQueryCompiler, self).visit_notcontains_op_binary( + self._maybe_reescape(binary), operator, **kw + ) + def visit_startswith_op_binary(self, binary, operator, **kw): return super(BigQueryCompiler, self).visit_startswith_op_binary( self._maybe_reescape(binary), operator, **kw ) + def visit_notstartswith_op_binary(self, binary, operator, **kw): + return super(BigQueryCompiler, self).visit_notstartswith_op_binary( + self._maybe_reescape(binary), operator, **kw + ) + def visit_endswith_op_binary(self, binary, operator, **kw): return super(BigQueryCompiler, self).visit_endswith_op_binary( self._maybe_reescape(binary), operator, **kw ) + def visit_notendswith_op_binary(self, binary, operator, **kw): + return super(BigQueryCompiler, self).visit_notendswith_op_binary( + self._maybe_reescape(binary), operator, **kw + ) + ############################################################################ __placeholder = re.compile(r"%\(([^\]:]+)(:[^\]:]+)?\)s$").match @@ -490,8 +510,7 @@ def visit_bindparam( # here, because then we can't do a recompile later (e.g., first # print the statment, then execute it). See issue #357. # - # Coverage: despite our best efforts, never recognized this segment of code as being tested. - if getattr(bindparam, "expand_op", None) is not None: # pragma: NO COVER + if getattr(bindparam, "expand_op", None) is not None: assert bindparam.expand_op.__name__.endswith("in_op") # in in bindparam = bindparam._clone(maintain_key=True) bindparam.expanding = False @@ -1259,6 +1278,10 @@ def do_rollback(self, dbapi_connection): # BigQuery has no support for transactions. pass + def _check_unicode_returns(self, connection, additional_tests=None): + # requests gives back Unicode strings + return True + def get_view_definition(self, connection, view_name, schema=None, **kw): if isinstance(connection, Engine): connection = connection.connect() diff --git a/sqlalchemy_bigquery/requirements.py b/sqlalchemy_bigquery/requirements.py index 118e3946..af6dec75 100644 --- a/sqlalchemy_bigquery/requirements.py +++ b/sqlalchemy_bigquery/requirements.py @@ -24,6 +24,7 @@ import sqlalchemy.testing.requirements import sqlalchemy.testing.exclusions +from sqlalchemy.testing.exclusions import against, only_on supported = sqlalchemy.testing.exclusions.open unsupported = sqlalchemy.testing.exclusions.closed diff --git a/tests/sqlalchemy_dialect_compliance/test_dialect_compliance.py b/tests/sqlalchemy_dialect_compliance/test_dialect_compliance.py index 5420bf32..7677510e 100644 --- a/tests/sqlalchemy_dialect_compliance/test_dialect_compliance.py +++ b/tests/sqlalchemy_dialect_compliance/test_dialect_compliance.py @@ -29,21 +29,18 @@ import sqlalchemy.testing.suite.test_types import sqlalchemy.sql.sqltypes from sqlalchemy.testing import util, config +from sqlalchemy.testing import is_false +from sqlalchemy.testing import is_true +from sqlalchemy.testing import is_ from sqlalchemy.testing.assertions import eq_ -from sqlalchemy.testing.suite import select, exists +from sqlalchemy.testing.suite import config, select, exists from sqlalchemy.testing.suite import * # noqa -from sqlalchemy.testing.suite import Integer, Table, Column, String, bindparam, testing from sqlalchemy.testing.suite import ( + ComponentReflectionTest as _ComponentReflectionTest, CTETest as _CTETest, ExistsTest as _ExistsTest, - FetchLimitOffsetTest as _FetchLimitOffsetTest, - DifficultParametersTest as _DifficultParametersTest, - DistinctOnTest, - HasIndexTest, - IdentityAutoincrementTest, InsertBehaviorTest as _InsertBehaviorTest, LongNameBlowoutTest, - PostCompileParamsTest, QuotedNameArgumentTest, SimpleUpdateDeleteTest as _SimpleUpdateDeleteTest, TimestampMicrosecondsTest as _TimestampMicrosecondsTest, @@ -56,23 +53,156 @@ from sqlalchemy.testing.suite.test_reflection import ( BizarroCharacterFKResolutionTest, ComponentReflectionTest, + OneConnectionTablesTest, HasTableTest, ) if packaging.version.parse(sqlalchemy.__version__) >= packaging.version.parse("2.0"): import uuid from sqlalchemy.sql import type_coerce + from sqlalchemy import Uuid from sqlalchemy.testing.suite import ( TrueDivTest as _TrueDivTest, IntegerTest as _IntegerTest, NumericTest as _NumericTest, + DifficultParametersTest as _DifficultParametersTest, + FetchLimitOffsetTest as _FetchLimitOffsetTest, + PostCompileParamsTest, StringTest as _StringTest, UuidTest as _UuidTest, ) - class DifficultParametersTest(_DifficultParametersTest): - """There are some parameters that don't work with bigquery that were removed from this test""" + class TimestampMicrosecondsTest(_TimestampMicrosecondsTest): + data = datetime.datetime(2012, 10, 15, 12, 57, 18, 396, tzinfo=pytz.UTC) + + def test_select_direct(self, connection): + # This func added because this test was failing when passed the + # UTC timezone. + + def literal(value, type_=None): + assert value == self.data + + if type_ is not None: + assert type_ is self.datatype + + return sqlalchemy.sql.elements.literal(value, self.datatype) + + with mock.patch("sqlalchemy.testing.suite.test_types.literal", literal): + super(TimestampMicrosecondsTest, self).test_select_direct(connection) + + def test_round_trip_executemany(self, connection): + unicode_table = self.tables.unicode_table + connection.execute( + unicode_table.insert(), + [{"id": i, "unicode_data": self.data} for i in range(3)], + ) + + rows = connection.execute(select(unicode_table.c.unicode_data)).fetchall() + eq_(rows, [(self.data,) for i in range(3)]) + for row in rows: + # 2.0 had no support for util.text_type + assert isinstance(row[0], str) + + sqlalchemy.testing.suite.test_types._UnicodeFixture.test_round_trip_executemany = ( + test_round_trip_executemany + ) + + class TrueDivTest(_TrueDivTest): + @pytest.mark.skip("BQ rounds based on datatype") + def test_floordiv_integer(self): + pass + + @pytest.mark.skip("BQ rounds based on datatype") + def test_floordiv_integer_bound(self): + pass + + class SimpleUpdateDeleteTest(_SimpleUpdateDeleteTest): + """The base tests fail if operations return rows for some reason.""" + + def test_update(self): + t = self.tables.plain_pk + connection = config.db.connect() + # In SQLAlchemy 2.0, the datatype changed to dict in the following function. + r = connection.execute(t.update().where(t.c.id == 2), dict(data="d2_new")) + assert not r.is_insert + + eq_( + connection.execute(t.select().order_by(t.c.id)).fetchall(), + [(1, "d1"), (2, "d2_new"), (3, "d3")], + ) + + def test_delete(self): + t = self.tables.plain_pk + connection = config.db.connect() + r = connection.execute(t.delete().where(t.c.id == 2)) + assert not r.is_insert + eq_( + connection.execute(t.select().order_by(t.c.id)).fetchall(), + [(1, "d1"), (3, "d3")], + ) + + class InsertBehaviorTest(_InsertBehaviorTest): + @pytest.mark.skip( + "BQ has no autoinc and client-side defaults can't work for select." + ) + def test_insert_from_select_autoinc(cls): + pass + + @pytest.mark.skip( + "BQ has no autoinc and client-side defaults can't work for select." + ) + def test_no_results_for_non_returning_insert(cls): + pass + + # BQ has no autoinc and client-side defaults can't work for select + del _IntegerTest.test_huge_int_auto_accommodation + + class NumericTest(_NumericTest): + @testing.fixture + def do_numeric_test(self, metadata, connection): + def run(type_, input_, output, filter_=None, check_scale=False): + t = Table("t", metadata, Column("x", type_)) + t.create(connection) + connection.execute(t.insert(), [{"x": x} for x in input_]) + + result = {row[0] for row in connection.execute(t.select())} + output = set(output) + if filter_: + result = {filter_(x) for x in result} + output = {filter_(x) for x in output} + eq_(result, output) + if check_scale: + eq_([str(x) for x in result], [str(x) for x in output]) + + where_expr = True + + # Adding where clause for 2.0 compatibility + connection.execute(t.delete().where(where_expr)) + + # test that this is actually a number! + # note we have tiny scale here as we have tests with very + # small scale Numeric types. PostgreSQL will raise an error + # if you use values outside the available scale. + if type_.asdecimal: + test_value = decimal.Decimal("2.9") + add_value = decimal.Decimal("37.12") + else: + test_value = 2.9 + add_value = 37.12 + + connection.execute(t.insert(), {"x": test_value}) + assert_we_are_a_number = connection.scalar( + select(type_coerce(t.c.x + add_value, type_)) + ) + eq_( + round(assert_we_are_a_number, 3), + round(test_value + add_value, 3), + ) + + return run + class DifficultParametersTest(_DifficultParametersTest): + # removed parameters that dont work with bigquery tough_parameters = testing.combinations( ("boring",), ("per cent",), @@ -177,149 +307,34 @@ def test_standalone_bindparam_escape_expanding( res = connection.scalars(stmt, {paramname: ["d", "a"]}).all() eq_(res, [1, 4]) - # BQ has no autoinc and client-side defaults can't work for select - del _IntegerTest.test_huge_int_auto_accommodation - - class NumericTest(_NumericTest): - """Added a where clause for BQ compatibility.""" - - @testing.fixture - def do_numeric_test(self, metadata, connection): - def run(type_, input_, output, filter_=None, check_scale=False): - t = Table("t", metadata, Column("x", type_)) - t.create(connection) - connection.execute(t.insert(), [{"x": x} for x in input_]) - - result = {row[0] for row in connection.execute(t.select())} - output = set(output) - if filter_: - result = {filter_(x) for x in result} - output = {filter_(x) for x in output} - eq_(result, output) - if check_scale: - eq_([str(x) for x in result], [str(x) for x in output]) - - where_expr = True - - connection.execute(t.delete().where(where_expr)) - - if type_.asdecimal: - test_value = decimal.Decimal("2.9") - add_value = decimal.Decimal("37.12") - else: - test_value = 2.9 - add_value = 37.12 - - connection.execute(t.insert(), {"x": test_value}) - assert_we_are_a_number = connection.scalar( - select(type_coerce(t.c.x + add_value, type_)) - ) - eq_( - round(assert_we_are_a_number, 3), - round(test_value + add_value, 3), - ) - - return run - - class TimestampMicrosecondsTest(_TimestampMicrosecondsTest): - """BQ has no support for BQ util.text_type""" - - data = datetime.datetime(2012, 10, 15, 12, 57, 18, 396, tzinfo=pytz.UTC) - - def test_select_direct(self, connection): - # This func added because this test was failing when passed the - # UTC timezone. - - def literal(value, type_=None): - assert value == self.data - - if type_ is not None: - assert type_ is self.datatype - - return sqlalchemy.sql.elements.literal(value, self.datatype) - - with mock.patch("sqlalchemy.testing.suite.test_types.literal", literal): - super(TimestampMicrosecondsTest, self).test_select_direct(connection) - - def test_round_trip_executemany(self, connection): - unicode_table = self.tables.unicode_table - connection.execute( - unicode_table.insert(), - [{"id": i, "unicode_data": self.data} for i in range(3)], - ) - - rows = connection.execute(select(unicode_table.c.unicode_data)).fetchall() - eq_(rows, [(self.data,) for i in range(3)]) - for row in rows: - assert isinstance(row[0], str) - - sqlalchemy.testing.suite.test_types._UnicodeFixture.test_round_trip_executemany = ( - test_round_trip_executemany - ) - - class TrueDivTest(_TrueDivTest): - @pytest.mark.skip("BQ rounds based on datatype") - def test_floordiv_integer(self): + class FetchLimitOffsetTest(_FetchLimitOffsetTest): + @pytest.mark.skip("BigQuery doesn't allow an offset without a limit.") + def test_simple_offset(self): pass - @pytest.mark.skip("BQ rounds based on datatype") - def test_floordiv_integer_bound(self): - pass + test_bound_offset = test_simple_offset + test_expr_offset = test_simple_offset_zero = test_simple_offset + test_limit_offset_nobinds = test_simple_offset # TODO figure out + # how to prevent this from failing + # The original test is missing an order by. - class SimpleUpdateDeleteTest(_SimpleUpdateDeleteTest): - """The base tests fail if operations return rows for some reason.""" + # The original test is missing an order by. - def test_update(self): - t = self.tables.plain_pk - connection = config.db.connect() - # In SQLAlchemy 2.0, the datatype changed to dict in the following function. - r = connection.execute(t.update().where(t.c.id == 2), dict(data="d2_new")) - assert not r.is_insert - - eq_( - connection.execute(t.select().order_by(t.c.id)).fetchall(), - [(1, "d1"), (2, "d2_new"), (3, "d3")], - ) - - def test_delete(self): - t = self.tables.plain_pk - connection = config.db.connect() - r = connection.execute(t.delete().where(t.c.id == 2)) - assert not r.is_insert - eq_( - connection.execute(t.select().order_by(t.c.id)).fetchall(), - [(1, "d1"), (3, "d3")], - ) + # Also, note that sqlalchemy union is a union distinct, not a + # union all. This test caught that were were getting that wrong. + def test_limit_render_multiple_times(self, connection): + table = self.tables.some_table + stmt = select(table.c.id).order_by(table.c.id).limit(1).scalar_subquery() - class StringTest(_StringTest): - """Added a where clause for BQ compatibility""" + u = sqlalchemy.union(select(stmt), select(stmt)).subquery().select() - def test_dont_truncate_rightside( - self, metadata, connection, expr=None, expected=None - ): - t = Table( - "t", - metadata, - Column("x", String(2)), - Column("id", Integer, primary_key=True), - ) - t.create(connection) - connection.connection.commit() - connection.execute( - t.insert(), - [{"x": "AB", "id": 1}, {"x": "BC", "id": 2}, {"x": "AC", "id": 3}], + self._assert_result( + connection, + u, + [(1,)], ) - combinations = [("%B%", ["AB", "BC"]), ("A%C", ["AC"]), ("A%C%Z", [])] - - for args in combinations: - eq_( - connection.scalars(select(t.c.x).where(t.c.x.like(args[0]))).all(), - args[1], - ) class UuidTest(_UuidTest): - """BQ needs to pass in UUID as a string""" - @classmethod def define_tables(cls, metadata): Table( @@ -424,38 +439,81 @@ def test_uuid_returning(self, connection): eq_(row, (data, str_data, data, str_data)) -else: - from sqlalchemy.testing.suite import ( - RowCountTest as _RowCountTest, - ) + class StringTest(_StringTest): + def test_dont_truncate_rightside( + self, metadata, connection, expr=None, expected=None + ): + t = Table( + "t", + metadata, + Column("x", String(2)), + Column("id", Integer, primary_key=True), + ) + t.create(connection) + connection.connection.commit() + connection.execute( + t.insert(), + [{"x": "AB", "id": 1}, {"x": "BC", "id": 2}, {"x": "AC", "id": 3}], + ) + combinations = [("%B%", ["AB", "BC"]), ("A%C", ["AC"]), ("A%C%Z", [])] - del DifficultParametersTest # exercises column names illegal in BQ + for args in combinations: + eq_( + connection.scalars(select(t.c.x).where(t.c.x.like(args[0]))).all(), + args[1], + ) - class RowCountTest(_RowCountTest): - """""" + # from else statement .... + del DistinctOnTest # expects unquoted table names. + del HasIndexTest # BQ doesn't do the indexes that SQLA is loooking for. + del IdentityAutoincrementTest # BQ doesn't do autoincrement + del PostCompileParamsTest # BQ adds backticks to bind parameters, causing failure of tests TODO: fix this? - @classmethod - def insert_data(cls, connection): - cls.data = data = [ - ("Angela", "A"), - ("Andrew", "A"), - ("Anand", "A"), - ("Bob", "B"), - ("Bobette", "B"), - ("Buffy", "B"), - ("Charlie", "C"), - ("Cynthia", "C"), - ("Chris", "C"), - ] +elif packaging.version.parse(sqlalchemy.__version__) < packaging.version.parse("1.4"): + from sqlalchemy.testing.suite import LimitOffsetTest as _LimitOffsetTest - employees_table = cls.tables.employees - connection.execute( - employees_table.insert(), - [ - {"employee_id": i, "name": n, "department": d} - for i, (n, d) in enumerate(data) - ], - ) + class LimitOffsetTest(_LimitOffsetTest): + @pytest.mark.skip("BigQuery doesn't allow an offset without a limit.") + def test_simple_offset(self): + pass + + test_bound_offset = test_simple_offset + + class TimestampMicrosecondsTest(_TimestampMicrosecondsTest): + data = datetime.datetime(2012, 10, 15, 12, 57, 18, 396, tzinfo=pytz.UTC) + + def test_literal(self): + # The base tests doesn't set up the literal properly, because + # it doesn't pass its datatype to `literal`. + + def literal(value): + assert value == self.data + return sqlalchemy.sql.elements.literal(value, self.datatype) + + with mock.patch("sqlalchemy.testing.suite.test_types.literal", literal): + super(TimestampMicrosecondsTest, self).test_literal() + + def test_select_direct(self, connection): + # This func added because this test was failing when passed the + # UTC timezone. + + def literal(value, type_=None): + assert value == self.data + + if type_ is not None: + assert type_ is self.datatype + + return sqlalchemy.sql.elements.literal(value, self.datatype) + + with mock.patch("sqlalchemy.testing.suite.test_types.literal", literal): + super(TimestampMicrosecondsTest, self).test_select_direct(connection) + + class InsertBehaviorTest(_InsertBehaviorTest): + @pytest.mark.skip( + "BQ has no autoinc and client-side defaults can't work for select." + ) + def test_insert_from_select_autoinc(cls): + pass class SimpleUpdateDeleteTest(_SimpleUpdateDeleteTest): """The base tests fail if operations return rows for some reason.""" @@ -479,6 +537,46 @@ def test_delete(self): [(1, "d1"), (3, "d3")], ) +else: + from sqlalchemy.testing.suite import ( + FetchLimitOffsetTest as _FetchLimitOffsetTest, + RowCountTest as _RowCountTest, + ) + + class FetchLimitOffsetTest(_FetchLimitOffsetTest): + @pytest.mark.skip("BigQuery doesn't allow an offset without a limit.") + def test_simple_offset(self): + pass + + test_bound_offset = test_simple_offset + test_expr_offset = test_simple_offset_zero = test_simple_offset + test_limit_offset_nobinds = test_simple_offset # TODO figure out + # how to prevent this from failing + # The original test is missing an order by. + + # Also, note that sqlalchemy union is a union distinct, not a + # union all. This test caught that were were getting that wrong. + def test_limit_render_multiple_times(self, connection): + table = self.tables.some_table + stmt = select(table.c.id).order_by(table.c.id).limit(1).scalar_subquery() + + u = sqlalchemy.union(select(stmt), select(stmt)).subquery().select() + + self._assert_result( + connection, + u, + [(1,)], + ) + + del DifficultParametersTest # exercises column names illegal in BQ + del DistinctOnTest # expects unquoted table names. + del HasIndexTest # BQ doesn't do the indexes that SQLA is loooking for. + del IdentityAutoincrementTest # BQ doesn't do autoincrement + + # This test makes makes assertions about generated sql and trips + # over the backquotes that we add everywhere. XXX Why do we do that? + del PostCompileParamsTest + class TimestampMicrosecondsTest(_TimestampMicrosecondsTest): data = datetime.datetime(2012, 10, 15, 12, 57, 18, 396, tzinfo=pytz.UTC) @@ -529,15 +627,70 @@ def test_round_trip_executemany(self, connection): test_round_trip_executemany ) + class RowCountTest(_RowCountTest): + @classmethod + def insert_data(cls, connection): + cls.data = data = [ + ("Angela", "A"), + ("Andrew", "A"), + ("Anand", "A"), + ("Bob", "B"), + ("Bobette", "B"), + ("Buffy", "B"), + ("Charlie", "C"), + ("Cynthia", "C"), + ("Chris", "C"), + ] -class CTETest(_CTETest): - @pytest.mark.skip("Can't use CTEs with insert") - def test_insert_from_select_round_trip(self): - pass + employees_table = cls.tables.employees + connection.execute( + employees_table.insert(), + [ + {"employee_id": i, "name": n, "department": d} + for i, (n, d) in enumerate(data) + ], + ) - @pytest.mark.skip("Recusive CTEs aren't supported.") - def test_select_recursive_round_trip(self): - pass + class InsertBehaviorTest(_InsertBehaviorTest): + @pytest.mark.skip( + "BQ has no autoinc and client-side defaults can't work for select." + ) + def test_insert_from_select_autoinc(cls): + pass + + class SimpleUpdateDeleteTest(_SimpleUpdateDeleteTest): + """The base tests fail if operations return rows for some reason.""" + + def test_update(self): + t = self.tables.plain_pk + r = config.db.execute(t.update().where(t.c.id == 2), data="d2_new") + assert not r.is_insert + + eq_( + config.db.execute(t.select().order_by(t.c.id)).fetchall(), + [(1, "d1"), (2, "d2_new"), (3, "d3")], + ) + + def test_delete(self): + t = self.tables.plain_pk + r = config.db.execute(t.delete().where(t.c.id == 2)) + assert not r.is_insert + eq_( + config.db.execute(t.select().order_by(t.c.id)).fetchall(), + [(1, "d1"), (3, "d3")], + ) + + +# Quotes aren't allowed in BigQuery table names. +del QuotedNameArgumentTest + + +# class InsertBehaviorTest(_InsertBehaviorTest): +# @pytest.mark.skip( +# "BQ has no autoinc and client-side defaults can't work for select." +# ) +# def test_insert_from_select_autoinc(cls): +# pass class ExistsTest(_ExistsTest): @@ -572,43 +725,42 @@ def test_select_exists_false(self, connection): ) -class FetchLimitOffsetTest(_FetchLimitOffsetTest): - @pytest.mark.skip("BigQuery doesn't allow an offset without a limit.") - def test_simple_offset(self): - pass +# This test requires features (indexes, primary keys, etc., that BigQuery doesn't have. +del LongNameBlowoutTest - test_bound_offset = test_simple_offset - test_expr_offset = test_simple_offset_zero = test_simple_offset - test_limit_offset_nobinds = test_simple_offset # TODO figure out - # how to prevent this from failing - # The original test is missing an order by. - # Also, note that sqlalchemy union is a union distinct, not a - # union all. This test caught that were were getting that wrong. - def test_limit_render_multiple_times(self, connection): - table = self.tables.some_table - stmt = select(table.c.id).order_by(table.c.id).limit(1).scalar_subquery() +# class SimpleUpdateDeleteTest(_SimpleUpdateDeleteTest): +# """The base tests fail if operations return rows for some reason.""" - u = sqlalchemy.union(select(stmt), select(stmt)).subquery().select() +# def test_update(self): +# t = self.tables.plain_pk +# r = config.db.execute(t.update().where(t.c.id == 2), data="d2_new") +# assert not r.is_insert +# # assert not r.returns_rows - self._assert_result( - connection, - u, - [(1,)], - ) +# eq_( +# config.db.execute(t.select().order_by(t.c.id)).fetchall(), +# [(1, "d1"), (2, "d2_new"), (3, "d3")], +# ) +# def test_delete(self): +# t = self.tables.plain_pk +# r = config.db.execute(t.delete().where(t.c.id == 2)) +# assert not r.is_insert +# # assert not r.returns_rows +# eq_( +# config.db.execute(t.select().order_by(t.c.id)).fetchall(), +# [(1, "d1"), (3, "d3")], +# ) -class InsertBehaviorTest(_InsertBehaviorTest): - @pytest.mark.skip( - "BQ has no autoinc and client-side defaults can't work for select." - ) - def test_insert_from_select_autoinc(cls): + +class CTETest(_CTETest): + @pytest.mark.skip("Can't use CTEs with insert") + def test_insert_from_select_round_trip(self): pass - @pytest.mark.skip( - "BQ has no autoinc and client-side defaults can't work for select." - ) - def test_no_results_for_non_returning_insert(cls): + @pytest.mark.skip("Recusive CTEs aren't supported.") + def test_select_recursive_round_trip(self): pass @@ -628,9 +780,3 @@ def test_no_results_for_non_returning_insert(cls): del ArrayTest # only appears to apply to postgresql del BizarroCharacterFKResolutionTest del HasTableTest.test_has_table_cache # TODO confirm whether BQ has table caching -del DistinctOnTest # expects unquoted table names. -del HasIndexTest # BQ doesn't do the indexes that SQLA is loooking for. -del IdentityAutoincrementTest # BQ doesn't do autoincrement -del LongNameBlowoutTest # Requires features (indexes, primary keys, etc., that BigQuery doesn't have. -del PostCompileParamsTest # BQ adds backticks to bind parameters, causing failure of tests TODO: fix this? -del QuotedNameArgumentTest # Quotes aren't allowed in BigQuery table names. diff --git a/tests/unit/conftest.py b/tests/unit/conftest.py index c75113a9..2371b80b 100644 --- a/tests/unit/conftest.py +++ b/tests/unit/conftest.py @@ -30,6 +30,18 @@ from . import fauxdbi sqlalchemy_version = packaging.version.parse(sqlalchemy.__version__) +sqlalchemy_1_3_or_higher = pytest.mark.skipif( + sqlalchemy_version < packaging.version.parse("1.3"), + reason="requires sqlalchemy 1.3 or higher", +) +sqlalchemy_1_4_or_higher = pytest.mark.skipif( + sqlalchemy_version < packaging.version.parse("1.4"), + reason="requires sqlalchemy 1.4 or higher", +) +sqlalchemy_before_1_4 = pytest.mark.skipif( + sqlalchemy_version >= packaging.version.parse("1.4"), + reason="requires sqlalchemy 1.3 or lower", +) sqlalchemy_before_2_0 = pytest.mark.skipif( sqlalchemy_version >= packaging.version.parse("2.0"), reason="requires sqlalchemy 1.3 or lower", diff --git a/tests/unit/test_compiler.py b/tests/unit/test_compiler.py index 5ac71485..19993761 100644 --- a/tests/unit/test_compiler.py +++ b/tests/unit/test_compiler.py @@ -22,6 +22,8 @@ from .conftest import setup_table from .conftest import ( + sqlalchemy_1_4_or_higher, + sqlalchemy_before_1_4, sqlalchemy_2_0_or_higher, sqlalchemy_before_2_0, ) @@ -61,6 +63,7 @@ def test_cant_compile_unnamed_column(faux_conn, metadata): sqlalchemy.Column(sqlalchemy.Integer).compile(faux_conn) +@sqlalchemy_1_4_or_higher def test_no_alias_for_known_tables(faux_conn, metadata): # See: https://github.com/googleapis/python-bigquery-sqlalchemy/issues/353 table = setup_table( @@ -82,6 +85,7 @@ def test_no_alias_for_known_tables(faux_conn, metadata): assert found_sql == expected_sql +@sqlalchemy_1_4_or_higher def test_no_alias_for_known_tables_cte(faux_conn, metadata): # See: https://github.com/googleapis/python-bigquery-sqlalchemy/issues/368 table = setup_table( @@ -235,6 +239,7 @@ def test_no_implicit_join_for_inner_unnest(faux_conn, metadata): assert found_outer_sql == expected_outer_sql +@sqlalchemy_1_4_or_higher def test_no_implicit_join_asterix_for_inner_unnest_no_table2_column( faux_conn, metadata ): @@ -259,6 +264,7 @@ def test_no_implicit_join_asterix_for_inner_unnest_no_table2_column( assert found_outer_sql == expected_outer_sql +@sqlalchemy_1_4_or_higher def test_no_implicit_join_for_inner_unnest_no_table2_column(faux_conn, metadata): # See: https://github.com/googleapis/python-bigquery-sqlalchemy/issues/368 q = prepare_implicit_join_base_query(faux_conn, metadata, False, False) diff --git a/tests/unit/test_compliance.py b/tests/unit/test_compliance.py index bd90d936..630d5058 100644 --- a/tests/unit/test_compliance.py +++ b/tests/unit/test_compliance.py @@ -27,7 +27,7 @@ from sqlalchemy import Column, Integer, literal_column, select, String, Table, union from sqlalchemy.testing.assertions import eq_, in_ -from .conftest import setup_table +from .conftest import setup_table, sqlalchemy_1_3_or_higher def assert_result(connection, sel, expected, params=()): @@ -106,6 +106,7 @@ def test_percent_sign_round_trip(faux_conn, metadata): ) +@sqlalchemy_1_3_or_higher def test_empty_set_against_integer(faux_conn): table = some_table(faux_conn) @@ -118,6 +119,7 @@ def test_empty_set_against_integer(faux_conn): assert_result(faux_conn, stmt, [], params={"q": []}) +@sqlalchemy_1_3_or_higher def test_null_in_empty_set_is_false(faux_conn): stmt = select( sqlalchemy.case( diff --git a/tests/unit/test_select.py b/tests/unit/test_select.py index ad80047a..55acf4a0 100644 --- a/tests/unit/test_select.py +++ b/tests/unit/test_select.py @@ -20,13 +20,20 @@ import datetime from decimal import Decimal +import packaging.version import pytest import sqlalchemy from sqlalchemy import not_ import sqlalchemy_bigquery -from .conftest import setup_table +from .conftest import ( + setup_table, + sqlalchemy_version, + sqlalchemy_1_3_or_higher, + sqlalchemy_1_4_or_higher, + sqlalchemy_before_1_4, +) def test_labels_not_forced(faux_conn): @@ -218,6 +225,20 @@ def test_disable_quote(faux_conn): assert faux_conn.test_data["execute"][-1][0] == ("SELECT `t`.foo \nFROM `t`") +@sqlalchemy_before_1_4 +def test_select_in_lit_13(faux_conn): + [[isin]] = faux_conn.execute( + sqlalchemy.select(sqlalchemy.literal(1).in_([1, 2, 3])) + ) + assert isin + assert faux_conn.test_data["execute"][-1] == ( + "SELECT %(param_1:INT64)s IN " + "(%(param_2:INT64)s, %(param_3:INT64)s, %(param_4:INT64)s) AS `anon_1`", + {"param_1": 1, "param_2": 1, "param_3": 2, "param_4": 3}, + ) + + +@sqlalchemy_1_4_or_higher def test_select_in_lit(faux_conn, last_query): faux_conn.execute(sqlalchemy.select(sqlalchemy.literal(1).in_([1, 2, 3]))) last_query( @@ -227,45 +248,81 @@ def test_select_in_lit(faux_conn, last_query): def test_select_in_param(faux_conn, last_query): - faux_conn.execute( + [[isin]] = faux_conn.execute( sqlalchemy.select( sqlalchemy.literal(1).in_(sqlalchemy.bindparam("q", expanding=True)) ), dict(q=[1, 2, 3]), ) - - last_query( - "SELECT %(param_1:INT64)s IN UNNEST(%(q:INT64)s) AS `anon_1`", - {"param_1": 1, "q": [1, 2, 3]}, - ) + if sqlalchemy_version >= packaging.version.parse("1.4"): + last_query( + "SELECT %(param_1:INT64)s IN UNNEST(%(q:INT64)s) AS `anon_1`", + {"param_1": 1, "q": [1, 2, 3]}, + ) + else: + assert isin + last_query( + "SELECT %(param_1:INT64)s IN UNNEST(" + "[ %(q_1:INT64)s, %(q_2:INT64)s, %(q_3:INT64)s ]" + ") AS `anon_1`", + {"param_1": 1, "q_1": 1, "q_2": 2, "q_3": 3}, + ) def test_select_in_param1(faux_conn, last_query): - faux_conn.execute( + [[isin]] = faux_conn.execute( sqlalchemy.select( sqlalchemy.literal(1).in_(sqlalchemy.bindparam("q", expanding=True)) ), dict(q=[1]), ) - last_query( - "SELECT %(param_1:INT64)s IN UNNEST(%(q:INT64)s) AS `anon_1`", - {"param_1": 1, "q": [1]}, - ) + if sqlalchemy_version >= packaging.version.parse("1.4"): + last_query( + "SELECT %(param_1:INT64)s IN UNNEST(%(q:INT64)s) AS `anon_1`", + {"param_1": 1, "q": [1]}, + ) + else: + assert isin + last_query( + "SELECT %(param_1:INT64)s IN UNNEST(" "[ %(q_1:INT64)s ]" ") AS `anon_1`", + {"param_1": 1, "q_1": 1}, + ) +@sqlalchemy_1_3_or_higher def test_select_in_param_empty(faux_conn, last_query): - faux_conn.execute( + [[isin]] = faux_conn.execute( sqlalchemy.select( sqlalchemy.literal(1).in_(sqlalchemy.bindparam("q", expanding=True)) ), dict(q=[]), ) - last_query( - "SELECT %(param_1:INT64)s IN UNNEST(%(q:INT64)s) AS `anon_1`", - {"param_1": 1, "q": []}, + if sqlalchemy_version >= packaging.version.parse("1.4"): + last_query( + "SELECT %(param_1:INT64)s IN UNNEST(%(q:INT64)s) AS `anon_1`", + {"param_1": 1, "q": []}, + ) + else: + assert not isin + last_query( + "SELECT %(param_1:INT64)s IN UNNEST([ ]) AS `anon_1`", {"param_1": 1} + ) + + +@sqlalchemy_before_1_4 +def test_select_notin_lit13(faux_conn): + [[isnotin]] = faux_conn.execute( + sqlalchemy.select(sqlalchemy.literal(0).notin_([1, 2, 3])) + ) + assert isnotin + assert faux_conn.test_data["execute"][-1] == ( + "SELECT (%(param_1:INT64)s NOT IN " + "(%(param_2:INT64)s, %(param_3:INT64)s, %(param_4:INT64)s)) AS `anon_1`", + {"param_1": 0, "param_2": 1, "param_3": 2, "param_4": 3}, ) +@sqlalchemy_1_4_or_higher def test_select_notin_lit(faux_conn, last_query): faux_conn.execute(sqlalchemy.select(sqlalchemy.literal(0).notin_([1, 2, 3]))) last_query( @@ -275,29 +332,45 @@ def test_select_notin_lit(faux_conn, last_query): def test_select_notin_param(faux_conn, last_query): - faux_conn.execute( + [[isnotin]] = faux_conn.execute( sqlalchemy.select( sqlalchemy.literal(1).notin_(sqlalchemy.bindparam("q", expanding=True)) ), dict(q=[1, 2, 3]), ) - last_query( - "SELECT (%(param_1:INT64)s NOT IN UNNEST(%(q:INT64)s)) AS `anon_1`", - {"param_1": 1, "q": [1, 2, 3]}, - ) + if sqlalchemy_version >= packaging.version.parse("1.4"): + last_query( + "SELECT (%(param_1:INT64)s NOT IN UNNEST(%(q:INT64)s)) AS `anon_1`", + {"param_1": 1, "q": [1, 2, 3]}, + ) + else: + assert not isnotin + last_query( + "SELECT (%(param_1:INT64)s NOT IN UNNEST(" + "[ %(q_1:INT64)s, %(q_2:INT64)s, %(q_3:INT64)s ]" + ")) AS `anon_1`", + {"param_1": 1, "q_1": 1, "q_2": 2, "q_3": 3}, + ) +@sqlalchemy_1_3_or_higher def test_select_notin_param_empty(faux_conn, last_query): - faux_conn.execute( + [[isnotin]] = faux_conn.execute( sqlalchemy.select( sqlalchemy.literal(1).notin_(sqlalchemy.bindparam("q", expanding=True)) ), dict(q=[]), ) - last_query( - "SELECT (%(param_1:INT64)s NOT IN UNNEST(%(q:INT64)s)) AS `anon_1`", - {"param_1": 1, "q": []}, - ) + if sqlalchemy_version >= packaging.version.parse("1.4"): + last_query( + "SELECT (%(param_1:INT64)s NOT IN UNNEST(%(q:INT64)s)) AS `anon_1`", + {"param_1": 1, "q": []}, + ) + else: + assert isnotin + last_query( + "SELECT (%(param_1:INT64)s NOT IN UNNEST([ ])) AS `anon_1`", {"param_1": 1} + ) def test_literal_binds_kwarg_with_an_IN_operator_252(faux_conn): @@ -318,6 +391,7 @@ def nstr(q): ) +@sqlalchemy_1_4_or_higher @pytest.mark.parametrize("alias", [True, False]) def test_unnest(faux_conn, alias): from sqlalchemy import String @@ -335,6 +409,7 @@ def test_unnest(faux_conn, alias): ) +@sqlalchemy_1_4_or_higher @pytest.mark.parametrize("alias", [True, False]) def test_table_valued_alias_w_multiple_references_to_the_same_table(faux_conn, alias): from sqlalchemy import String @@ -353,6 +428,7 @@ def test_table_valued_alias_w_multiple_references_to_the_same_table(faux_conn, a ) +@sqlalchemy_1_4_or_higher @pytest.mark.parametrize("alias", [True, False]) def test_unnest_w_no_table_references(faux_conn, alias): fcall = sqlalchemy.func.unnest([1, 2, 3]) @@ -376,6 +452,10 @@ def test_array_indexing(faux_conn, metadata): assert got == "SELECT `t`.`a`[OFFSET(%(a_1:INT64)s)] AS `anon_1` \nFROM `t`" +@pytest.mark.skipif( + packaging.version.parse(sqlalchemy.__version__) < packaging.version.parse("1.4"), + reason="regexp_match support requires version 1.4 or higher", +) def test_visit_regexp_match_op_binary(faux_conn): table = setup_table( faux_conn, @@ -392,6 +472,10 @@ def test_visit_regexp_match_op_binary(faux_conn): assert result == expected +@pytest.mark.skipif( + packaging.version.parse(sqlalchemy.__version__) < packaging.version.parse("1.4"), + reason="regexp_match support requires version 1.4 or higher", +) def test_visit_not_regexp_match_op_binary(faux_conn): table = setup_table( faux_conn, diff --git a/tests/unit/test_sqlalchemy_bigquery.py b/tests/unit/test_sqlalchemy_bigquery.py index db20e2f0..d64e1b97 100644 --- a/tests/unit/test_sqlalchemy_bigquery.py +++ b/tests/unit/test_sqlalchemy_bigquery.py @@ -10,6 +10,7 @@ from google.cloud import bigquery from google.cloud.bigquery.dataset import DatasetListItem from google.cloud.bigquery.table import TableListItem +import packaging.version import pytest import sqlalchemy @@ -226,7 +227,12 @@ def test_unnest_function(args, kw): f = sqlalchemy.func.unnest(*args, **kw) assert isinstance(f.type, sqlalchemy.String) - assert isinstance(sqlalchemy.select(f).subquery().c.unnest.type, sqlalchemy.String) + if packaging.version.parse(sqlalchemy.__version__) >= packaging.version.parse( + "1.4" + ): + assert isinstance( + sqlalchemy.select(f).subquery().c.unnest.type, sqlalchemy.String + ) @mock.patch("sqlalchemy_bigquery._helpers.create_bigquery_client") From 19d9ce2d6b19fb763addf8f985ec2fcd1a331068 Mon Sep 17 00:00:00 2001 From: Kira Date: Mon, 29 Jan 2024 14:41:59 -0800 Subject: [PATCH 41/62] chore: fix coverage tests sqlalchemy 2.0 migration (#987) * chore: remove code for sqlalchemy before 1_4 * reformatted with black: * Removed sqlalchemy compliance tests from versions before 1.4 * removed code in base.py for sqlalchemy < 1.4 * fix coverage issues in base.py * temporarily commented out code lines not passing coverage for testing purposes * replaced functions previously removed for not passing cover * testing removing functions for coverage * add no cover tag to untested code and clean up commented out functions * fix lint issues * black * Readded deleted tests and renamed them from deprecated names * black --------- Co-authored-by: Sharoon Thomas --- sqlalchemy_bigquery/_struct.py | 42 ++---- sqlalchemy_bigquery/base.py | 32 ++--- sqlalchemy_bigquery/requirements.py | 1 - .../test_dialect_compliance.py | 78 +--------- tests/unit/conftest.py | 12 -- tests/unit/test_compiler.py | 6 - tests/unit/test_compliance.py | 4 +- tests/unit/test_select.py | 136 ++++-------------- tests/unit/test_sqlalchemy_bigquery.py | 8 +- 9 files changed, 52 insertions(+), 267 deletions(-) diff --git a/sqlalchemy_bigquery/_struct.py b/sqlalchemy_bigquery/_struct.py index 7c084c98..309d1080 100644 --- a/sqlalchemy_bigquery/_struct.py +++ b/sqlalchemy_bigquery/_struct.py @@ -17,20 +17,14 @@ # IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN # CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -import packaging.version import sqlalchemy.sql.default_comparator import sqlalchemy.sql.sqltypes import sqlalchemy.types from . import base -sqlalchemy_1_4_or_more = packaging.version.parse( - sqlalchemy.__version__ -) >= packaging.version.parse("1.4") - -if sqlalchemy_1_4_or_more: - import sqlalchemy.sql.coercions - import sqlalchemy.sql.roles +import sqlalchemy.sql.coercions +import sqlalchemy.sql.roles def _get_subtype_col_spec(type_): @@ -109,30 +103,14 @@ def __getattr__(self, name): comparator_factory = Comparator -# In the implementations of _field_index below, we're stealing from -# the JSON type implementation, but the code to steal changed in -# 1.4. :/ - -if sqlalchemy_1_4_or_more: - - def _field_index(self, name, operator): - return sqlalchemy.sql.coercions.expect( - sqlalchemy.sql.roles.BinaryElementRole, - name, - expr=self.expr, - operator=operator, - bindparam_type=sqlalchemy.types.String(), - ) - -else: - - def _field_index(self, name, operator): - return sqlalchemy.sql.default_comparator._check_literal( - self.expr, - operator, - name, - bindparam_type=sqlalchemy.types.String(), - ) +def _field_index(self, name, operator): + return sqlalchemy.sql.coercions.expect( + sqlalchemy.sql.roles.BinaryElementRole, + name, + expr=self.expr, + operator=operator, + bindparam_type=sqlalchemy.types.String(), + ) def struct_getitem_op(a, b): diff --git a/sqlalchemy_bigquery/base.py b/sqlalchemy_bigquery/base.py index da4f18fc..bcff58be 100644 --- a/sqlalchemy_bigquery/base.py +++ b/sqlalchemy_bigquery/base.py @@ -163,7 +163,7 @@ def get_insert_default(self, column): # pragma: NO COVER """, flags=re.IGNORECASE | re.VERBOSE, ) - def __distribute_types_to_expanded_placeholders(self, m): + def __distribute_types_to_expanded_placeholders(self, m): # pragma: NO COVER # If we have an in parameter, it sometimes gets expaned to 0 or more # parameters and we need to move the type marker to each # parameter. @@ -174,6 +174,8 @@ def __distribute_types_to_expanded_placeholders(self, m): # suffixes refect that when an array parameter is expanded, # numeric suffixes are added. For example, a placeholder like # `%(foo)s` gets expaneded to `%(foo_0)s, `%(foo_1)s, ...`. + + # Coverage: despite our best efforts, never recognized this segment of code as being tested. placeholders, type_ = m.groups() if placeholders: placeholders = placeholders.replace(")", f":{type_})") @@ -356,11 +358,7 @@ def group_by_clause(self, select, **kw): __sqlalchemy_version_info = packaging.version.parse(sqlalchemy.__version__) - __expanding_text = ( - "EXPANDING" - if __sqlalchemy_version_info < packaging.version.parse("1.4") - else "POSTCOMPILE" - ) + __expanding_text = "POSTCOMPILE" # https://github.com/sqlalchemy/sqlalchemy/commit/f79df12bd6d99b8f6f09d4bf07722638c4b4c159 __expanding_conflict = ( @@ -388,9 +386,6 @@ def visit_in_op_binary(self, binary, operator_, **kw): self._generate_generic_binary(binary, " IN ", **kw) ) - def visit_empty_set_expr(self, element_types, **kw): - return "" - def visit_not_in_op_binary(self, binary, operator, **kw): return ( "(" @@ -424,8 +419,8 @@ def visit_contains_op_binary(self, binary, operator, **kw): self._maybe_reescape(binary), operator, **kw ) - def visit_notcontains_op_binary(self, binary, operator, **kw): - return super(BigQueryCompiler, self).visit_notcontains_op_binary( + def visit_not_contains_op_binary(self, binary, operator, **kw): + return super(BigQueryCompiler, self).visit_not_contains_op_binary( self._maybe_reescape(binary), operator, **kw ) @@ -434,8 +429,8 @@ def visit_startswith_op_binary(self, binary, operator, **kw): self._maybe_reescape(binary), operator, **kw ) - def visit_notstartswith_op_binary(self, binary, operator, **kw): - return super(BigQueryCompiler, self).visit_notstartswith_op_binary( + def visit_not_startswith_op_binary(self, binary, operator, **kw): + return super(BigQueryCompiler, self).visit_not_startswith_op_binary( self._maybe_reescape(binary), operator, **kw ) @@ -444,8 +439,8 @@ def visit_endswith_op_binary(self, binary, operator, **kw): self._maybe_reescape(binary), operator, **kw ) - def visit_notendswith_op_binary(self, binary, operator, **kw): - return super(BigQueryCompiler, self).visit_notendswith_op_binary( + def visit_not_endswith_op_binary(self, binary, operator, **kw): + return super(BigQueryCompiler, self).visit_not_endswith_op_binary( self._maybe_reescape(binary), operator, **kw ) @@ -510,7 +505,8 @@ def visit_bindparam( # here, because then we can't do a recompile later (e.g., first # print the statment, then execute it). See issue #357. # - if getattr(bindparam, "expand_op", None) is not None: + # Coverage: despite our best efforts, never recognized this segment of code as being tested. + if getattr(bindparam, "expand_op", None) is not None: # pragma: NO COVER assert bindparam.expand_op.__name__.endswith("in_op") # in in bindparam = bindparam._clone(maintain_key=True) bindparam.expanding = False @@ -1278,10 +1274,6 @@ def do_rollback(self, dbapi_connection): # BigQuery has no support for transactions. pass - def _check_unicode_returns(self, connection, additional_tests=None): - # requests gives back Unicode strings - return True - def get_view_definition(self, connection, view_name, schema=None, **kw): if isinstance(connection, Engine): connection = connection.connect() diff --git a/sqlalchemy_bigquery/requirements.py b/sqlalchemy_bigquery/requirements.py index af6dec75..118e3946 100644 --- a/sqlalchemy_bigquery/requirements.py +++ b/sqlalchemy_bigquery/requirements.py @@ -24,7 +24,6 @@ import sqlalchemy.testing.requirements import sqlalchemy.testing.exclusions -from sqlalchemy.testing.exclusions import against, only_on supported = sqlalchemy.testing.exclusions.open unsupported = sqlalchemy.testing.exclusions.closed diff --git a/tests/sqlalchemy_dialect_compliance/test_dialect_compliance.py b/tests/sqlalchemy_dialect_compliance/test_dialect_compliance.py index 7677510e..5ce6f9ad 100644 --- a/tests/sqlalchemy_dialect_compliance/test_dialect_compliance.py +++ b/tests/sqlalchemy_dialect_compliance/test_dialect_compliance.py @@ -29,14 +29,11 @@ import sqlalchemy.testing.suite.test_types import sqlalchemy.sql.sqltypes from sqlalchemy.testing import util, config -from sqlalchemy.testing import is_false -from sqlalchemy.testing import is_true -from sqlalchemy.testing import is_ from sqlalchemy.testing.assertions import eq_ -from sqlalchemy.testing.suite import config, select, exists +from sqlalchemy.testing.suite import select, exists from sqlalchemy.testing.suite import * # noqa +from sqlalchemy.testing.suite import Integer, Table, Column, String, bindparam, testing from sqlalchemy.testing.suite import ( - ComponentReflectionTest as _ComponentReflectionTest, CTETest as _CTETest, ExistsTest as _ExistsTest, InsertBehaviorTest as _InsertBehaviorTest, @@ -53,21 +50,18 @@ from sqlalchemy.testing.suite.test_reflection import ( BizarroCharacterFKResolutionTest, ComponentReflectionTest, - OneConnectionTablesTest, HasTableTest, ) if packaging.version.parse(sqlalchemy.__version__) >= packaging.version.parse("2.0"): import uuid from sqlalchemy.sql import type_coerce - from sqlalchemy import Uuid from sqlalchemy.testing.suite import ( TrueDivTest as _TrueDivTest, IntegerTest as _IntegerTest, NumericTest as _NumericTest, DifficultParametersTest as _DifficultParametersTest, FetchLimitOffsetTest as _FetchLimitOffsetTest, - PostCompileParamsTest, StringTest as _StringTest, UuidTest as _UuidTest, ) @@ -469,74 +463,6 @@ def test_dont_truncate_rightside( del IdentityAutoincrementTest # BQ doesn't do autoincrement del PostCompileParamsTest # BQ adds backticks to bind parameters, causing failure of tests TODO: fix this? -elif packaging.version.parse(sqlalchemy.__version__) < packaging.version.parse("1.4"): - from sqlalchemy.testing.suite import LimitOffsetTest as _LimitOffsetTest - - class LimitOffsetTest(_LimitOffsetTest): - @pytest.mark.skip("BigQuery doesn't allow an offset without a limit.") - def test_simple_offset(self): - pass - - test_bound_offset = test_simple_offset - - class TimestampMicrosecondsTest(_TimestampMicrosecondsTest): - data = datetime.datetime(2012, 10, 15, 12, 57, 18, 396, tzinfo=pytz.UTC) - - def test_literal(self): - # The base tests doesn't set up the literal properly, because - # it doesn't pass its datatype to `literal`. - - def literal(value): - assert value == self.data - return sqlalchemy.sql.elements.literal(value, self.datatype) - - with mock.patch("sqlalchemy.testing.suite.test_types.literal", literal): - super(TimestampMicrosecondsTest, self).test_literal() - - def test_select_direct(self, connection): - # This func added because this test was failing when passed the - # UTC timezone. - - def literal(value, type_=None): - assert value == self.data - - if type_ is not None: - assert type_ is self.datatype - - return sqlalchemy.sql.elements.literal(value, self.datatype) - - with mock.patch("sqlalchemy.testing.suite.test_types.literal", literal): - super(TimestampMicrosecondsTest, self).test_select_direct(connection) - - class InsertBehaviorTest(_InsertBehaviorTest): - @pytest.mark.skip( - "BQ has no autoinc and client-side defaults can't work for select." - ) - def test_insert_from_select_autoinc(cls): - pass - - class SimpleUpdateDeleteTest(_SimpleUpdateDeleteTest): - """The base tests fail if operations return rows for some reason.""" - - def test_update(self): - t = self.tables.plain_pk - r = config.db.execute(t.update().where(t.c.id == 2), data="d2_new") - assert not r.is_insert - - eq_( - config.db.execute(t.select().order_by(t.c.id)).fetchall(), - [(1, "d1"), (2, "d2_new"), (3, "d3")], - ) - - def test_delete(self): - t = self.tables.plain_pk - r = config.db.execute(t.delete().where(t.c.id == 2)) - assert not r.is_insert - eq_( - config.db.execute(t.select().order_by(t.c.id)).fetchall(), - [(1, "d1"), (3, "d3")], - ) - else: from sqlalchemy.testing.suite import ( FetchLimitOffsetTest as _FetchLimitOffsetTest, diff --git a/tests/unit/conftest.py b/tests/unit/conftest.py index 2371b80b..c75113a9 100644 --- a/tests/unit/conftest.py +++ b/tests/unit/conftest.py @@ -30,18 +30,6 @@ from . import fauxdbi sqlalchemy_version = packaging.version.parse(sqlalchemy.__version__) -sqlalchemy_1_3_or_higher = pytest.mark.skipif( - sqlalchemy_version < packaging.version.parse("1.3"), - reason="requires sqlalchemy 1.3 or higher", -) -sqlalchemy_1_4_or_higher = pytest.mark.skipif( - sqlalchemy_version < packaging.version.parse("1.4"), - reason="requires sqlalchemy 1.4 or higher", -) -sqlalchemy_before_1_4 = pytest.mark.skipif( - sqlalchemy_version >= packaging.version.parse("1.4"), - reason="requires sqlalchemy 1.3 or lower", -) sqlalchemy_before_2_0 = pytest.mark.skipif( sqlalchemy_version >= packaging.version.parse("2.0"), reason="requires sqlalchemy 1.3 or lower", diff --git a/tests/unit/test_compiler.py b/tests/unit/test_compiler.py index 19993761..5ac71485 100644 --- a/tests/unit/test_compiler.py +++ b/tests/unit/test_compiler.py @@ -22,8 +22,6 @@ from .conftest import setup_table from .conftest import ( - sqlalchemy_1_4_or_higher, - sqlalchemy_before_1_4, sqlalchemy_2_0_or_higher, sqlalchemy_before_2_0, ) @@ -63,7 +61,6 @@ def test_cant_compile_unnamed_column(faux_conn, metadata): sqlalchemy.Column(sqlalchemy.Integer).compile(faux_conn) -@sqlalchemy_1_4_or_higher def test_no_alias_for_known_tables(faux_conn, metadata): # See: https://github.com/googleapis/python-bigquery-sqlalchemy/issues/353 table = setup_table( @@ -85,7 +82,6 @@ def test_no_alias_for_known_tables(faux_conn, metadata): assert found_sql == expected_sql -@sqlalchemy_1_4_or_higher def test_no_alias_for_known_tables_cte(faux_conn, metadata): # See: https://github.com/googleapis/python-bigquery-sqlalchemy/issues/368 table = setup_table( @@ -239,7 +235,6 @@ def test_no_implicit_join_for_inner_unnest(faux_conn, metadata): assert found_outer_sql == expected_outer_sql -@sqlalchemy_1_4_or_higher def test_no_implicit_join_asterix_for_inner_unnest_no_table2_column( faux_conn, metadata ): @@ -264,7 +259,6 @@ def test_no_implicit_join_asterix_for_inner_unnest_no_table2_column( assert found_outer_sql == expected_outer_sql -@sqlalchemy_1_4_or_higher def test_no_implicit_join_for_inner_unnest_no_table2_column(faux_conn, metadata): # See: https://github.com/googleapis/python-bigquery-sqlalchemy/issues/368 q = prepare_implicit_join_base_query(faux_conn, metadata, False, False) diff --git a/tests/unit/test_compliance.py b/tests/unit/test_compliance.py index 630d5058..bd90d936 100644 --- a/tests/unit/test_compliance.py +++ b/tests/unit/test_compliance.py @@ -27,7 +27,7 @@ from sqlalchemy import Column, Integer, literal_column, select, String, Table, union from sqlalchemy.testing.assertions import eq_, in_ -from .conftest import setup_table, sqlalchemy_1_3_or_higher +from .conftest import setup_table def assert_result(connection, sel, expected, params=()): @@ -106,7 +106,6 @@ def test_percent_sign_round_trip(faux_conn, metadata): ) -@sqlalchemy_1_3_or_higher def test_empty_set_against_integer(faux_conn): table = some_table(faux_conn) @@ -119,7 +118,6 @@ def test_empty_set_against_integer(faux_conn): assert_result(faux_conn, stmt, [], params={"q": []}) -@sqlalchemy_1_3_or_higher def test_null_in_empty_set_is_false(faux_conn): stmt = select( sqlalchemy.case( diff --git a/tests/unit/test_select.py b/tests/unit/test_select.py index 55acf4a0..ad80047a 100644 --- a/tests/unit/test_select.py +++ b/tests/unit/test_select.py @@ -20,20 +20,13 @@ import datetime from decimal import Decimal -import packaging.version import pytest import sqlalchemy from sqlalchemy import not_ import sqlalchemy_bigquery -from .conftest import ( - setup_table, - sqlalchemy_version, - sqlalchemy_1_3_or_higher, - sqlalchemy_1_4_or_higher, - sqlalchemy_before_1_4, -) +from .conftest import setup_table def test_labels_not_forced(faux_conn): @@ -225,20 +218,6 @@ def test_disable_quote(faux_conn): assert faux_conn.test_data["execute"][-1][0] == ("SELECT `t`.foo \nFROM `t`") -@sqlalchemy_before_1_4 -def test_select_in_lit_13(faux_conn): - [[isin]] = faux_conn.execute( - sqlalchemy.select(sqlalchemy.literal(1).in_([1, 2, 3])) - ) - assert isin - assert faux_conn.test_data["execute"][-1] == ( - "SELECT %(param_1:INT64)s IN " - "(%(param_2:INT64)s, %(param_3:INT64)s, %(param_4:INT64)s) AS `anon_1`", - {"param_1": 1, "param_2": 1, "param_3": 2, "param_4": 3}, - ) - - -@sqlalchemy_1_4_or_higher def test_select_in_lit(faux_conn, last_query): faux_conn.execute(sqlalchemy.select(sqlalchemy.literal(1).in_([1, 2, 3]))) last_query( @@ -248,81 +227,45 @@ def test_select_in_lit(faux_conn, last_query): def test_select_in_param(faux_conn, last_query): - [[isin]] = faux_conn.execute( + faux_conn.execute( sqlalchemy.select( sqlalchemy.literal(1).in_(sqlalchemy.bindparam("q", expanding=True)) ), dict(q=[1, 2, 3]), ) - if sqlalchemy_version >= packaging.version.parse("1.4"): - last_query( - "SELECT %(param_1:INT64)s IN UNNEST(%(q:INT64)s) AS `anon_1`", - {"param_1": 1, "q": [1, 2, 3]}, - ) - else: - assert isin - last_query( - "SELECT %(param_1:INT64)s IN UNNEST(" - "[ %(q_1:INT64)s, %(q_2:INT64)s, %(q_3:INT64)s ]" - ") AS `anon_1`", - {"param_1": 1, "q_1": 1, "q_2": 2, "q_3": 3}, - ) + + last_query( + "SELECT %(param_1:INT64)s IN UNNEST(%(q:INT64)s) AS `anon_1`", + {"param_1": 1, "q": [1, 2, 3]}, + ) def test_select_in_param1(faux_conn, last_query): - [[isin]] = faux_conn.execute( + faux_conn.execute( sqlalchemy.select( sqlalchemy.literal(1).in_(sqlalchemy.bindparam("q", expanding=True)) ), dict(q=[1]), ) - if sqlalchemy_version >= packaging.version.parse("1.4"): - last_query( - "SELECT %(param_1:INT64)s IN UNNEST(%(q:INT64)s) AS `anon_1`", - {"param_1": 1, "q": [1]}, - ) - else: - assert isin - last_query( - "SELECT %(param_1:INT64)s IN UNNEST(" "[ %(q_1:INT64)s ]" ") AS `anon_1`", - {"param_1": 1, "q_1": 1}, - ) + last_query( + "SELECT %(param_1:INT64)s IN UNNEST(%(q:INT64)s) AS `anon_1`", + {"param_1": 1, "q": [1]}, + ) -@sqlalchemy_1_3_or_higher def test_select_in_param_empty(faux_conn, last_query): - [[isin]] = faux_conn.execute( + faux_conn.execute( sqlalchemy.select( sqlalchemy.literal(1).in_(sqlalchemy.bindparam("q", expanding=True)) ), dict(q=[]), ) - if sqlalchemy_version >= packaging.version.parse("1.4"): - last_query( - "SELECT %(param_1:INT64)s IN UNNEST(%(q:INT64)s) AS `anon_1`", - {"param_1": 1, "q": []}, - ) - else: - assert not isin - last_query( - "SELECT %(param_1:INT64)s IN UNNEST([ ]) AS `anon_1`", {"param_1": 1} - ) - - -@sqlalchemy_before_1_4 -def test_select_notin_lit13(faux_conn): - [[isnotin]] = faux_conn.execute( - sqlalchemy.select(sqlalchemy.literal(0).notin_([1, 2, 3])) - ) - assert isnotin - assert faux_conn.test_data["execute"][-1] == ( - "SELECT (%(param_1:INT64)s NOT IN " - "(%(param_2:INT64)s, %(param_3:INT64)s, %(param_4:INT64)s)) AS `anon_1`", - {"param_1": 0, "param_2": 1, "param_3": 2, "param_4": 3}, + last_query( + "SELECT %(param_1:INT64)s IN UNNEST(%(q:INT64)s) AS `anon_1`", + {"param_1": 1, "q": []}, ) -@sqlalchemy_1_4_or_higher def test_select_notin_lit(faux_conn, last_query): faux_conn.execute(sqlalchemy.select(sqlalchemy.literal(0).notin_([1, 2, 3]))) last_query( @@ -332,45 +275,29 @@ def test_select_notin_lit(faux_conn, last_query): def test_select_notin_param(faux_conn, last_query): - [[isnotin]] = faux_conn.execute( + faux_conn.execute( sqlalchemy.select( sqlalchemy.literal(1).notin_(sqlalchemy.bindparam("q", expanding=True)) ), dict(q=[1, 2, 3]), ) - if sqlalchemy_version >= packaging.version.parse("1.4"): - last_query( - "SELECT (%(param_1:INT64)s NOT IN UNNEST(%(q:INT64)s)) AS `anon_1`", - {"param_1": 1, "q": [1, 2, 3]}, - ) - else: - assert not isnotin - last_query( - "SELECT (%(param_1:INT64)s NOT IN UNNEST(" - "[ %(q_1:INT64)s, %(q_2:INT64)s, %(q_3:INT64)s ]" - ")) AS `anon_1`", - {"param_1": 1, "q_1": 1, "q_2": 2, "q_3": 3}, - ) + last_query( + "SELECT (%(param_1:INT64)s NOT IN UNNEST(%(q:INT64)s)) AS `anon_1`", + {"param_1": 1, "q": [1, 2, 3]}, + ) -@sqlalchemy_1_3_or_higher def test_select_notin_param_empty(faux_conn, last_query): - [[isnotin]] = faux_conn.execute( + faux_conn.execute( sqlalchemy.select( sqlalchemy.literal(1).notin_(sqlalchemy.bindparam("q", expanding=True)) ), dict(q=[]), ) - if sqlalchemy_version >= packaging.version.parse("1.4"): - last_query( - "SELECT (%(param_1:INT64)s NOT IN UNNEST(%(q:INT64)s)) AS `anon_1`", - {"param_1": 1, "q": []}, - ) - else: - assert isnotin - last_query( - "SELECT (%(param_1:INT64)s NOT IN UNNEST([ ])) AS `anon_1`", {"param_1": 1} - ) + last_query( + "SELECT (%(param_1:INT64)s NOT IN UNNEST(%(q:INT64)s)) AS `anon_1`", + {"param_1": 1, "q": []}, + ) def test_literal_binds_kwarg_with_an_IN_operator_252(faux_conn): @@ -391,7 +318,6 @@ def nstr(q): ) -@sqlalchemy_1_4_or_higher @pytest.mark.parametrize("alias", [True, False]) def test_unnest(faux_conn, alias): from sqlalchemy import String @@ -409,7 +335,6 @@ def test_unnest(faux_conn, alias): ) -@sqlalchemy_1_4_or_higher @pytest.mark.parametrize("alias", [True, False]) def test_table_valued_alias_w_multiple_references_to_the_same_table(faux_conn, alias): from sqlalchemy import String @@ -428,7 +353,6 @@ def test_table_valued_alias_w_multiple_references_to_the_same_table(faux_conn, a ) -@sqlalchemy_1_4_or_higher @pytest.mark.parametrize("alias", [True, False]) def test_unnest_w_no_table_references(faux_conn, alias): fcall = sqlalchemy.func.unnest([1, 2, 3]) @@ -452,10 +376,6 @@ def test_array_indexing(faux_conn, metadata): assert got == "SELECT `t`.`a`[OFFSET(%(a_1:INT64)s)] AS `anon_1` \nFROM `t`" -@pytest.mark.skipif( - packaging.version.parse(sqlalchemy.__version__) < packaging.version.parse("1.4"), - reason="regexp_match support requires version 1.4 or higher", -) def test_visit_regexp_match_op_binary(faux_conn): table = setup_table( faux_conn, @@ -472,10 +392,6 @@ def test_visit_regexp_match_op_binary(faux_conn): assert result == expected -@pytest.mark.skipif( - packaging.version.parse(sqlalchemy.__version__) < packaging.version.parse("1.4"), - reason="regexp_match support requires version 1.4 or higher", -) def test_visit_not_regexp_match_op_binary(faux_conn): table = setup_table( faux_conn, diff --git a/tests/unit/test_sqlalchemy_bigquery.py b/tests/unit/test_sqlalchemy_bigquery.py index d64e1b97..db20e2f0 100644 --- a/tests/unit/test_sqlalchemy_bigquery.py +++ b/tests/unit/test_sqlalchemy_bigquery.py @@ -10,7 +10,6 @@ from google.cloud import bigquery from google.cloud.bigquery.dataset import DatasetListItem from google.cloud.bigquery.table import TableListItem -import packaging.version import pytest import sqlalchemy @@ -227,12 +226,7 @@ def test_unnest_function(args, kw): f = sqlalchemy.func.unnest(*args, **kw) assert isinstance(f.type, sqlalchemy.String) - if packaging.version.parse(sqlalchemy.__version__) >= packaging.version.parse( - "1.4" - ): - assert isinstance( - sqlalchemy.select(f).subquery().c.unnest.type, sqlalchemy.String - ) + assert isinstance(sqlalchemy.select(f).subquery().c.unnest.type, sqlalchemy.String) @mock.patch("sqlalchemy_bigquery._helpers.create_bigquery_client") From 59408a5c105972ef62f0a14213e2acb43d4c5ffc Mon Sep 17 00:00:00 2001 From: Kira Date: Mon, 29 Jan 2024 23:42:43 -0800 Subject: [PATCH 42/62] chore: sqlalchemy test compliance suite cleanup (#1018) * chore: remove code for sqlalchemy before 1_4 * reformatted with black: * Removed sqlalchemy compliance tests from versions before 1.4 * removed code in base.py for sqlalchemy < 1.4 * fix coverage issues in base.py * temporarily commented out code lines not passing coverage for testing purposes * replaced functions previously removed for not passing cover * testing removing functions for coverage * add no cover tag to untested code and clean up commented out functions * fix lint issues * black * Readded deleted tests and renamed them from deprecated names * black * chore: sqlalchemy test compliance suite cleanup code * black * black --------- Co-authored-by: Sharoon Thomas --- .../test_dialect_compliance.py | 526 ++++++++---------- 1 file changed, 227 insertions(+), 299 deletions(-) diff --git a/tests/sqlalchemy_dialect_compliance/test_dialect_compliance.py b/tests/sqlalchemy_dialect_compliance/test_dialect_compliance.py index 5ce6f9ad..5420bf32 100644 --- a/tests/sqlalchemy_dialect_compliance/test_dialect_compliance.py +++ b/tests/sqlalchemy_dialect_compliance/test_dialect_compliance.py @@ -36,8 +36,14 @@ from sqlalchemy.testing.suite import ( CTETest as _CTETest, ExistsTest as _ExistsTest, + FetchLimitOffsetTest as _FetchLimitOffsetTest, + DifficultParametersTest as _DifficultParametersTest, + DistinctOnTest, + HasIndexTest, + IdentityAutoincrementTest, InsertBehaviorTest as _InsertBehaviorTest, LongNameBlowoutTest, + PostCompileParamsTest, QuotedNameArgumentTest, SimpleUpdateDeleteTest as _SimpleUpdateDeleteTest, TimestampMicrosecondsTest as _TimestampMicrosecondsTest, @@ -60,143 +66,13 @@ TrueDivTest as _TrueDivTest, IntegerTest as _IntegerTest, NumericTest as _NumericTest, - DifficultParametersTest as _DifficultParametersTest, - FetchLimitOffsetTest as _FetchLimitOffsetTest, StringTest as _StringTest, UuidTest as _UuidTest, ) - class TimestampMicrosecondsTest(_TimestampMicrosecondsTest): - data = datetime.datetime(2012, 10, 15, 12, 57, 18, 396, tzinfo=pytz.UTC) - - def test_select_direct(self, connection): - # This func added because this test was failing when passed the - # UTC timezone. - - def literal(value, type_=None): - assert value == self.data - - if type_ is not None: - assert type_ is self.datatype - - return sqlalchemy.sql.elements.literal(value, self.datatype) - - with mock.patch("sqlalchemy.testing.suite.test_types.literal", literal): - super(TimestampMicrosecondsTest, self).test_select_direct(connection) - - def test_round_trip_executemany(self, connection): - unicode_table = self.tables.unicode_table - connection.execute( - unicode_table.insert(), - [{"id": i, "unicode_data": self.data} for i in range(3)], - ) - - rows = connection.execute(select(unicode_table.c.unicode_data)).fetchall() - eq_(rows, [(self.data,) for i in range(3)]) - for row in rows: - # 2.0 had no support for util.text_type - assert isinstance(row[0], str) - - sqlalchemy.testing.suite.test_types._UnicodeFixture.test_round_trip_executemany = ( - test_round_trip_executemany - ) - - class TrueDivTest(_TrueDivTest): - @pytest.mark.skip("BQ rounds based on datatype") - def test_floordiv_integer(self): - pass - - @pytest.mark.skip("BQ rounds based on datatype") - def test_floordiv_integer_bound(self): - pass - - class SimpleUpdateDeleteTest(_SimpleUpdateDeleteTest): - """The base tests fail if operations return rows for some reason.""" - - def test_update(self): - t = self.tables.plain_pk - connection = config.db.connect() - # In SQLAlchemy 2.0, the datatype changed to dict in the following function. - r = connection.execute(t.update().where(t.c.id == 2), dict(data="d2_new")) - assert not r.is_insert - - eq_( - connection.execute(t.select().order_by(t.c.id)).fetchall(), - [(1, "d1"), (2, "d2_new"), (3, "d3")], - ) - - def test_delete(self): - t = self.tables.plain_pk - connection = config.db.connect() - r = connection.execute(t.delete().where(t.c.id == 2)) - assert not r.is_insert - eq_( - connection.execute(t.select().order_by(t.c.id)).fetchall(), - [(1, "d1"), (3, "d3")], - ) - - class InsertBehaviorTest(_InsertBehaviorTest): - @pytest.mark.skip( - "BQ has no autoinc and client-side defaults can't work for select." - ) - def test_insert_from_select_autoinc(cls): - pass - - @pytest.mark.skip( - "BQ has no autoinc and client-side defaults can't work for select." - ) - def test_no_results_for_non_returning_insert(cls): - pass - - # BQ has no autoinc and client-side defaults can't work for select - del _IntegerTest.test_huge_int_auto_accommodation - - class NumericTest(_NumericTest): - @testing.fixture - def do_numeric_test(self, metadata, connection): - def run(type_, input_, output, filter_=None, check_scale=False): - t = Table("t", metadata, Column("x", type_)) - t.create(connection) - connection.execute(t.insert(), [{"x": x} for x in input_]) - - result = {row[0] for row in connection.execute(t.select())} - output = set(output) - if filter_: - result = {filter_(x) for x in result} - output = {filter_(x) for x in output} - eq_(result, output) - if check_scale: - eq_([str(x) for x in result], [str(x) for x in output]) - - where_expr = True - - # Adding where clause for 2.0 compatibility - connection.execute(t.delete().where(where_expr)) - - # test that this is actually a number! - # note we have tiny scale here as we have tests with very - # small scale Numeric types. PostgreSQL will raise an error - # if you use values outside the available scale. - if type_.asdecimal: - test_value = decimal.Decimal("2.9") - add_value = decimal.Decimal("37.12") - else: - test_value = 2.9 - add_value = 37.12 - - connection.execute(t.insert(), {"x": test_value}) - assert_we_are_a_number = connection.scalar( - select(type_coerce(t.c.x + add_value, type_)) - ) - eq_( - round(assert_we_are_a_number, 3), - round(test_value + add_value, 3), - ) - - return run - class DifficultParametersTest(_DifficultParametersTest): - # removed parameters that dont work with bigquery + """There are some parameters that don't work with bigquery that were removed from this test""" + tough_parameters = testing.combinations( ("boring",), ("per cent",), @@ -301,34 +177,149 @@ def test_standalone_bindparam_escape_expanding( res = connection.scalars(stmt, {paramname: ["d", "a"]}).all() eq_(res, [1, 4]) - class FetchLimitOffsetTest(_FetchLimitOffsetTest): - @pytest.mark.skip("BigQuery doesn't allow an offset without a limit.") - def test_simple_offset(self): + # BQ has no autoinc and client-side defaults can't work for select + del _IntegerTest.test_huge_int_auto_accommodation + + class NumericTest(_NumericTest): + """Added a where clause for BQ compatibility.""" + + @testing.fixture + def do_numeric_test(self, metadata, connection): + def run(type_, input_, output, filter_=None, check_scale=False): + t = Table("t", metadata, Column("x", type_)) + t.create(connection) + connection.execute(t.insert(), [{"x": x} for x in input_]) + + result = {row[0] for row in connection.execute(t.select())} + output = set(output) + if filter_: + result = {filter_(x) for x in result} + output = {filter_(x) for x in output} + eq_(result, output) + if check_scale: + eq_([str(x) for x in result], [str(x) for x in output]) + + where_expr = True + + connection.execute(t.delete().where(where_expr)) + + if type_.asdecimal: + test_value = decimal.Decimal("2.9") + add_value = decimal.Decimal("37.12") + else: + test_value = 2.9 + add_value = 37.12 + + connection.execute(t.insert(), {"x": test_value}) + assert_we_are_a_number = connection.scalar( + select(type_coerce(t.c.x + add_value, type_)) + ) + eq_( + round(assert_we_are_a_number, 3), + round(test_value + add_value, 3), + ) + + return run + + class TimestampMicrosecondsTest(_TimestampMicrosecondsTest): + """BQ has no support for BQ util.text_type""" + + data = datetime.datetime(2012, 10, 15, 12, 57, 18, 396, tzinfo=pytz.UTC) + + def test_select_direct(self, connection): + # This func added because this test was failing when passed the + # UTC timezone. + + def literal(value, type_=None): + assert value == self.data + + if type_ is not None: + assert type_ is self.datatype + + return sqlalchemy.sql.elements.literal(value, self.datatype) + + with mock.patch("sqlalchemy.testing.suite.test_types.literal", literal): + super(TimestampMicrosecondsTest, self).test_select_direct(connection) + + def test_round_trip_executemany(self, connection): + unicode_table = self.tables.unicode_table + connection.execute( + unicode_table.insert(), + [{"id": i, "unicode_data": self.data} for i in range(3)], + ) + + rows = connection.execute(select(unicode_table.c.unicode_data)).fetchall() + eq_(rows, [(self.data,) for i in range(3)]) + for row in rows: + assert isinstance(row[0], str) + + sqlalchemy.testing.suite.test_types._UnicodeFixture.test_round_trip_executemany = ( + test_round_trip_executemany + ) + + class TrueDivTest(_TrueDivTest): + @pytest.mark.skip("BQ rounds based on datatype") + def test_floordiv_integer(self): pass - test_bound_offset = test_simple_offset - test_expr_offset = test_simple_offset_zero = test_simple_offset - test_limit_offset_nobinds = test_simple_offset # TODO figure out - # how to prevent this from failing - # The original test is missing an order by. + @pytest.mark.skip("BQ rounds based on datatype") + def test_floordiv_integer_bound(self): + pass - # The original test is missing an order by. + class SimpleUpdateDeleteTest(_SimpleUpdateDeleteTest): + """The base tests fail if operations return rows for some reason.""" - # Also, note that sqlalchemy union is a union distinct, not a - # union all. This test caught that were were getting that wrong. - def test_limit_render_multiple_times(self, connection): - table = self.tables.some_table - stmt = select(table.c.id).order_by(table.c.id).limit(1).scalar_subquery() + def test_update(self): + t = self.tables.plain_pk + connection = config.db.connect() + # In SQLAlchemy 2.0, the datatype changed to dict in the following function. + r = connection.execute(t.update().where(t.c.id == 2), dict(data="d2_new")) + assert not r.is_insert - u = sqlalchemy.union(select(stmt), select(stmt)).subquery().select() + eq_( + connection.execute(t.select().order_by(t.c.id)).fetchall(), + [(1, "d1"), (2, "d2_new"), (3, "d3")], + ) - self._assert_result( - connection, - u, - [(1,)], + def test_delete(self): + t = self.tables.plain_pk + connection = config.db.connect() + r = connection.execute(t.delete().where(t.c.id == 2)) + assert not r.is_insert + eq_( + connection.execute(t.select().order_by(t.c.id)).fetchall(), + [(1, "d1"), (3, "d3")], + ) + + class StringTest(_StringTest): + """Added a where clause for BQ compatibility""" + + def test_dont_truncate_rightside( + self, metadata, connection, expr=None, expected=None + ): + t = Table( + "t", + metadata, + Column("x", String(2)), + Column("id", Integer, primary_key=True), + ) + t.create(connection) + connection.connection.commit() + connection.execute( + t.insert(), + [{"x": "AB", "id": 1}, {"x": "BC", "id": 2}, {"x": "AC", "id": 3}], ) + combinations = [("%B%", ["AB", "BC"]), ("A%C", ["AC"]), ("A%C%Z", [])] + + for args in combinations: + eq_( + connection.scalars(select(t.c.x).where(t.c.x.like(args[0]))).all(), + args[1], + ) class UuidTest(_UuidTest): + """BQ needs to pass in UUID as a string""" + @classmethod def define_tables(cls, metadata): Table( @@ -433,75 +424,60 @@ def test_uuid_returning(self, connection): eq_(row, (data, str_data, data, str_data)) - class StringTest(_StringTest): - def test_dont_truncate_rightside( - self, metadata, connection, expr=None, expected=None - ): - t = Table( - "t", - metadata, - Column("x", String(2)), - Column("id", Integer, primary_key=True), - ) - t.create(connection) - connection.connection.commit() - connection.execute( - t.insert(), - [{"x": "AB", "id": 1}, {"x": "BC", "id": 2}, {"x": "AC", "id": 3}], - ) - combinations = [("%B%", ["AB", "BC"]), ("A%C", ["AC"]), ("A%C%Z", [])] - - for args in combinations: - eq_( - connection.scalars(select(t.c.x).where(t.c.x.like(args[0]))).all(), - args[1], - ) - - # from else statement .... - del DistinctOnTest # expects unquoted table names. - del HasIndexTest # BQ doesn't do the indexes that SQLA is loooking for. - del IdentityAutoincrementTest # BQ doesn't do autoincrement - del PostCompileParamsTest # BQ adds backticks to bind parameters, causing failure of tests TODO: fix this? - else: from sqlalchemy.testing.suite import ( - FetchLimitOffsetTest as _FetchLimitOffsetTest, RowCountTest as _RowCountTest, ) - class FetchLimitOffsetTest(_FetchLimitOffsetTest): - @pytest.mark.skip("BigQuery doesn't allow an offset without a limit.") - def test_simple_offset(self): - pass - - test_bound_offset = test_simple_offset - test_expr_offset = test_simple_offset_zero = test_simple_offset - test_limit_offset_nobinds = test_simple_offset # TODO figure out - # how to prevent this from failing - # The original test is missing an order by. + del DifficultParametersTest # exercises column names illegal in BQ - # Also, note that sqlalchemy union is a union distinct, not a - # union all. This test caught that were were getting that wrong. - def test_limit_render_multiple_times(self, connection): - table = self.tables.some_table - stmt = select(table.c.id).order_by(table.c.id).limit(1).scalar_subquery() + class RowCountTest(_RowCountTest): + """""" - u = sqlalchemy.union(select(stmt), select(stmt)).subquery().select() + @classmethod + def insert_data(cls, connection): + cls.data = data = [ + ("Angela", "A"), + ("Andrew", "A"), + ("Anand", "A"), + ("Bob", "B"), + ("Bobette", "B"), + ("Buffy", "B"), + ("Charlie", "C"), + ("Cynthia", "C"), + ("Chris", "C"), + ] - self._assert_result( - connection, - u, - [(1,)], + employees_table = cls.tables.employees + connection.execute( + employees_table.insert(), + [ + {"employee_id": i, "name": n, "department": d} + for i, (n, d) in enumerate(data) + ], ) - del DifficultParametersTest # exercises column names illegal in BQ - del DistinctOnTest # expects unquoted table names. - del HasIndexTest # BQ doesn't do the indexes that SQLA is loooking for. - del IdentityAutoincrementTest # BQ doesn't do autoincrement + class SimpleUpdateDeleteTest(_SimpleUpdateDeleteTest): + """The base tests fail if operations return rows for some reason.""" + + def test_update(self): + t = self.tables.plain_pk + r = config.db.execute(t.update().where(t.c.id == 2), data="d2_new") + assert not r.is_insert + + eq_( + config.db.execute(t.select().order_by(t.c.id)).fetchall(), + [(1, "d1"), (2, "d2_new"), (3, "d3")], + ) - # This test makes makes assertions about generated sql and trips - # over the backquotes that we add everywhere. XXX Why do we do that? - del PostCompileParamsTest + def test_delete(self): + t = self.tables.plain_pk + r = config.db.execute(t.delete().where(t.c.id == 2)) + assert not r.is_insert + eq_( + config.db.execute(t.select().order_by(t.c.id)).fetchall(), + [(1, "d1"), (3, "d3")], + ) class TimestampMicrosecondsTest(_TimestampMicrosecondsTest): data = datetime.datetime(2012, 10, 15, 12, 57, 18, 396, tzinfo=pytz.UTC) @@ -553,70 +529,15 @@ def test_round_trip_executemany(self, connection): test_round_trip_executemany ) - class RowCountTest(_RowCountTest): - @classmethod - def insert_data(cls, connection): - cls.data = data = [ - ("Angela", "A"), - ("Andrew", "A"), - ("Anand", "A"), - ("Bob", "B"), - ("Bobette", "B"), - ("Buffy", "B"), - ("Charlie", "C"), - ("Cynthia", "C"), - ("Chris", "C"), - ] - - employees_table = cls.tables.employees - connection.execute( - employees_table.insert(), - [ - {"employee_id": i, "name": n, "department": d} - for i, (n, d) in enumerate(data) - ], - ) - - class InsertBehaviorTest(_InsertBehaviorTest): - @pytest.mark.skip( - "BQ has no autoinc and client-side defaults can't work for select." - ) - def test_insert_from_select_autoinc(cls): - pass - - class SimpleUpdateDeleteTest(_SimpleUpdateDeleteTest): - """The base tests fail if operations return rows for some reason.""" - - def test_update(self): - t = self.tables.plain_pk - r = config.db.execute(t.update().where(t.c.id == 2), data="d2_new") - assert not r.is_insert - - eq_( - config.db.execute(t.select().order_by(t.c.id)).fetchall(), - [(1, "d1"), (2, "d2_new"), (3, "d3")], - ) - - def test_delete(self): - t = self.tables.plain_pk - r = config.db.execute(t.delete().where(t.c.id == 2)) - assert not r.is_insert - eq_( - config.db.execute(t.select().order_by(t.c.id)).fetchall(), - [(1, "d1"), (3, "d3")], - ) - - -# Quotes aren't allowed in BigQuery table names. -del QuotedNameArgumentTest +class CTETest(_CTETest): + @pytest.mark.skip("Can't use CTEs with insert") + def test_insert_from_select_round_trip(self): + pass -# class InsertBehaviorTest(_InsertBehaviorTest): -# @pytest.mark.skip( -# "BQ has no autoinc and client-side defaults can't work for select." -# ) -# def test_insert_from_select_autoinc(cls): -# pass + @pytest.mark.skip("Recusive CTEs aren't supported.") + def test_select_recursive_round_trip(self): + pass class ExistsTest(_ExistsTest): @@ -651,42 +572,43 @@ def test_select_exists_false(self, connection): ) -# This test requires features (indexes, primary keys, etc., that BigQuery doesn't have. -del LongNameBlowoutTest - +class FetchLimitOffsetTest(_FetchLimitOffsetTest): + @pytest.mark.skip("BigQuery doesn't allow an offset without a limit.") + def test_simple_offset(self): + pass -# class SimpleUpdateDeleteTest(_SimpleUpdateDeleteTest): -# """The base tests fail if operations return rows for some reason.""" + test_bound_offset = test_simple_offset + test_expr_offset = test_simple_offset_zero = test_simple_offset + test_limit_offset_nobinds = test_simple_offset # TODO figure out + # how to prevent this from failing + # The original test is missing an order by. -# def test_update(self): -# t = self.tables.plain_pk -# r = config.db.execute(t.update().where(t.c.id == 2), data="d2_new") -# assert not r.is_insert -# # assert not r.returns_rows + # Also, note that sqlalchemy union is a union distinct, not a + # union all. This test caught that were were getting that wrong. + def test_limit_render_multiple_times(self, connection): + table = self.tables.some_table + stmt = select(table.c.id).order_by(table.c.id).limit(1).scalar_subquery() -# eq_( -# config.db.execute(t.select().order_by(t.c.id)).fetchall(), -# [(1, "d1"), (2, "d2_new"), (3, "d3")], -# ) + u = sqlalchemy.union(select(stmt), select(stmt)).subquery().select() -# def test_delete(self): -# t = self.tables.plain_pk -# r = config.db.execute(t.delete().where(t.c.id == 2)) -# assert not r.is_insert -# # assert not r.returns_rows -# eq_( -# config.db.execute(t.select().order_by(t.c.id)).fetchall(), -# [(1, "d1"), (3, "d3")], -# ) + self._assert_result( + connection, + u, + [(1,)], + ) -class CTETest(_CTETest): - @pytest.mark.skip("Can't use CTEs with insert") - def test_insert_from_select_round_trip(self): +class InsertBehaviorTest(_InsertBehaviorTest): + @pytest.mark.skip( + "BQ has no autoinc and client-side defaults can't work for select." + ) + def test_insert_from_select_autoinc(cls): pass - @pytest.mark.skip("Recusive CTEs aren't supported.") - def test_select_recursive_round_trip(self): + @pytest.mark.skip( + "BQ has no autoinc and client-side defaults can't work for select." + ) + def test_no_results_for_non_returning_insert(cls): pass @@ -706,3 +628,9 @@ def test_select_recursive_round_trip(self): del ArrayTest # only appears to apply to postgresql del BizarroCharacterFKResolutionTest del HasTableTest.test_has_table_cache # TODO confirm whether BQ has table caching +del DistinctOnTest # expects unquoted table names. +del HasIndexTest # BQ doesn't do the indexes that SQLA is loooking for. +del IdentityAutoincrementTest # BQ doesn't do autoincrement +del LongNameBlowoutTest # Requires features (indexes, primary keys, etc., that BigQuery doesn't have. +del PostCompileParamsTest # BQ adds backticks to bind parameters, causing failure of tests TODO: fix this? +del QuotedNameArgumentTest # Quotes aren't allowed in BigQuery table names. From 1bbf9eb275aa5976ae52ed2a003b1bc2e836a8e2 Mon Sep 17 00:00:00 2001 From: kiraksi Date: Tue, 30 Jan 2024 11:53:18 -0800 Subject: [PATCH 43/62] create development release 1.11.0.dev1 branch --- CHANGELOG.md | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 275f5fed..07343b9e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -13,13 +13,20 @@ Older versions of this project were distributed as [pybigquery][0]. [2]: https://pypi.org/project/pybigquery/#history +## [1.11.0.dev1](https://github.com/googleapis/python-bigquery-sqlalchemy/compare/v1.9.0...v1.11.0.dev1) (2024-01-30) + + +### Bug Fixes + +* Fix coverage test issues in SQLAlchemy migration ([#987](https://github.com/googleapis/python-bigquery-sqlalchemy/pull/987)) +* Cleanup test_sqlalchemy_dialect file for readability ([#1018](https://github.com/googleapis/python-bigquery-sqlalchemy/pull/1018)) + ## [1.11.0.dev0](https://github.com/googleapis/python-bigquery-sqlalchemy/compare/v1.9.0...v1.11.0.dev0) (2024-01-25) ### Features * Drop support for SQLAlchemy versions 1.2 and 1.3, maintain support for 1.4 and add support for 2.0 ([#920](https://github.com/googleapis/python-bigquery-sqlalchemy/pull/920)) -* Fix coverage test issues in SQLAlchemy migration ([#987](https://github.com/googleapis/python-bigquery-sqlalchemy/pull/987)) ## [1.9.0](https://github.com/googleapis/python-bigquery-sqlalchemy/compare/v1.8.0...v1.9.0) (2023-12-10) From 5cfc28089baa6106cf30f9efb268231792da9251 Mon Sep 17 00:00:00 2001 From: kiraksi Date: Wed, 31 Jan 2024 15:43:53 -0800 Subject: [PATCH 44/62] feat: grouping sets, rollup and cube compatibility --- sqlalchemy_bigquery/base.py | 34 ++++++++++++++++++-- tests/unit/test_compiler.py | 64 +++++++++++++++++++++++++++++++++++++ 2 files changed, 95 insertions(+), 3 deletions(-) diff --git a/sqlalchemy_bigquery/base.py b/sqlalchemy_bigquery/base.py index f4266f13..4548170a 100644 --- a/sqlalchemy_bigquery/base.py +++ b/sqlalchemy_bigquery/base.py @@ -38,6 +38,7 @@ import sqlalchemy import sqlalchemy.sql.expression import sqlalchemy.sql.functions +from sqlalchemy.sql.functions import rollup, cube, grouping_sets import sqlalchemy.sql.sqltypes import sqlalchemy.sql.type_api from sqlalchemy.exc import NoSuchTableError, NoSuchColumnError @@ -340,9 +341,36 @@ def visit_label(self, *args, within_group_by=False, **kwargs): return super(BigQueryCompiler, self).visit_label(*args, **kwargs) def group_by_clause(self, select, **kw): - return super(BigQueryCompiler, self).group_by_clause( - select, **kw, within_group_by=True - ) + grouping_sets_exprs = [] + rollup_exprs = [] + cube_exprs = [] + + # Traverse select statement to extract grouping sets, rollup, and cube expressions + for expr in select._group_by_clause: + if isinstance(expr, grouping_sets): + grouping_sets_exprs.append( + self.process(expr.clauses) + ) # Assuming SQLAlchemy syntax + elif isinstance(expr, rollup): # Assuming SQLAlchemy syntax + rollup_exprs.append(self.process(expr.clauses)) + elif isinstance(expr, cube): # Assuming SQLAlchemy syntax + cube_exprs.append(self.process(expr.clauses)) + else: + # Handle regular group by expressions + pass + + clause = super(BigQueryCompiler, self).group_by_clause(select, **kw) + + if grouping_sets_exprs: + clause = ( + f"GROUP BY {clause} GROUPING SETS ({', '.join(grouping_sets_exprs)})" + ) + if rollup_exprs: + clause = f"GROUP BY {clause} ROLLUP ({', '.join(rollup_exprs)})" + if cube_exprs: + clause = f"GROUP BY {clause} CUBE ({', '.join(cube_exprs)})" + + return clause ############################################################################ # Handle parameters in in diff --git a/tests/unit/test_compiler.py b/tests/unit/test_compiler.py index 139b6cbc..dc5d4438 100644 --- a/tests/unit/test_compiler.py +++ b/tests/unit/test_compiler.py @@ -22,6 +22,7 @@ from .conftest import setup_table from .conftest import sqlalchemy_1_4_or_higher, sqlalchemy_before_1_4 +from sqlalchemy.sql.functions import rollup, cube, grouping_sets def test_constraints_are_ignored(faux_conn, metadata): @@ -278,3 +279,66 @@ def test_no_implicit_join_for_inner_unnest_no_table2_column(faux_conn, metadata) ) found_outer_sql = q.compile(faux_conn).string assert found_outer_sql == expected_outer_sql + + +def test_grouping_sets(faux_conn, metadata): + table = setup_table( + faux_conn, + "table1", + metadata, + sqlalchemy.Column("foo", sqlalchemy.Integer), + sqlalchemy.Column("bar", sqlalchemy.Integer), + ) + + q = sqlalchemy.select(table.c.foo, table.c.bar).group_by( + grouping_sets(table.c.foo, table.c.bar) + ) + + expected_sql = ( + "SELECT `table1`.`foo`, `table1`.`bar` \n" + "FROM `table1` GROUP BY GROUPING SETS ((`table1`.`foo`), (`table1`.`bar`))" + ) + found_sql = q.compile(faux_conn).string + assert found_sql == expected_sql + + +def test_rollup(faux_conn, metadata): + table = setup_table( + faux_conn, + "table1", + metadata, + sqlalchemy.Column("foo", sqlalchemy.Integer), + sqlalchemy.Column("bar", sqlalchemy.Integer), + ) + + q = sqlalchemy.select(table.c.foo, table.c.bar).group_by( + rollup(table.c.foo, table.c.bar) + ) + + expected_sql = ( + "SELECT `table1`.`foo`, `table1`.`bar` \n" + "FROM `table1` GROUP BY ROLLUP(`table1`.`foo`, `table1`.`bar`)" + ) + found_sql = q.compile(faux_conn).string + assert found_sql == expected_sql + + +def test_cube(faux_conn, metadata): + table = setup_table( + faux_conn, + "table1", + metadata, + sqlalchemy.Column("foo", sqlalchemy.Integer), + sqlalchemy.Column("bar", sqlalchemy.Integer), + ) + + q = sqlalchemy.select(table.c.foo, table.c.bar).group_by( + cube(table.c.foo, table.c.bar) + ) + + expected_sql = ( + "SELECT `table1`.`foo`, `table1`.`bar` \n" + "FROM `table1` GROUP BY CUBE(`table1`.`foo`, `table1`.`bar`)" + ) + found_sql = q.compile(faux_conn).string + assert found_sql == expected_sql From 0c4cf07dce7e70cd04b196ca72193a65f95e3567 Mon Sep 17 00:00:00 2001 From: kiraksi Date: Thu, 1 Feb 2024 10:05:56 -0800 Subject: [PATCH 45/62] create development release 1.11.0.dev2 --- CHANGELOG.md | 2 ++ sqlalchemy_bigquery/version.py | 2 +- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 07343b9e..5d005bea 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -13,6 +13,8 @@ Older versions of this project were distributed as [pybigquery][0]. [2]: https://pypi.org/project/pybigquery/#history +## [1.11.0.dev2](https://github.com/googleapis/python-bigquery-sqlalchemy/compare/v1.9.0...v1.11.0.dev2) (2024-02-01) + ## [1.11.0.dev1](https://github.com/googleapis/python-bigquery-sqlalchemy/compare/v1.9.0...v1.11.0.dev1) (2024-01-30) diff --git a/sqlalchemy_bigquery/version.py b/sqlalchemy_bigquery/version.py index f15b4f67..51caf928 100644 --- a/sqlalchemy_bigquery/version.py +++ b/sqlalchemy_bigquery/version.py @@ -17,4 +17,4 @@ # IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN # CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -__version__ = "1.9.0" +__version__ = "1.11.0.dev2" From e82f5ddcd20bd0cb40cabee28a2aaeea220a5396 Mon Sep 17 00:00:00 2001 From: kiraksi Date: Thu, 1 Feb 2024 10:32:14 -0800 Subject: [PATCH 46/62] test commit to run kokooro tests --- testing/constraints-3.8.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/testing/constraints-3.8.txt b/testing/constraints-3.8.txt index 667a747d..2aa0aa7f 100644 --- a/testing/constraints-3.8.txt +++ b/testing/constraints-3.8.txt @@ -3,7 +3,7 @@ # List *all* library dependencies and extras in this file. # Pin the version to the lower bound. # -# e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev", +# e.g., if setup.py has "foo >= 1.14.1, < 2.0.0dev", sqlalchemy==1.4.16 google-auth==1.25.0 google-cloud-bigquery==3.3.6 From ece7f1fb77c64c658af311c8e46f1daaaee0b6fb Mon Sep 17 00:00:00 2001 From: kiraksi Date: Thu, 1 Feb 2024 10:58:53 -0800 Subject: [PATCH 47/62] removed unnecessary clause function changes, edited tests --- sqlalchemy_bigquery/base.py | 33 ++------------------------------- testing/constraints-3.8.txt | 2 +- tests/unit/test_compiler.py | 2 +- 3 files changed, 4 insertions(+), 33 deletions(-) diff --git a/sqlalchemy_bigquery/base.py b/sqlalchemy_bigquery/base.py index 0ba602f9..3c191cce 100644 --- a/sqlalchemy_bigquery/base.py +++ b/sqlalchemy_bigquery/base.py @@ -342,37 +342,8 @@ def visit_label(self, *args, within_group_by=False, **kwargs): kwargs["render_label_as_label"] = args[0] return super(BigQueryCompiler, self).visit_label(*args, **kwargs) - def group_by_clause(self, select, **kw): - grouping_sets_exprs = [] - rollup_exprs = [] - cube_exprs = [] - - # Traverse select statement to extract grouping sets, rollup, and cube expressions - for expr in select._group_by_clause: - if isinstance(expr, grouping_sets): - grouping_sets_exprs.append( - self.process(expr.clauses) - ) # Assuming SQLAlchemy syntax - elif isinstance(expr, rollup): # Assuming SQLAlchemy syntax - rollup_exprs.append(self.process(expr.clauses)) - elif isinstance(expr, cube): # Assuming SQLAlchemy syntax - cube_exprs.append(self.process(expr.clauses)) - else: - # Handle regular group by expressions - pass - - clause = super(BigQueryCompiler, self).group_by_clause(select, **kw) - - if grouping_sets_exprs: - clause = ( - f"GROUP BY {clause} GROUPING SETS ({', '.join(grouping_sets_exprs)})" - ) - if rollup_exprs: - clause = f"GROUP BY {clause} ROLLUP ({', '.join(rollup_exprs)})" - if cube_exprs: - clause = f"GROUP BY {clause} CUBE ({', '.join(cube_exprs)})" - - return clause + def group_by_clause(self, select, **kwargs): + return super(BigQueryCompiler, self).group_by_clause(select, **kwargs) ############################################################################ # Handle parameters in in diff --git a/testing/constraints-3.8.txt b/testing/constraints-3.8.txt index 2aa0aa7f..667a747d 100644 --- a/testing/constraints-3.8.txt +++ b/testing/constraints-3.8.txt @@ -3,7 +3,7 @@ # List *all* library dependencies and extras in this file. # Pin the version to the lower bound. # -# e.g., if setup.py has "foo >= 1.14.1, < 2.0.0dev", +# e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev", sqlalchemy==1.4.16 google-auth==1.25.0 google-cloud-bigquery==3.3.6 diff --git a/tests/unit/test_compiler.py b/tests/unit/test_compiler.py index 4857308e..1b22b31f 100644 --- a/tests/unit/test_compiler.py +++ b/tests/unit/test_compiler.py @@ -297,7 +297,7 @@ def test_grouping_sets(faux_conn, metadata): expected_sql = ( "SELECT `table1`.`foo`, `table1`.`bar` \n" - "FROM `table1` GROUP BY GROUPING SETS ((`table1`.`foo`), (`table1`.`bar`))" + "FROM `table1` GROUP BY GROUPING SETS(`table1`.`foo`, `table1`.`bar`)" ) found_sql = q.compile(faux_conn).string assert found_sql == expected_sql From 68afc3959d988b77a244699e7d5e4f39bd233917 Mon Sep 17 00:00:00 2001 From: kiraksi Date: Thu, 1 Feb 2024 11:06:06 -0800 Subject: [PATCH 48/62] test basic implementation of group_by_clause and visit_label --- sqlalchemy_bigquery/base.py | 9 +-------- 1 file changed, 1 insertion(+), 8 deletions(-) diff --git a/sqlalchemy_bigquery/base.py b/sqlalchemy_bigquery/base.py index 3c191cce..61808998 100644 --- a/sqlalchemy_bigquery/base.py +++ b/sqlalchemy_bigquery/base.py @@ -38,7 +38,6 @@ import sqlalchemy import sqlalchemy.sql.expression import sqlalchemy.sql.functions -from sqlalchemy.sql.functions import rollup, cube, grouping_sets import sqlalchemy.sql.sqltypes import sqlalchemy.sql.type_api from sqlalchemy.exc import NoSuchTableError, NoSuchColumnError @@ -333,13 +332,7 @@ def visit_column( return self.preparer.quote(tablename) + "." + name - def visit_label(self, *args, within_group_by=False, **kwargs): - # Use labels in GROUP BY clause. - # - # Flag set in the group_by_clause method. Works around missing - # equivalent to supports_simple_order_by_label for group by. - if within_group_by: - kwargs["render_label_as_label"] = args[0] + def visit_label(self, *args, **kwargs): return super(BigQueryCompiler, self).visit_label(*args, **kwargs) def group_by_clause(self, select, **kwargs): From bd38a5e14145ca77ec462e7cf4e9a989f2eb1fb3 Mon Sep 17 00:00:00 2001 From: kiraksi Date: Mon, 5 Feb 2024 23:44:59 -0800 Subject: [PATCH 49/62] fixed render label as label assignment --- sqlalchemy_bigquery/base.py | 20 +++++++++++++++++--- tests/unit/test_compiler.py | 6 +++--- 2 files changed, 20 insertions(+), 6 deletions(-) diff --git a/sqlalchemy_bigquery/base.py b/sqlalchemy_bigquery/base.py index 61808998..4d232bc0 100644 --- a/sqlalchemy_bigquery/base.py +++ b/sqlalchemy_bigquery/base.py @@ -332,11 +332,25 @@ def visit_column( return self.preparer.quote(tablename) + "." + name - def visit_label(self, *args, **kwargs): + def visit_label(self, *args, within_group_by=False, **kwargs): + # Use labels in GROUP BY clause. + # + # Flag set in the group_by_clause method. Works around missing + # equivalent to supports_simple_order_by_label for group by. + if within_group_by: + if all( + keyword not in str(args[0]) + for keyword in ("GROUPING SETS", "ROLLUP", "CUBE") + ): + kwargs["render_label_as_label"] = args[0] return super(BigQueryCompiler, self).visit_label(*args, **kwargs) - def group_by_clause(self, select, **kwargs): - return super(BigQueryCompiler, self).group_by_clause(select, **kwargs) + def group_by_clause(self, select, **kw): + return super(BigQueryCompiler, self).group_by_clause( + select, + **kw, + within_group_by=True, + ) ############################################################################ # Handle parameters in in diff --git a/tests/unit/test_compiler.py b/tests/unit/test_compiler.py index 1b22b31f..55157537 100644 --- a/tests/unit/test_compiler.py +++ b/tests/unit/test_compiler.py @@ -288,7 +288,7 @@ def test_grouping_sets(faux_conn, metadata): "table1", metadata, sqlalchemy.Column("foo", sqlalchemy.Integer), - sqlalchemy.Column("bar", sqlalchemy.Integer), + sqlalchemy.Column("bar", sqlalchemy.ARRAY(sqlalchemy.Integer)), ) q = sqlalchemy.select(table.c.foo, table.c.bar).group_by( @@ -309,7 +309,7 @@ def test_rollup(faux_conn, metadata): "table1", metadata, sqlalchemy.Column("foo", sqlalchemy.Integer), - sqlalchemy.Column("bar", sqlalchemy.Integer), + sqlalchemy.Column("bar", sqlalchemy.ARRAY(sqlalchemy.Integer)), ) q = sqlalchemy.select(table.c.foo, table.c.bar).group_by( @@ -330,7 +330,7 @@ def test_cube(faux_conn, metadata): "table1", metadata, sqlalchemy.Column("foo", sqlalchemy.Integer), - sqlalchemy.Column("bar", sqlalchemy.Integer), + sqlalchemy.Column("bar", sqlalchemy.ARRAY(sqlalchemy.Integer)), ) q = sqlalchemy.select(table.c.foo, table.c.bar).group_by( From 515481458952deb31a280c3b3987c67e694062fb Mon Sep 17 00:00:00 2001 From: kiraksi Date: Tue, 6 Feb 2024 13:51:51 -0800 Subject: [PATCH 50/62] added test case --- sqlalchemy_bigquery/base.py | 5 ++--- tests/unit/test_compiler.py | 22 ++++++++++++++++++++++ 2 files changed, 24 insertions(+), 3 deletions(-) diff --git a/sqlalchemy_bigquery/base.py b/sqlalchemy_bigquery/base.py index 4d232bc0..4987b914 100644 --- a/sqlalchemy_bigquery/base.py +++ b/sqlalchemy_bigquery/base.py @@ -343,13 +343,12 @@ def visit_label(self, *args, within_group_by=False, **kwargs): for keyword in ("GROUPING SETS", "ROLLUP", "CUBE") ): kwargs["render_label_as_label"] = args[0] + return super(BigQueryCompiler, self).visit_label(*args, **kwargs) def group_by_clause(self, select, **kw): return super(BigQueryCompiler, self).group_by_clause( - select, - **kw, - within_group_by=True, + select, **kw, within_group_by=True ) ############################################################################ diff --git a/tests/unit/test_compiler.py b/tests/unit/test_compiler.py index 55157537..903e7195 100644 --- a/tests/unit/test_compiler.py +++ b/tests/unit/test_compiler.py @@ -26,6 +26,7 @@ sqlalchemy_before_2_0, ) from sqlalchemy.sql.functions import rollup, cube, grouping_sets +from sqlalchemy import func def test_constraints_are_ignored(faux_conn, metadata): @@ -343,3 +344,24 @@ def test_cube(faux_conn, metadata): ) found_sql = q.compile(faux_conn).string assert found_sql == expected_sql + + +def test_multiple_grouping_sets(faux_conn, metadata): + table = setup_table( + faux_conn, + "table1", + metadata, + sqlalchemy.Column("foo", sqlalchemy.Integer), + sqlalchemy.Column("bar", sqlalchemy.ARRAY(sqlalchemy.Integer)), + ) + + q = sqlalchemy.select(table.c.foo, table.c.bar).group_by( + grouping_sets(table.c.foo, table.c.bar), grouping_sets(table.c.foo) + ) + + expected_sql = ( + "SELECT `table1`.`foo`, `table1`.`bar` \n" + "FROM `table1` GROUP BY GROUPING SETS(`table1`.`foo`, `table1`.`bar`), GROUPING SETS(`table1`.`foo`)" + ) + found_sql = q.compile(faux_conn).string + assert found_sql == expected_sql From 033d3294d52e03ee2c9f72a8dac5ac2bd6da8e4e Mon Sep 17 00:00:00 2001 From: kiraksi Date: Thu, 8 Feb 2024 09:47:44 -0800 Subject: [PATCH 51/62] reformat logic --- sqlalchemy_bigquery/base.py | 12 +++++++----- tests/unit/test_compiler.py | 1 - 2 files changed, 7 insertions(+), 6 deletions(-) diff --git a/sqlalchemy_bigquery/base.py b/sqlalchemy_bigquery/base.py index 4987b914..765ddb67 100644 --- a/sqlalchemy_bigquery/base.py +++ b/sqlalchemy_bigquery/base.py @@ -338,11 +338,13 @@ def visit_label(self, *args, within_group_by=False, **kwargs): # Flag set in the group_by_clause method. Works around missing # equivalent to supports_simple_order_by_label for group by. if within_group_by: - if all( - keyword not in str(args[0]) - for keyword in ("GROUPING SETS", "ROLLUP", "CUBE") - ): - kwargs["render_label_as_label"] = args[0] + column_label = args[0] + sql_keywords = {"GROUPING SETS", "ROLLUP", "CUBE"} + for keyword in sql_keywords: + if keyword in str(column_label): + break + else: # for/else always happens unless break gets called + kwargs["render_label_as_label"] = column_label return super(BigQueryCompiler, self).visit_label(*args, **kwargs) diff --git a/tests/unit/test_compiler.py b/tests/unit/test_compiler.py index 903e7195..def13cfd 100644 --- a/tests/unit/test_compiler.py +++ b/tests/unit/test_compiler.py @@ -26,7 +26,6 @@ sqlalchemy_before_2_0, ) from sqlalchemy.sql.functions import rollup, cube, grouping_sets -from sqlalchemy import func def test_constraints_are_ignored(faux_conn, metadata): From 0c882b96eb1d54c3e4211ac53e1fba2027fb4435 Mon Sep 17 00:00:00 2001 From: kiraksi Date: Thu, 8 Feb 2024 09:53:56 -0800 Subject: [PATCH 52/62] test commit --- sqlalchemy_bigquery/base.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/sqlalchemy_bigquery/base.py b/sqlalchemy_bigquery/base.py index 765ddb67..e80f2891 100644 --- a/sqlalchemy_bigquery/base.py +++ b/sqlalchemy_bigquery/base.py @@ -402,8 +402,6 @@ def visit_not_in_op_binary(self, binary, operator, **kw): + ")" ) - visit_notin_op_binary = visit_not_in_op_binary # before 1.4 - ############################################################################ ############################################################################ From 87a75dc9fce72bd66556da1ecfc5044fee0bc1d0 Mon Sep 17 00:00:00 2001 From: kiraksi Date: Mon, 12 Feb 2024 12:05:57 -0800 Subject: [PATCH 53/62] create development build 1.11.0.dev3 --- CHANGELOG.md | 7 +++++++ sqlalchemy_bigquery/version.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 5d005bea..6ad8b596 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -13,6 +13,13 @@ Older versions of this project were distributed as [pybigquery][0]. [2]: https://pypi.org/project/pybigquery/#history +## [1.11.0.dev3](https://github.com/googleapis/python-bigquery-sqlalchemy/compare/v1.9.0...v1.11.0.dev3) (2024-01-30) + + +### Bug Fixes + +* Fix grouping sets, rollup and cube rendering issue ([#1019](https://github.com/googleapis/python-bigquery-sqlalchemy/pull/1019)) + ## [1.11.0.dev2](https://github.com/googleapis/python-bigquery-sqlalchemy/compare/v1.9.0...v1.11.0.dev2) (2024-02-01) ## [1.11.0.dev1](https://github.com/googleapis/python-bigquery-sqlalchemy/compare/v1.9.0...v1.11.0.dev1) (2024-01-30) diff --git a/sqlalchemy_bigquery/version.py b/sqlalchemy_bigquery/version.py index 51caf928..265cef18 100644 --- a/sqlalchemy_bigquery/version.py +++ b/sqlalchemy_bigquery/version.py @@ -17,4 +17,4 @@ # IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN # CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -__version__ = "1.11.0.dev2" +__version__ = "1.11.0.dev3" From a3cacd3b5bccc27970d246cbac9d9319a5611b54 Mon Sep 17 00:00:00 2001 From: Kira Date: Tue, 20 Feb 2024 11:42:48 -0800 Subject: [PATCH 54/62] chore: add more grouping sets/rollup/cube tests (#1029) * chore: add more tests for grouping functions fix * reformatted tests --- tests/unit/test_compiler.py | 118 ++++++++++++++++++++++-------------- 1 file changed, 71 insertions(+), 47 deletions(-) diff --git a/tests/unit/test_compiler.py b/tests/unit/test_compiler.py index def13cfd..cc9116e3 100644 --- a/tests/unit/test_compiler.py +++ b/tests/unit/test_compiler.py @@ -28,6 +28,23 @@ from sqlalchemy.sql.functions import rollup, cube, grouping_sets +@pytest.fixture +def table(faux_conn, metadata): + # Fixture to create a sample table for testing + + table = setup_table( + faux_conn, + "table1", + metadata, + sqlalchemy.Column("foo", sqlalchemy.Integer), + sqlalchemy.Column("bar", sqlalchemy.ARRAY(sqlalchemy.Integer)), + ) + + yield table + + table.drop(faux_conn) + + def test_constraints_are_ignored(faux_conn, metadata): sqlalchemy.Table( "ref", @@ -282,85 +299,92 @@ def test_no_implicit_join_for_inner_unnest_no_table2_column(faux_conn, metadata) assert found_outer_sql == expected_outer_sql -def test_grouping_sets(faux_conn, metadata): - table = setup_table( - faux_conn, - "table1", - metadata, - sqlalchemy.Column("foo", sqlalchemy.Integer), - sqlalchemy.Column("bar", sqlalchemy.ARRAY(sqlalchemy.Integer)), +grouping_ops = ( + "grouping_op, grouping_op_func", + [("GROUPING SETS", grouping_sets), ("ROLLUP", rollup), ("CUBE", cube)], +) + + +@pytest.mark.parametrize(*grouping_ops) +def test_grouping_ops_vs_single_column(faux_conn, table, grouping_op, grouping_op_func): + # Tests each of the grouping ops against a single column + + q = sqlalchemy.select(table.c.foo).group_by(grouping_op_func(table.c.foo)) + found_sql = q.compile(faux_conn).string + + expected_sql = ( + f"SELECT `table1`.`foo` \n" + f"FROM `table1` GROUP BY {grouping_op}(`table1`.`foo`)" ) + assert found_sql == expected_sql + + +@pytest.mark.parametrize(*grouping_ops) +def test_grouping_ops_vs_multi_columns(faux_conn, table, grouping_op, grouping_op_func): + # Tests each of the grouping ops against multiple columns + q = sqlalchemy.select(table.c.foo, table.c.bar).group_by( - grouping_sets(table.c.foo, table.c.bar) + grouping_op_func(table.c.foo, table.c.bar) ) + found_sql = q.compile(faux_conn).string expected_sql = ( - "SELECT `table1`.`foo`, `table1`.`bar` \n" - "FROM `table1` GROUP BY GROUPING SETS(`table1`.`foo`, `table1`.`bar`)" + f"SELECT `table1`.`foo`, `table1`.`bar` \n" + f"FROM `table1` GROUP BY {grouping_op}(`table1`.`foo`, `table1`.`bar`)" ) - found_sql = q.compile(faux_conn).string + assert found_sql == expected_sql -def test_rollup(faux_conn, metadata): - table = setup_table( - faux_conn, - "table1", - metadata, - sqlalchemy.Column("foo", sqlalchemy.Integer), - sqlalchemy.Column("bar", sqlalchemy.ARRAY(sqlalchemy.Integer)), - ) +@pytest.mark.parametrize(*grouping_ops) +def test_grouping_op_with_grouping_op(faux_conn, table, grouping_op, grouping_op_func): + # Tests multiple grouping ops in a single statement q = sqlalchemy.select(table.c.foo, table.c.bar).group_by( - rollup(table.c.foo, table.c.bar) + grouping_op_func(table.c.foo, table.c.bar), grouping_op_func(table.c.foo) ) + found_sql = q.compile(faux_conn).string expected_sql = ( - "SELECT `table1`.`foo`, `table1`.`bar` \n" - "FROM `table1` GROUP BY ROLLUP(`table1`.`foo`, `table1`.`bar`)" + f"SELECT `table1`.`foo`, `table1`.`bar` \n" + f"FROM `table1` GROUP BY {grouping_op}(`table1`.`foo`, `table1`.`bar`), {grouping_op}(`table1`.`foo`)" ) - found_sql = q.compile(faux_conn).string + assert found_sql == expected_sql -def test_cube(faux_conn, metadata): - table = setup_table( - faux_conn, - "table1", - metadata, - sqlalchemy.Column("foo", sqlalchemy.Integer), - sqlalchemy.Column("bar", sqlalchemy.ARRAY(sqlalchemy.Integer)), - ) +@pytest.mark.parametrize(*grouping_ops) +def test_grouping_ops_vs_group_by(faux_conn, table, grouping_op, grouping_op_func): + # Tests grouping op against regular group by statement q = sqlalchemy.select(table.c.foo, table.c.bar).group_by( - cube(table.c.foo, table.c.bar) + table.c.foo, grouping_op_func(table.c.bar) ) + found_sql = q.compile(faux_conn).string expected_sql = ( - "SELECT `table1`.`foo`, `table1`.`bar` \n" - "FROM `table1` GROUP BY CUBE(`table1`.`foo`, `table1`.`bar`)" + f"SELECT `table1`.`foo`, `table1`.`bar` \n" + f"FROM `table1` GROUP BY `table1`.`foo`, {grouping_op}(`table1`.`bar`)" ) - found_sql = q.compile(faux_conn).string + assert found_sql == expected_sql -def test_multiple_grouping_sets(faux_conn, metadata): - table = setup_table( - faux_conn, - "table1", - metadata, - sqlalchemy.Column("foo", sqlalchemy.Integer), - sqlalchemy.Column("bar", sqlalchemy.ARRAY(sqlalchemy.Integer)), - ) +@pytest.mark.parametrize(*grouping_ops) +def test_complex_grouping_ops_vs_nested_grouping_ops( + faux_conn, table, grouping_op, grouping_op_func +): + # Tests grouping ops nested within grouping ops q = sqlalchemy.select(table.c.foo, table.c.bar).group_by( - grouping_sets(table.c.foo, table.c.bar), grouping_sets(table.c.foo) + grouping_sets(table.c.foo, grouping_op_func(table.c.bar)) ) + found_sql = q.compile(faux_conn).string expected_sql = ( - "SELECT `table1`.`foo`, `table1`.`bar` \n" - "FROM `table1` GROUP BY GROUPING SETS(`table1`.`foo`, `table1`.`bar`), GROUPING SETS(`table1`.`foo`)" + f"SELECT `table1`.`foo`, `table1`.`bar` \n" + f"FROM `table1` GROUP BY GROUPING SETS(`table1`.`foo`, {grouping_op}(`table1`.`bar`))" ) - found_sql = q.compile(faux_conn).string + assert found_sql == expected_sql From 34edb6d9132d62a9d9da460b5e851f1888edfbfe Mon Sep 17 00:00:00 2001 From: kiraksi Date: Tue, 20 Feb 2024 14:28:32 -0800 Subject: [PATCH 55/62] update changelog --- CHANGELOG.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 6ad8b596..2f98741e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -13,12 +13,13 @@ Older versions of this project were distributed as [pybigquery][0]. [2]: https://pypi.org/project/pybigquery/#history -## [1.11.0.dev3](https://github.com/googleapis/python-bigquery-sqlalchemy/compare/v1.9.0...v1.11.0.dev3) (2024-01-30) +## [1.11.0.dev3](https://github.com/googleapis/python-bigquery-sqlalchemy/compare/v1.9.0...v1.11.0.dev3) (2024-02-20) ### Bug Fixes * Fix grouping sets, rollup and cube rendering issue ([#1019](https://github.com/googleapis/python-bigquery-sqlalchemy/pull/1019)) +* Add more grouping sets/rollup/cube tests ([#1029](https://github.com/googleapis/python-bigquery-sqlalchemy/pull/1029)) ## [1.11.0.dev2](https://github.com/googleapis/python-bigquery-sqlalchemy/compare/v1.9.0...v1.11.0.dev2) (2024-02-01) From bcedd9c7b7ffc44f1dd3933cff49dae163951cc1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tim=20Swe=C3=B1a=20=28Swast=29?= Date: Tue, 2 Apr 2024 15:34:43 -0500 Subject: [PATCH 56/62] revert changelog --- CHANGELOG.md | 24 ------------------------ 1 file changed, 24 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 1268ff93..52bbfe0e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -13,30 +13,6 @@ Older versions of this project were distributed as [pybigquery][0]. [2]: https://pypi.org/project/pybigquery/#history -## [1.11.0.dev3](https://github.com/googleapis/python-bigquery-sqlalchemy/compare/v1.9.0...v1.11.0.dev3) (2024-02-20) - - -### Bug Fixes - -* Fix grouping sets, rollup and cube rendering issue ([#1019](https://github.com/googleapis/python-bigquery-sqlalchemy/pull/1019)) -* Add more grouping sets/rollup/cube tests ([#1029](https://github.com/googleapis/python-bigquery-sqlalchemy/pull/1029)) - -## [1.11.0.dev2](https://github.com/googleapis/python-bigquery-sqlalchemy/compare/v1.9.0...v1.11.0.dev2) (2024-02-01) - -## [1.11.0.dev1](https://github.com/googleapis/python-bigquery-sqlalchemy/compare/v1.9.0...v1.11.0.dev1) (2024-01-30) - - -### Bug Fixes - -* Fix coverage test issues in SQLAlchemy migration ([#987](https://github.com/googleapis/python-bigquery-sqlalchemy/pull/987)) -* Cleanup test_sqlalchemy_dialect file for readability ([#1018](https://github.com/googleapis/python-bigquery-sqlalchemy/pull/1018)) - -## [1.11.0.dev0](https://github.com/googleapis/python-bigquery-sqlalchemy/compare/v1.9.0...v1.11.0.dev0) (2024-01-25) - - -### Features - -* Drop support for SQLAlchemy versions 1.2 and 1.3, maintain support for 1.4 and add support for 2.0 ([#920](https://github.com/googleapis/python-bigquery-sqlalchemy/pull/920)) ## [1.10.0](https://github.com/googleapis/python-bigquery-sqlalchemy/compare/v1.9.0...v1.10.0) (2024-02-27) From 924003e4122e4ab463f57f3c534356eaa8370045 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tim=20Swe=C3=B1a=20=28Swast=29?= Date: Tue, 2 Apr 2024 15:36:11 -0500 Subject: [PATCH 57/62] remove note --- README.rst | 3 --- 1 file changed, 3 deletions(-) diff --git a/README.rst b/README.rst index b6693abb..5f77e86f 100644 --- a/README.rst +++ b/README.rst @@ -34,9 +34,6 @@ In order to use this library, you first need to go through the following steps: .. _Enable the BigQuery Storage API.: https://console.cloud.google.com/apis/library/bigquery.googleapis.com .. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html -.. note:: - This library is a prerelease to gauge compatiblity with SQLAlchemy - versions >= 1.4.16 and < 2.1 Installation ------------ From dfbfcc32c048469f43616300acb18225d559ae63 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tim=20Swe=C3=B1a=20=28Swast=29?= Date: Tue, 2 Apr 2024 15:40:21 -0500 Subject: [PATCH 58/62] don't install prerelease in compliance session --- noxfile.py | 4 +--- setup.py | 2 +- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/noxfile.py b/noxfile.py index fc0ed6c5..36729727 100644 --- a/noxfile.py +++ b/noxfile.py @@ -368,8 +368,6 @@ def compliance(session): if not os.path.exists(system_test_folder_path): session.skip("Compliance tests were not found") - session.install("--pre", "grpcio") - session.install("--pre", "--no-deps", "--upgrade", "sqlalchemy>=1.4.16,<2.1") session.install( "mock", "pytest", @@ -543,7 +541,7 @@ def prerelease_deps(session): prerel_deps = [ "protobuf", - "sqlalchemy>=1.4.16,<2.1", + "sqlalchemy", # dependency of grpc "six", "googleapis-common-protos", diff --git a/setup.py b/setup.py index 31565afa..b33e1c6e 100644 --- a/setup.py +++ b/setup.py @@ -101,7 +101,7 @@ def readme(): "google-auth>=1.25.0,<3.0.0dev", # Work around pip wack. "google-cloud-bigquery>=3.3.6,<4.0.0dev", "packaging", - "sqlalchemy>=1.4.16,<2.1", + "sqlalchemy>=1.4.16,<3.0.0dev", ], extras_require=extras, python_requires=">=3.8, <3.13", From 853e5fd4301ae64a0c56f3bcd7c394dbfd725207 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tim=20Swe=C3=B1a=20=28Swast=29?= Date: Tue, 2 Apr 2024 15:42:41 -0500 Subject: [PATCH 59/62] sync owlbot --- owlbot.py | 11 +---------- 1 file changed, 1 insertion(+), 10 deletions(-) diff --git a/owlbot.py b/owlbot.py index 4dfec18d..9d4aaafc 100644 --- a/owlbot.py +++ b/owlbot.py @@ -96,7 +96,7 @@ """"protobuf", # dependency of grpc""", """"protobuf", - "sqlalchemy>=1.4.16,<2.1", + "sqlalchemy", # dependency of grpc""", ) @@ -116,13 +116,6 @@ def place_before(path, text, *before_text, escape=None): s.replace([path], text, replacement) -place_before( - "noxfile.py", - "SYSTEM_TEST_PYTHON_VERSIONS=", - "", - "# We're using two Python versions to test with sqlalchemy>=1.4.16", -) - place_before( "noxfile.py", "nox.options.error_on_missing_interpreters = True", @@ -166,8 +159,6 @@ def compliance(session): if not os.path.exists(system_test_folder_path): session.skip("Compliance tests were not found") - session.install("--pre", "grpcio") - session.install("--pre", "--no-deps", "--upgrade", "sqlalchemy>=1.4.16,<2.1") session.install( "mock", "pytest", From b8fd1943f4091ed23f23303a28c471f9f029efe2 Mon Sep 17 00:00:00 2001 From: Chalmer Lowe Date: Wed, 3 Apr 2024 10:59:35 -0400 Subject: [PATCH 60/62] Update tests/sqlalchemy_dialect_compliance/test_dialect_compliance.py --- tests/sqlalchemy_dialect_compliance/test_dialect_compliance.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/sqlalchemy_dialect_compliance/test_dialect_compliance.py b/tests/sqlalchemy_dialect_compliance/test_dialect_compliance.py index 5420bf32..1928e164 100644 --- a/tests/sqlalchemy_dialect_compliance/test_dialect_compliance.py +++ b/tests/sqlalchemy_dialect_compliance/test_dialect_compliance.py @@ -584,7 +584,7 @@ def test_simple_offset(self): # The original test is missing an order by. # Also, note that sqlalchemy union is a union distinct, not a - # union all. This test caught that were were getting that wrong. + # union all. This test caught that we were getting that wrong. def test_limit_render_multiple_times(self, connection): table = self.tables.some_table stmt = select(table.c.id).order_by(table.c.id).limit(1).scalar_subquery() From eda48130b9fb15c460ec4f104dd56b82ca1e9c0a Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Wed, 3 Apr 2024 12:19:01 -0500 Subject: [PATCH 61/62] make results order not matter --- .../sqlalchemy_dialect_compliance/test_dialect_compliance.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/sqlalchemy_dialect_compliance/test_dialect_compliance.py b/tests/sqlalchemy_dialect_compliance/test_dialect_compliance.py index 1928e164..58f9cfce 100644 --- a/tests/sqlalchemy_dialect_compliance/test_dialect_compliance.py +++ b/tests/sqlalchemy_dialect_compliance/test_dialect_compliance.py @@ -313,8 +313,8 @@ def test_dont_truncate_rightside( for args in combinations: eq_( - connection.scalars(select(t.c.x).where(t.c.x.like(args[0]))).all(), - args[1], + list(sorted(connection.scalars(select(t.c.x).where(t.c.x.like(args[0]))).all())), + list(sorted(args[1])), ) class UuidTest(_UuidTest): From 82c372ae7d3d635afee1a07454e25b7ed416be80 Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Wed, 3 Apr 2024 17:21:17 +0000 Subject: [PATCH 62/62] =?UTF-8?q?=F0=9F=A6=89=20Updates=20from=20OwlBot=20?= =?UTF-8?q?post-processor?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --- .../test_dialect_compliance.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/tests/sqlalchemy_dialect_compliance/test_dialect_compliance.py b/tests/sqlalchemy_dialect_compliance/test_dialect_compliance.py index 58f9cfce..57cd9a0d 100644 --- a/tests/sqlalchemy_dialect_compliance/test_dialect_compliance.py +++ b/tests/sqlalchemy_dialect_compliance/test_dialect_compliance.py @@ -313,7 +313,13 @@ def test_dont_truncate_rightside( for args in combinations: eq_( - list(sorted(connection.scalars(select(t.c.x).where(t.c.x.like(args[0]))).all())), + list( + sorted( + connection.scalars( + select(t.c.x).where(t.c.x.like(args[0])) + ).all() + ) + ), list(sorted(args[1])), )