25 changes: 18 additions & 7 deletions samples/snippets/conftest.py
@@ -12,9 +12,9 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-from typing import Iterator
+from typing import Generator, Iterator

-from google.cloud import bigquery
+from google.cloud import bigquery, storage
 import pytest
 import test_utils.prefixer

@@ -42,11 +42,27 @@ def bigquery_client() -> bigquery.Client:
     return bigquery_client


+@pytest.fixture(scope="session")
+def storage_client(project_id: str) -> storage.Client:
+    return storage.Client(project=project_id)
+
+
 @pytest.fixture(scope="session")
 def project_id(bigquery_client: bigquery.Client) -> str:
     return bigquery_client.project


+@pytest.fixture(scope="session")
+def gcs_bucket(storage_client: storage.Client) -> Generator[str, None, None]:
+    bucket_name = "bigframes_blob_test"
+
+    yield bucket_name
+
+    bucket = storage_client.get_bucket(bucket_name)
+    for blob in bucket.list_blobs():
+        blob.delete()
+
+
 @pytest.fixture(autouse=True)
 def reset_session() -> None:
     """An autouse fixture ensuring each sample runs in a fresh session.
@@ -78,11 +94,6 @@ def dataset_id_eu(bigquery_client: bigquery.Client, project_id: str) -> Iterator
     bigquery_client.delete_dataset(dataset, delete_contents=True, not_found_ok=True)


-@pytest.fixture(scope="session")
-def gcs_dst_bucket() -> str:
-    return "gs://bigframes_blob_test"
-
-
 @pytest.fixture
 def random_model_id(
     bigquery_client: bigquery.Client, project_id: str, dataset_id: str
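Note on the new gcs_bucket fixture: it assumes the bigframes_blob_test bucket already exists, and at session teardown it deletes every blob in the bucket rather than the bucket itself, so tests can write objects without leaking them across sessions. A minimal sketch of a test consuming the fixture (the test name, blob name, and payload here are hypothetical):

def test_writes_to_bucket(storage_client: storage.Client, gcs_bucket: str) -> None:
    # pytest injects both session-scoped fixtures by name
    bucket = storage_client.bucket(gcs_bucket)
    # Any object written here is removed by the fixture's teardown
    bucket.blob("example.txt").upload_from_string("hello")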
4 changes: 2 additions & 2 deletions samples/snippets/multimodal_test.py
@@ -13,9 +13,9 @@
 # limitations under the License.


-def test_multimodal_dataframe(gcs_dst_bucket: str) -> None:
+def test_multimodal_dataframe(gcs_bucket: str) -> None:
     # destination folder must be in a GCS bucket that the BQ connection service account (default or user provided) has write access to.
-    dst_bucket = gcs_dst_bucket
+    dst_bucket = f"gs://{gcs_bucket}"
     # [START bigquery_dataframes_multimodal_dataframe_create]
     import bigframes

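Design note: gcs_bucket yields a bare bucket name rather than the full gs:// URI the removed gcs_dst_bucket fixture returned. Callers that need a URI build it themselves, as in dst_bucket = f"gs://{gcs_bucket}" above, while the fixture's own teardown can pass the bare name straight to storage_client.get_bucket().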
12 changes: 11 additions & 1 deletion samples/snippets/sessions_and_io_test.py
@@ -13,9 +13,10 @@
 # limitations under the License.


-def test_sessions_and_io(project_id: str, dataset_id: str) -> None:
+def test_sessions_and_io(project_id: str, dataset_id: str, gcs_bucket: str) -> None:
     YOUR_PROJECT_ID = project_id
     YOUR_LOCATION = "us"
+    YOUR_BUCKET = gcs_bucket

     # [START bigquery_dataframes_create_and_use_session_instance]
     import bigframes
@@ -138,6 +139,15 @@ def test_sessions_and_io(project_id: str, dataset_id: str) -> None:
     # [END bigquery_dataframes_read_data_from_csv]
     assert df is not None

+    # [START bigquery_dataframes_write_data_to_csv]
+    import bigframes.pandas as bpd
+
+    df = bpd.DataFrame({"my_col": [1, 2, 3]})
+    # Write a dataframe to a CSV file in GCS
+    df.to_csv(f"gs://{YOUR_BUCKET}/myfile*.csv")
+    # [END bigquery_dataframes_write_data_to_csv]
+    assert df is not None
+
     # [START bigquery_dataframes_read_data_from_bigquery_table]
     import bigframes.pandas as bpd
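The * in the destination path is a wildcard: BigQuery-backed exports can shard large results across multiple objects, substituting a shard number for the wildcard in each object name. A hedged sketch of reading the output back, assuming bpd.read_csv accepts the same wildcard pattern and reusing the test's YOUR_BUCKET variable:

import bigframes.pandas as bpd

# Read every shard the export produced back into one DataFrame
df = bpd.read_csv(f"gs://{YOUR_BUCKET}/myfile*.csv")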