Skip to content

Commit 7870a4a

Browse files
committed
test datalab magics and fix lint
1 parent 0bc1ea4 commit 7870a4a

File tree

2 files changed

+57
-55
lines changed

2 files changed

+57
-55
lines changed
Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,4 @@
11
google-cloud-bigquery[pandas,pyarrow]==1.7.0
22
datalab==1.1.4
33
ipython==7.2.0
4+
google-cloud-monitoring==0.28.1

bigquery/datalab-migration/samples_test.py

Lines changed: 56 additions & 55 deletions
Original file line numberDiff line numberDiff line change
@@ -12,10 +12,8 @@
1212
# See the License for the specific language governing permissions and
1313
# limitations under the License.
1414

15-
import os
1615
import time
1716

18-
from google.cloud import bigquery
1917
import pytest
2018
try:
2119
import IPython
@@ -25,16 +23,6 @@
2523
IPython = None
2624

2725

28-
@pytest.fixture
29-
def temp_dataset():
30-
client = bigquery.Client()
31-
dataset_id = "temp_dataset_{}".format(int(time.time() * 1000))
32-
dataset_ref = bigquery.DatasetReference(client.project, dataset_id)
33-
dataset = client.create_dataset(bigquery.Dataset(dataset_ref))
34-
yield dataset
35-
client.delete_dataset(dataset, delete_contents=True)
36-
37-
3826
@pytest.fixture(scope='session')
3927
def ipython():
4028
config = tools.default_config()
@@ -43,16 +31,6 @@ def ipython():
4331
return shell
4432

4533

46-
@pytest.fixture()
47-
def ipython_interactive(request, ipython):
48-
"""Activate IPython's builtin hooks
49-
50-
for the duration of the test scope.
51-
"""
52-
with ipython.builtin_trap:
53-
yield ipython
54-
55-
5634
@pytest.fixture
5735
def to_delete():
5836
from google.cloud import bigquery
@@ -65,9 +43,9 @@ def to_delete():
6543
client.delete_dataset(dataset, delete_contents=True)
6644

6745

68-
def _set_up_ipython():
46+
def _set_up_ipython(extension):
6947
ip = IPython.get_ipython()
70-
ip.extension_manager.load_extension('google.cloud.bigquery')
48+
ip.extension_manager.load_extension(extension)
7149
return ip
7250

7351

@@ -84,11 +62,10 @@ def _run_magic_sample(sample, ip):
8462

8563

8664
@pytest.mark.skipif(IPython is None, reason="Requires `ipython`")
87-
def test_query_magic(ipython):
88-
ip = _set_up_ipython()
65+
def test_datalab_query_magic(ipython):
66+
ip = _set_up_ipython('google.datalab.kernel')
8967

90-
# Datalab sample
91-
"""
68+
sample = """
9269
# [START bigquery_migration_datalab_query_magic]
9370
%%bq
9471
SELECT word, SUM(word_count) as count
@@ -97,6 +74,12 @@ def test_query_magic(ipython):
9774
ORDER BY count ASC
9875
# [END bigquery_migration_datalab_query_magic]
9976
"""
77+
_run_magic_sample(sample, ip)
78+
79+
80+
@pytest.mark.skipif(IPython is None, reason="Requires `ipython`")
81+
def test_client_library_query_magic(ipython):
82+
ip = _set_up_ipython('google.cloud.bigquery')
10083

10184
sample = """
10285
# [START bigquery_migration_client_library_query_magic]
@@ -111,11 +94,10 @@ def test_query_magic(ipython):
11194

11295

11396
@pytest.mark.skipif(IPython is None, reason="Requires `ipython`")
114-
def test_query_magic_results_variable(ipython):
115-
ip = _set_up_ipython()
97+
def test_datalab_query_magic_results_variable(ipython):
98+
ip = _set_up_ipython('google.datalab.kernel')
11699

117-
# Datalab sample
118-
"""
100+
sample = """
119101
# [START bigquery_migration_datalab_query_magic_results_variable]
120102
%%bq --name my_variable
121103
SELECT word, SUM(word_count) as count
@@ -124,6 +106,12 @@ def test_query_magic_results_variable(ipython):
124106
ORDER BY count ASC
125107
# [END bigquery_migration_datalab_query_magic_results_variable]
126108
"""
109+
_run_magic_sample(sample, ip)
110+
111+
112+
@pytest.mark.skipif(IPython is None, reason="Requires `ipython`")
113+
def test_client_library_query_magic_results_variable(ipython):
114+
ip = _set_up_ipython('google.cloud.bigquery')
127115

128116
sample = """
129117
# [START bigquery_migration_client_library_query_magic_results_variable]
@@ -138,33 +126,41 @@ def test_query_magic_results_variable(ipython):
138126

139127

140128
@pytest.mark.skipif(IPython is None, reason="Requires `ipython`")
141-
def test_query_magic_parameterized_query(ipython):
142-
ip = _set_up_ipython()
129+
def test_datalab_magic_parameterized_query(ipython):
130+
ip = _set_up_ipython('google.datalab.kernel')
143131

144-
# Datalab samples
145-
"""
146-
# [START bigquery_migration_datalab_magic_parameterized_query_define]
132+
sample = """
133+
# [START bigquery_migration_datalab_magic_define_parameterized_query]
147134
%%bq query -n my_variable
148135
SELECT word, SUM(word_count) as count
149136
FROM `bigquery-public-data.samples.shakespeare`
150137
WHERE corpus = @corpus_name
151138
GROUP BY word
152139
ORDER BY count ASC
153-
# [END bigquery_migration_datalab_magic_parameterized_query_define]
140+
# [END bigquery_migration_datalab_magic_define_parameterized_query]
141+
"""
142+
_run_magic_sample(sample, ip)
154143

155-
# [START bigquery_migration_datalab_magic_parameterized_query_execute]
144+
sample = """
145+
# [START bigquery_migration_datalab_magic_execute_parameterized_query]
156146
%%bq execute -q endpoint_stats
157147
parameters:
158148
- name: corpus_name
159149
type: STRING
160150
value: hamlet
161-
# [END bigquery_migration_datalab_magic_parameterized_query_execute]
151+
# [END bigquery_migration_datalab_magic_execute_parameterized_query]
162152
"""
153+
_run_magic_sample(sample, ip)
154+
155+
156+
@pytest.mark.skipif(IPython is None, reason="Requires `ipython`")
157+
def test_query_magic_parameterized_query(ipython):
158+
ip = _set_up_ipython('google.cloud.bigquery')
163159

164160
sample = """
165-
# [START bigquery_migration_client_library_magic_parameterized_query_define_parameter]
161+
# [START bigquery_migration_client_library_magic_query_params]
166162
params = {"corpus_name": "hamlet"}
167-
# [END bigquery_migration_client_library_magic_parameterized_query_define_parameter]
163+
# [END bigquery_migration_client_library_magic_query_params]
168164
"""
169165
_run_magic_sample(sample, ip)
170166

@@ -182,20 +178,25 @@ def test_query_magic_parameterized_query(ipython):
182178

183179

184180
@pytest.mark.skipif(IPython is None, reason="Requires `ipython`")
185-
def test_command_line_interface(ipython):
186-
ip = IPython.get_ipython()
181+
def test_datalab_list_tables_magic(ipython):
182+
ip = _set_up_ipython('google.datalab.kernel')
187183

188-
# Datalab sample
189-
"""
184+
sample = """
190185
# [START bigquery_migration_datalab_list_tables_magic]
191186
%bq tables list --dataset bigquery-public-data.samples
192187
# [END bigquery_migration_datalab_list_tables_magic]
193188
"""
189+
_run_magic_sample(sample, ip)
190+
191+
192+
@pytest.mark.skipif(IPython is None, reason="Requires `ipython`")
193+
def test_command_line_interface(ipython):
194+
ip = IPython.get_ipython()
194195

195196
sample = """
196-
# [START bigquery_migration_datalab_list_tables_magic]
197+
# [START bigquery_migration_command_line_list_tables]
197198
!bq ls bigquery-public-data:samples
198-
# [END bigquery_migration_datalab_list_tables_magic]
199+
# [END bigquery_migration_command_line_list_tables]
199200
"""
200201
_run_magic_sample(sample, ip)
201202

@@ -260,7 +261,7 @@ def test_datalab_load_table_from_gcs_csv(to_delete):
260261
'gs://cloud-samples-data/bigquery/us-states/us-states.csv',
261262
mode='append',
262263
source_format='csv',
263-
csv_options=bq.CSVOptions(skip_leading_rows = 1)
264+
csv_options=bq.CSVOptions(skip_leading_rows=1)
264265
) # Waits for the job to complete
265266
# [END bigquery_migration_datalab_load_table_from_gcs_csv]
266267

@@ -285,13 +286,13 @@ def test_client_library_load_table_from_gcs_csv(to_delete):
285286

286287
# Create the table
287288
job_config = bigquery.LoadJobConfig(
288-
schema = [
289+
schema=[
289290
bigquery.SchemaField('name', 'STRING'),
290291
bigquery.SchemaField('post_abbr', 'STRING')
291292
],
292-
skip_leading_rows = 1,
293+
skip_leading_rows=1,
293294
# The source format defaults to CSV, so the line below is optional.
294-
source_format = bigquery.SourceFormat.CSV
295+
source_format=bigquery.SourceFormat.CSV
295296
)
296297
load_job = client.load_table_from_uri(
297298
'gs://cloud-samples-data/bigquery/us-states/us-states.csv',
@@ -330,15 +331,16 @@ def test_datalab_load_table_from_dataframe(to_delete):
330331
},
331332
])
332333
schema = bq.Schema.from_data(dataframe)
333-
table = bq.Table('{}.monty_python'.format(dataset_id)).create(schema=schema)
334+
table = bq.Table(
335+
'{}.monty_python'.format(dataset_id)).create(schema=schema)
334336
table.insert(dataframe) # Starts streaming insert of data
335337
# [END bigquery_migration_datalab_load_table_from_dataframe]
336338
# The Datalab library uses tabledata().insertAll() to load data from
337339
# pandas DataFrames to tables. Because it can take a long time for the rows
338340
# to be available in the table, this test does not assert on the number of
339341
# rows in the destination table after the job is run. If errors are
340342
# encountered during the insertion, this test will fail.
341-
# See https://cloud.google.com/bigquery/streaming-data-into-bigquery#dataavailability
343+
# See https://cloud.google.com/bigquery/streaming-data-into-bigquery
342344

343345

344346
def test_client_library_load_table_from_dataframe(to_delete):
@@ -375,4 +377,3 @@ def test_client_library_load_table_from_dataframe(to_delete):
375377

376378
table = client.get_table(dataset.table('monty_python'))
377379
assert table.num_rows == 4
378-

0 commit comments

Comments
 (0)