snippets_test.py
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from google.cloud import bigquery
import pytest

import snippets


DATASET_ID = 'test_dataset'
TABLE_ID = 'test_table'
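

# Several tests below assume that DATASET_ID and TABLE_ID already exist in the
# test project. As a minimal setup sketch (not part of the original suite),
# the fixtures could be pre-created with the legacy google-cloud-bigquery
# client API this file uses; the helper name _create_expected_fixtures and the
# exact 'Name'/'Age' schema are assumptions inferred from test_list_rows, and
# dataset.create() mirrors the table.create() call used further down.
def _create_expected_fixtures():
    bigquery_client = bigquery.Client()
    dataset = bigquery_client.dataset(DATASET_ID)

    # Create the dataset that the list/create/copy tests expect to find.
    if not dataset.exists():
        dataset.create()

    # Create the table whose schema test_list_rows checks for.
    table = dataset.table(TABLE_ID)
    if not table.exists():
        table.schema = [
            bigquery.SchemaField('Name', 'STRING'),
            bigquery.SchemaField('Age', 'INTEGER'),
        ]
        table.create()

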
def test_list_projects():
    snippets.list_projects()
    # No need to check the output; the lack of an exception is enough.


def test_list_datasets(capsys):
    # Requires the dataset to have been created in the test project
    # (see the setup sketch above).
    snippets.list_datasets()

    out, _ = capsys.readouterr()

    assert DATASET_ID in out


@pytest.fixture
def cleanup_dataset():
    dataset_name = 'test_temporary_dataset'
    bigquery_client = bigquery.Client()
    dataset = bigquery_client.dataset(dataset_name)

    if dataset.exists():
        dataset.delete()

    yield dataset_name

    if dataset.exists():
        dataset.delete()


def test_create_dataset(capsys, cleanup_dataset):
    snippets.create_dataset(cleanup_dataset)

    out, _ = capsys.readouterr()

    assert cleanup_dataset in out


def test_list_tables(capsys):
    # Requires the dataset and table to have been created in the test project.
    snippets.list_tables(DATASET_ID)

    out, _ = capsys.readouterr()

    assert TABLE_ID in out


def test_list_rows(capsys):
    # Requires the dataset and table to have been created in the test project.
    # Check for the schema. It's okay if the table is empty as long as there
    # aren't any errors.
    snippets.list_rows(DATASET_ID, TABLE_ID)

    out, _ = capsys.readouterr()

    assert 'Name' in out
    assert 'Age' in out


@pytest.fixture
def temporary_table():
    """Fixture that returns a factory for tables that do not yet exist and
    will be automatically deleted after the test."""
    bigquery_client = bigquery.Client()
    dataset = bigquery_client.dataset(DATASET_ID)
    tables = []

    def factory(table_name):
        new_table = dataset.table(table_name)
        if new_table.exists():
            new_table.delete()
        tables.append(new_table)
        return new_table

    yield factory

    for table in tables:
        if table.exists():
            table.delete()


def test_create_table(temporary_table):
    new_table = temporary_table('test_create_table')
    snippets.create_table(DATASET_ID, new_table.name)
    assert new_table.exists()


@pytest.mark.slow
def test_copy_table(temporary_table):
    new_table = temporary_table('test_copy_table')
    snippets.copy_table(DATASET_ID, TABLE_ID, new_table.name)
    assert new_table.exists()


def test_delete_table():
    # Create a table to delete.
    bigquery_client = bigquery.Client()
    dataset = bigquery_client.dataset(DATASET_ID)
    table = dataset.table('test_delete_table')

    if not table.exists():
        table.schema = [bigquery.SchemaField('id', 'INTEGER')]
        table.create()

    snippets.delete_table(DATASET_ID, table.name)

    assert not table.exists()
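

# Usage note (a sketch, not part of the original file): these tests talk to a
# real BigQuery project, so they need application default credentials (for
# example via the GOOGLE_APPLICATION_CREDENTIALS environment variable) plus the
# pre-created 'test_dataset'/'test_table' fixtures described above. Because
# test_copy_table is marked slow, a quick local run might skip it:
#
#     pytest snippets_test.py -m "not slow"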