
Commit 6a3276e

Merge pull request tylertreat#107 from cbhimavarapu-datalicious/master
Allowing Write query result to table to accept a maximumBillingTier argument
2 parents af4902a + d1d850d commit 6a3276e

File tree

2 files changed: +53 −4 lines changed

bigquery/client.py

Lines changed: 14 additions & 4 deletions
@@ -1048,7 +1048,8 @@ def write_to_table(
             priority=None,
             create_disposition=None,
             write_disposition=None,
-            use_legacy_sql=None
+            use_legacy_sql=None,
+            maximum_billing_tier=None
     ):
         """
         Write query result to table. If dataset or table is not provided,
@@ -1077,9 +1078,15 @@ def write_to_table(
             One of the JOB_CREATE_* constants
         write_disposition : str, optional
             One of the JOB_WRITE_* constants
-        use_legacy_sql:
-            If False, the query will use BigQuery's standard SQL (https://cloud.google.com/bigquery/sql-reference/)
-
+        use_legacy_sql: bool, optional
+            If False, the query will use BigQuery's standard SQL
+            (https://cloud.google.com/bigquery/sql-reference/)
+        maximum_billing_tier : integer, optional
+            Limits the billing tier for this job. Queries that have resource
+            usage beyond this tier will fail (without incurring a charge). If
+            unspecified, this will be set to your project default. For more
+            information,
+            see https://cloud.google.com/bigquery/pricing#high-compute

         Returns
         -------
@@ -1106,6 +1113,9 @@ def write_to_table(
         if allow_large_results is not None:
             configuration['allowLargeResults'] = allow_large_results

+        if maximum_billing_tier is not None:
+            configuration['maximumBillingTier'] = maximum_billing_tier
+
         if use_query_cache is not None:
             configuration['useQueryCache'] = use_query_cache

bigquery/tests/test_client.py

Lines changed: 39 additions & 0 deletions
@@ -1117,6 +1117,7 @@ def setUp(self):
         self.project_id = 'project'
         self.dataset_id = 'dataset'
         self.table_id = 'table'
+        self.maximum_billing_tier = 1000
         self.external_udf_uris = ['gs://bucket/external_udf.js']
         self.use_query_cache = False
         self.priority = "INTERACTIVE"
@@ -1162,6 +1163,44 @@ def test_write(self):

         self.assertEqual(result, expected_result)

+    def test_write_maxbilltier(self):
+        """ Ensure that write is working when maximumBillingTier is set"""
+        expected_result = {
+            'status': {'state': u'RUNNING'},
+        }
+
+        body = {
+            "configuration": {
+                "query": {
+                    "destinationTable": {
+                        "projectId": self.project_id,
+                        "datasetId": self.dataset_id,
+                        "tableId": self.table_id
+                    },
+                    "query": self.query,
+                    "userDefinedFunctionResources": [{
+                        "resourceUri": self.external_udf_uris[0]
+                    }],
+                    "useQueryCache": self.use_query_cache,
+                    "priority": self.priority,
+                    "maximumBillingTier": self.maximum_billing_tier
+                }
+            }
+        }
+
+        self.mock_api.jobs().insert().execute.return_value = expected_result
+        result = self.client.write_to_table(
+            self.query, self.dataset_id, self.table_id, priority=self.priority,
+            external_udf_uris=self.external_udf_uris, use_query_cache=False,
+            maximum_billing_tier=self.maximum_billing_tier)
+
+        self.mock_api.jobs().insert.assert_called_with(
+            projectId=self.project_id,
+            body=body
+        )
+
+        self.assertEqual(result, expected_result)
+
     def test_write_http_error(self):
         """ Test write with http error"""
         expected_result = {
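
As a convenience, the sketch below shows one way to run only the newly added test with the standard library's unittest loader. It is not part of the commit; it assumes the code is run from the repository root with the project's test dependencies (e.g. mock) installed.

    # Sketch: load bigquery/tests/test_client.py and run just the test added
    # in this commit, selecting it by the 'maxbilltier' substring in its id.
    import unittest

    loader = unittest.TestLoader()
    module_suite = loader.loadTestsFromName('bigquery.tests.test_client')
    selected = unittest.TestSuite(
        test
        for class_suite in module_suite
        for test in class_suite
        if 'maxbilltier' in test.id()
    )
    unittest.TextTestRunner(verbosity=2).run(selected)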
