15 changes: 12 additions & 3 deletions bigquery/google/cloud/bigquery/table.py
@@ -842,7 +842,8 @@ def upload_from_file(self,
                          quote_character=None,
                          skip_leading_rows=None,
                          write_disposition=None,
-                         client=None):
+                         client=None,
+                         job_name=None):
         """Upload the contents of this table from a file-like object.
 
         The content type of the upload will either be
@@ -915,6 +916,10 @@ def upload_from_file(self,
         :param client: Optional. The client to use. If not passed, falls back
                        to the ``client`` stored on the current dataset.
 
+        :type job_name: str
+        :param job_name: Optional. The id of the job. Generated if not
+                         explicitly passed in.
+
         :rtype: :class:`google.cloud.bigquery.jobs.LoadTableFromStorageJob`
         :returns: the job instance used to load the data (e.g., for
                   querying status). Note that the job is already started:
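As a usage note: a minimal sketch of a call that exercises the new parameter. The client, dataset, table, and file names are illustrative, not from this PR, and the positional source_format argument is assumed from the pre-existing signature:

    from google.cloud import bigquery

    client = bigquery.Client()
    table = client.dataset('my_dataset').table('my_table')

    with open('data.csv', 'rb') as source:
        # Passing job_name fixes the load job's id up front, so the job
        # can be polled or retried under a known name; omitting it falls
        # back to a generated id, as the docstring above states.
        job = table.upload_from_file(source, 'CSV', job_name='my-load-job')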
@@ -977,7 +982,7 @@
                                 encoding, field_delimiter,
                                 ignore_unknown_values, max_bad_records,
                                 quote_character, skip_leading_rows,
-                                write_disposition)
+                                write_disposition, job_name)
 
         upload = Upload(file_obj, content_type, total_bytes,
                         auto_transfer=False)
@@ -1033,7 +1038,8 @@ def _configure_job_metadata(metadata,  # pylint: disable=too-many-arguments
                             max_bad_records,
                             quote_character,
                             skip_leading_rows,
-                            write_disposition):
+                            write_disposition,
+                            job_name):
     """Helper for :meth:`Table.upload_from_file`."""
     load_config = metadata['configuration']['load']
 
@@ -1067,6 +1073,9 @@ def _configure_job_metadata(metadata,  # pylint: disable=too-many-arguments
     if write_disposition is not None:
         load_config['writeDisposition'] = write_disposition
 
+    if job_name is not None:
+        load_config['jobReference'] = {'jobId': job_name}
+
 
 def _parse_schema_resource(info):
     """Parse a resource fragment into a schema field.
16 changes: 16 additions & 0 deletions bigquery/tests/unit/test_table.py
@@ -1844,6 +1844,22 @@ class _UploadConfig(object):
         self.assertEqual(req['body'], BODY)
     # pylint: enable=too-many-statements
 
+    def test_upload_from_file_w_jobid(self):
+        import json
+        from google.cloud._helpers import _to_bytes
+
+        requested, PATH, BODY = self._upload_from_file_helper(job_name='foo')
+        parse_chunk = _email_chunk_parser()
+        req = requested[0]
+        ctype, boundary = [x.strip()
+                           for x in req['headers']['content-type'].split(';')]
+        divider = b'--' + _to_bytes(boundary[len('boundary="'):-1])
+        chunks = req['body'].split(divider)[1:-1]  # discard prolog / epilog
+        text_msg = parse_chunk(chunks[0].strip())
+        metadata = json.loads(text_msg._payload)
+        load_config = metadata['configuration']['load']
+        self.assertEqual(load_config['jobReference'], {'jobId': 'foo'})
+
 
 class Test_parse_schema_resource(unittest.TestCase, _SchemaBase):
 
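The boundary handling in the new test is compact, so here is a standalone sketch of the same parsing with made-up values; only content_type and body are invented, and the slicing mirrors the test:

    content_type = 'multipart/related; boundary="==0123456789=="'
    ctype, boundary = [x.strip() for x in content_type.split(';')]
    # boundary == 'boundary="==0123456789=="': drop the 'boundary="' prefix
    # and the trailing quote, then prepend '--' to form the part divider.
    divider = b'--' + boundary[len('boundary="'):-1].encode('ascii')
    body = (b'prolog\r\n--==0123456789==\r\nfirst part\r\n'
            b'--==0123456789==\r\nsecond part\r\n--==0123456789==--\r\n')
    chunks = body.split(divider)[1:-1]  # discard prolog / closing epilog
    assert chunks[0].strip() == b'first part'
    assert chunks[1].strip() == b'second part'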