@@ -2997,6 +2997,8 @@ def test_load_table_from_uri(self):
        creds = _make_credentials()
        http = object()
        job_config = LoadJobConfig()
+        original_config_copy = copy.deepcopy(job_config)
+
        client = self._make_one(project=self.PROJECT, credentials=creds, _http=http)
        conn = client._connection = make_connection(RESOURCE)
        destination = client.dataset(self.DS_ID).table(DESTINATION)
@@ -3010,6 +3012,9 @@ def test_load_table_from_uri(self):
            method="POST", path="/projects/%s/jobs" % self.PROJECT, data=RESOURCE
        )

+        # the original config object should not have been modified
+        self.assertEqual(job_config.to_api_repr(), original_config_copy.to_api_repr())
+
        self.assertIsInstance(job, LoadJob)
        self.assertIsInstance(job._configuration, LoadJobConfig)
        self.assertIs(job._client, client)
@@ -3496,19 +3501,24 @@ def test_copy_table_w_valid_job_config(self):
        creds = _make_credentials()
        http = object()
        client = self._make_one(project=self.PROJECT, credentials=creds, _http=http)
-        job_config = CopyJobConfig()
        conn = client._connection = make_connection(RESOURCE)
        dataset = client.dataset(self.DS_ID)
        source = dataset.table(SOURCE)
        destination = dataset.table(DESTINATION)

+        job_config = CopyJobConfig()
+        original_config_copy = copy.deepcopy(job_config)
        job = client.copy_table(source, destination, job_id=JOB, job_config=job_config)
+
        # Check that copy_table actually starts the job.
        conn.api_request.assert_called_once_with(
            method="POST", path="/projects/%s/jobs" % self.PROJECT, data=RESOURCE
        )
        self.assertIsInstance(job._configuration, CopyJobConfig)

+        # the original config object should not have been modified
+        assert job_config.to_api_repr() == original_config_copy.to_api_repr()
+
    def test_extract_table(self):
        from google.cloud.bigquery.job import ExtractJob

@@ -3679,6 +3689,7 @@ def test_extract_table_generated_job_id(self):
        source = dataset.table(SOURCE)
        job_config = ExtractJobConfig()
        job_config.destination_format = DestinationFormat.NEWLINE_DELIMITED_JSON
+        original_config_copy = copy.deepcopy(job_config)

        job = client.extract_table(source, DESTINATION, job_config=job_config)

@@ -3695,6 +3706,9 @@ def test_extract_table_generated_job_id(self):
        self.assertEqual(job.source, source)
        self.assertEqual(list(job.destination_uris), [DESTINATION])

+        # the original config object should not have been modified
+        assert job_config.to_api_repr() == original_config_copy.to_api_repr()
+
    def test_extract_table_w_destination_uris(self):
        from google.cloud.bigquery.job import ExtractJob

@@ -3840,6 +3854,7 @@ def test_query_w_explicit_job_config(self):
        job_config = QueryJobConfig()
        job_config.use_query_cache = True
        job_config.maximum_bytes_billed = 2000
+        original_config_copy = copy.deepcopy(job_config)

        client.query(
            query, job_id=job_id, location=self.LOCATION, job_config=job_config
@@ -3850,6 +3865,105 @@ def test_query_w_explicit_job_config(self):
            method="POST", path="/projects/PROJECT/jobs", data=resource
        )

+        # the original config object should not have been modified
+        assert job_config.to_api_repr() == original_config_copy.to_api_repr()
+
+    def test_query_preserving_explicit_job_config(self):
+        job_id = "some-job-id"
+        query = "select count(*) from persons"
+        resource = {
+            "jobReference": {
+                "jobId": job_id,
+                "projectId": self.PROJECT,
+                "location": self.LOCATION,
+            },
+            "configuration": {
+                "query": {
+                    "query": query,
+                    "useLegacySql": False,
+                    "useQueryCache": True,
+                    "maximumBytesBilled": "2000",
+                }
+            },
+        }
+
+        creds = _make_credentials()
+        http = object()
+
+        from google.cloud.bigquery import QueryJobConfig
+
+        client = self._make_one(project=self.PROJECT, credentials=creds, _http=http,)
+        conn = client._connection = make_connection(resource)
+
+        job_config = QueryJobConfig()
+        job_config.use_query_cache = True
+        job_config.maximum_bytes_billed = 2000
+        original_config_copy = copy.deepcopy(job_config)
+
+        client.query(
+            query, job_id=job_id, location=self.LOCATION, job_config=job_config
+        )
+
+        # Check that query actually starts the job.
+        conn.api_request.assert_called_once_with(
+            method="POST", path="/projects/PROJECT/jobs", data=resource
+        )
+
+        # the original config object should not have been modified
+        assert job_config.to_api_repr() == original_config_copy.to_api_repr()
+
+    def test_query_preserving_explicit_default_job_config(self):
+        job_id = "some-job-id"
+        query = "select count(*) from persons"
+        resource = {
+            "jobReference": {
+                "jobId": job_id,
+                "projectId": self.PROJECT,
+                "location": self.LOCATION,
+            },
+            "configuration": {
+                "query": {
+                    "query": query,
+                    "defaultDataset": {
+                        "projectId": self.PROJECT,
+                        "datasetId": "some-dataset",
+                    },
+                    "useLegacySql": False,
+                    "maximumBytesBilled": "1000",
+                }
+            },
+        }
+
+        creds = _make_credentials()
+        http = object()
+
+        from google.cloud.bigquery import QueryJobConfig, DatasetReference
+
+        default_job_config = QueryJobConfig()
+        default_job_config.default_dataset = DatasetReference(
+            self.PROJECT, "some-dataset"
+        )
+        default_job_config.maximum_bytes_billed = 1000
+        default_config_copy = copy.deepcopy(default_job_config)
+
+        client = self._make_one(
+            project=self.PROJECT,
+            credentials=creds,
+            _http=http,
+            default_query_job_config=default_job_config,
+        )
+        conn = client._connection = make_connection(resource)
+
+        client.query(query, job_id=job_id, location=self.LOCATION, job_config=None)
+
+        # Check that query actually starts the job.
+        conn.api_request.assert_called_once_with(
+            method="POST", path="/projects/PROJECT/jobs", data=resource
+        )
+
+        # the original default config object should not have been modified
+        assert default_job_config.to_api_repr() == default_config_copy.to_api_repr()
+
    def test_query_w_invalid_job_config(self):
        from google.cloud.bigquery import QueryJobConfig, DatasetReference
        from google.cloud.bigquery import job
@@ -5429,22 +5543,24 @@ def test_load_table_from_file_resumable(self):

        client = self._make_client()
        file_obj = self._make_file_obj()
+        job_config = self._make_config()
+        original_config_copy = copy.deepcopy(job_config)

        do_upload_patch = self._make_do_upload_patch(
            client, "_do_resumable_upload", self.EXPECTED_CONFIGURATION
        )
        with do_upload_patch as do_upload:
            client.load_table_from_file(
-                file_obj,
-                self.TABLE_REF,
-                job_id="job_id",
-                job_config=self._make_config(),
+                file_obj, self.TABLE_REF, job_id="job_id", job_config=job_config,
            )

        do_upload.assert_called_once_with(
            file_obj, self.EXPECTED_CONFIGURATION, _DEFAULT_NUM_RETRIES
        )

+        # the original config object should not have been modified
+        assert job_config.to_api_repr() == original_config_copy.to_api_repr()
+
    def test_load_table_from_file_w_explicit_project(self):
        from google.cloud.bigquery.client import _DEFAULT_NUM_RETRIES

@@ -5790,6 +5906,7 @@ def test_load_table_from_dataframe_w_custom_job_config(self):
        job_config = job.LoadJobConfig(
            write_disposition=job.WriteDisposition.WRITE_TRUNCATE
        )
+        original_config_copy = copy.deepcopy(job_config)

        get_table_patch = mock.patch(
            "google.cloud.bigquery.client.Client.get_table",
@@ -5826,6 +5943,9 @@ def test_load_table_from_dataframe_w_custom_job_config(self):
        assert sent_config.source_format == job.SourceFormat.PARQUET
        assert sent_config.write_disposition == job.WriteDisposition.WRITE_TRUNCATE

+        # the original config object should not have been modified
+        assert job_config.to_api_repr() == original_config_copy.to_api_repr()
+
    @unittest.skipIf(pandas is None, "Requires `pandas`")
    @unittest.skipIf(pyarrow is None, "Requires `pyarrow`")
    def test_load_table_from_dataframe_w_automatic_schema(self):
@@ -6466,6 +6586,7 @@ def test_load_table_from_json_non_default_args(self):
        ]
        job_config = job.LoadJobConfig(schema=schema)
        job_config._properties["load"]["unknown_field"] = "foobar"
+        original_config_copy = copy.deepcopy(job_config)

        load_patch = mock.patch(
            "google.cloud.bigquery.client.Client.load_table_from_file", autospec=True
@@ -6493,13 +6614,15 @@ def test_load_table_from_json_non_default_args(self):
        )

        sent_config = load_table_from_file.mock_calls[0][2]["job_config"]
-        assert job_config.source_format is None  # the original was not modified
        assert sent_config.source_format == job.SourceFormat.NEWLINE_DELIMITED_JSON
        assert sent_config.schema == schema
        assert not sent_config.autodetect
        # all properties should have been cloned and sent to the backend
        assert sent_config._properties.get("load", {}).get("unknown_field") == "foobar"

+        # the original config object should not have been modified
+        assert job_config.to_api_repr() == original_config_copy.to_api_repr()
+
    def test_load_table_from_json_w_invalid_job_config(self):
        from google.cloud.bigquery import job

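These tests all assert the same contract: a client method must treat a caller-supplied job config as read-only, working on a copy when it merges defaults or fills in derived fields. The snippet below is a minimal sketch of that pattern under stated assumptions; `JobConfig` and `start_job` are illustrative stand-ins, not the actual google-cloud-bigquery classes or call signatures.

```python
import copy


class JobConfig:
    """Toy stand-in for a BigQuery job config: a dict of API properties."""

    def __init__(self):
        self._properties = {}

    def to_api_repr(self):
        # Return a detached snapshot of the properties dict.
        return copy.deepcopy(self._properties)


def start_job(job_config=None, default_job_config=None):
    """Build the request payload without mutating the caller's config."""
    if job_config is not None:
        # Deep-copy so later changes never leak back to the caller's object.
        config = copy.deepcopy(job_config)
    else:
        config = JobConfig()

    if default_job_config is not None:
        # Apply defaults onto a copy as well, so the default config also stays intact.
        merged = copy.deepcopy(default_job_config)
        merged._properties.update(config._properties)
        config = merged

    return config.to_api_repr()


# The caller's config survives the call unchanged, mirroring the assertions above.
user_config = JobConfig()
user_config._properties["maximumBytesBilled"] = "2000"
snapshot = user_config.to_api_repr()
start_job(job_config=user_config)
assert user_config.to_api_repr() == snapshot
```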