@@ -1164,18 +1164,21 @@ def test_upload_chinese_unicode_data(self):
                        columns=list('ABCD'))
         df['s'] = u'信用卡'
 
-        gbq.to_gbq(df, self.destination_table + test_id, _get_project_id(),
-                   chunksize=10000)
-
-        result_df = gbq.read_gbq("SELECT * FROM {0}".format(
-            self.destination_table + test_id),
-            project_id=_get_project_id())
+        gbq.to_gbq(
+            df, self.destination_table + test_id,
+            _get_project_id(),
+            private_key=_get_private_key_path(),
+            chunksize=10000)
+
+        result_df = gbq.read_gbq(
+            "SELECT * FROM {0}".format(self.destination_table + test_id),
+            project_id=_get_project_id(),
+            private_key=_get_private_key_path())
 
         assert len(result_df) == test_size
 
-        pytest.skipif(
-            sys.version_info.major < 3,
-            reason='Unicode comparison in Py2 not working')
+        if sys.version_info.major < 3:
+            pytest.skip(msg='Unicode comparison in Py2 not working')
 
         result = result_df['s'].sort_values()
         expected = df['s'].sort_values()
@@ -1194,18 +1197,21 @@ def test_upload_other_unicode_data(self):
             ]
         })
 
-        gbq.to_gbq(df, self.destination_table + test_id, _get_project_id(),
-                   chunksize=10000)
+        gbq.to_gbq(
+            df, self.destination_table + test_id,
+            _get_project_id(),
+            private_key=_get_private_key_path(),
+            chunksize=10000)
 
         result_df = gbq.read_gbq("SELECT * FROM {0}".format(
             self.destination_table + test_id),
-            project_id=_get_project_id())
+            project_id=_get_project_id(),
+            private_key=_get_private_key_path())
 
         assert len(result_df) == test_size
 
-        pytest.skipif(
-            sys.version_info.major < 3,
-            reason='Unicode comparison in Py2 not working')
+        if sys.version_info.major < 3:
+            pytest.skip(msg='Unicode comparison in Py2 not working')
 
         result = result_df['s'].sort_values()
         expected = df['s'].sort_values()
@@ -1525,59 +1531,6 @@ def test_upload_data(self):
 
         assert result['num_rows'][0] == test_size
 
-    def test_upload_chinese_unicode_data(self):
-        test_id = "2"
-        test_size = 6
-        df = DataFrame(np.random.randn(6, 4), index=range(6),
-                       columns=list('ABCD'))
-        df['s'] = u'信用卡'
-
-        gbq.to_gbq(df, self.destination_table + test_id, _get_project_id(),
-                   chunksize=10000)
-
-        result_df = gbq.read_gbq("SELECT * FROM {0}".format(
-            self.destination_table + test_id),
-            project_id=_get_project_id())
-
-        assert len(result_df) == test_size
-
-        if sys.version_info.major < 3:
-            pytest.skip(msg='Unicode comparison in Py2 not working')
-
-        result = result_df['s'].sort_values()
-        expected = df['s'].sort_values()
-
-        tm.assert_numpy_array_equal(expected.values, result.values)
-
-    def test_upload_other_unicode_data(self):
-        test_id = "3"
-        test_size = 3
-        df = DataFrame({
-            's': ['Skywalker™', 'lego', 'hülle'],
-            'i': [200, 300, 400],
-            'd': [
-                '2017-12-13 17:40:39', '2017-12-13 17:40:39',
-                '2017-12-13 17:40:39'
-            ]
-        })
-
-        gbq.to_gbq(df, self.destination_table + test_id, _get_project_id(),
-                   chunksize=10000)
-
-        result_df = gbq.read_gbq("SELECT * FROM {0}".format(
-            self.destination_table + test_id),
-            project_id=_get_project_id())
-
-        assert len(result_df) == test_size
-
-        if sys.version_info.major < 3:
-            pytest.skip(msg='Unicode comparison in Py2 not working')
-
-        result = result_df['s'].sort_values()
-        expected = df['s'].sort_values()
-
-        tm.assert_numpy_array_equal(expected.values, result.values)
-
 
 class TestToGBQIntegrationWithServiceAccountKeyContents(object):
     # Changes to BigQuery table schema may take up to 2 minutes as of May 2015
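
For context, the pattern these tests converge on is a simple upload/read round trip in which both to_gbq and read_gbq receive explicit service-account credentials through the private_key argument. The sketch below is a minimal standalone illustration of that pattern, assuming the pandas-gbq API of this era (private_key accepting a path to a service-account JSON key file); the project id, destination table, and key path are placeholders, not values from the test suite.

import numpy as np
from pandas import DataFrame

from pandas_gbq import gbq

# Placeholders -- substitute a real project, dataset.table, and key file path.
PROJECT_ID = 'my-project-id'
DESTINATION_TABLE = 'my_dataset.unicode_roundtrip'
PRIVATE_KEY_PATH = '/path/to/service_account_key.json'

df = DataFrame(np.random.randn(6, 4), index=range(6), columns=list('ABCD'))
df['s'] = u'信用卡'

# Upload with explicit service-account credentials, as the updated tests do.
gbq.to_gbq(
    df, DESTINATION_TABLE, PROJECT_ID,
    private_key=PRIVATE_KEY_PATH,
    chunksize=10000)

# Read the table back with the same credentials and check the row count.
result_df = gbq.read_gbq(
    "SELECT * FROM {0}".format(DESTINATION_TABLE),
    project_id=PROJECT_ID,
    private_key=PRIVATE_KEY_PATH)

assert len(result_df) == len(df)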