@@ -27,6 +27,8 @@
     FileColumnClause,
     StageClause,
 )
+import sqlalchemy
+from packaging import version


 class CompileDatabendCopyIntoTableTest(fixtures.TestBase, AssertsCompiledSQL):
@@ -215,51 +217,52 @@ def define_tables(cls, metadata):
             Column("data", String(50)),
         )

-    def test_copy_into_stage_and_table(self, connection):
-        # create stage
-        connection.execute(text('CREATE OR REPLACE STAGE mystage'))
-        # copy into stage from random table limiting 1000
-        table = self.tables.random_data
-        query = table.select().limit(1000)
+    if version.parse(sqlalchemy.__version__) >= version.parse('2.0.0'):
+        def test_copy_into_stage_and_table(self, connection):
+            # create stage
+            connection.execute(text('CREATE OR REPLACE STAGE mystage'))
+            # copy into stage from random table limiting 1000
+            table = self.tables.random_data
+            query = table.select().limit(1000)

-        copy_into = CopyIntoLocation(
-            target=StageClause(
-                name='mystage'
-            ),
-            from_=query,
-            file_format=ParquetFormat(),
-            options=CopyIntoLocationOptions()
-        )
-        r = connection.execute(
-            copy_into
-        )
-        eq_(r.rowcount, 1000)
-        copy_into_results = r.context.copy_into_location_results()
-        eq_(copy_into_results['rows_unloaded'], 1000)
-        # eq_(copy_into_results['input_bytes'], 16250)  # input bytes will differ, the table is random
-        # eq_(copy_into_results['output_bytes'], 4701)  # output bytes differs
+            copy_into = CopyIntoLocation(
+                target=StageClause(
+                    name='mystage'
+                ),
+                from_=query,
+                file_format=ParquetFormat(),
+                options=CopyIntoLocationOptions()
+            )
+            r = connection.execute(
+                copy_into
+            )
+            eq_(r.rowcount, 1000)
+            copy_into_results = r.context.copy_into_location_results()
+            eq_(copy_into_results['rows_unloaded'], 1000)
+            # eq_(copy_into_results['input_bytes'], 16250)  # input bytes will differ, the table is random
+            # eq_(copy_into_results['output_bytes'], 4701)  # output bytes differs

-        # now copy into table
+            # now copy into table

-        copy_into_table = CopyIntoTable(
-            target=self.tables.loaded,
-            from_=StageClause(
-                name='mystage'
-            ),
-            file_format=ParquetFormat(),
-            options=CopyIntoTableOptions()
-        )
-        r = connection.execute(
-            copy_into_table
-        )
-        eq_(r.rowcount, 1000)
-        copy_into_table_results = r.context.copy_into_table_results()
-        assert len(copy_into_table_results) == 1
-        result = copy_into_table_results[0]
-        assert result['file'].endswith('.parquet')
-        eq_(result['rows_loaded'], 1000)
-        eq_(result['errors_seen'], 0)
-        eq_(result['first_error'], None)
-        eq_(result['first_error_line'], None)
+            copy_into_table = CopyIntoTable(
+                target=self.tables.loaded,
+                from_=StageClause(
+                    name='mystage'
+                ),
+                file_format=ParquetFormat(),
+                options=CopyIntoTableOptions()
+            )
+            r = connection.execute(
+                copy_into_table
+            )
+            eq_(r.rowcount, 1000)
+            copy_into_table_results = r.context.copy_into_table_results()
+            assert len(copy_into_table_results) == 1
+            result = copy_into_table_results[0]
+            assert result['file'].endswith('.parquet')
+            eq_(result['rows_loaded'], 1000)
+            eq_(result['errors_seen'], 0)
+            eq_(result['first_error'], None)
+            eq_(result['first_error_line'], None)


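The change gates the test on the installed SQLAlchemy version by defining the method conditionally inside the class body. A minimal sketch of that idiom, separate from the diff: the class and test names here are illustrative only, and it assumes the sqlalchemy and packaging packages are importable.

# Minimal sketch of the version-gating idiom used above; names are hypothetical.
import sqlalchemy
from packaging import version

SQLALCHEMY_2 = version.parse(sqlalchemy.__version__) >= version.parse("2.0.0")


class VersionGatedTests:
    # The method only exists on the class when SQLAlchemy 2.0+ is installed,
    # so older environments never define (and never collect) the test.
    if SQLALCHEMY_2:
        def test_needs_sqlalchemy_2(self):
            assert sqlalchemy.__version__.startswith("2")

Because the method is simply absent on older SQLAlchemy, test runners that collect methods from the class skip it without reporting a failure.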