|
3 | 3 | from __future__ import absolute_import |
4 | 4 | from __future__ import unicode_literals |
5 | 5 |
|
| 6 | +import operator |
| 7 | + |
6 | 8 | from google import auth |
7 | 9 | from google.cloud import bigquery |
8 | 10 | from google.cloud.bigquery import dbapi, QueryJobConfig |
@@ -283,6 +285,11 @@ def __init__( |
    def dbapi(cls):
        # Return the DB-API 2.0 module this dialect is built on
        # (google.cloud.bigquery.dbapi, imported at module top).
        return dbapi
285 | 287 |
|
| 288 | + @staticmethod |
| 289 | + def _build_formatted_table_id(table): |
| 290 | + """Build '<dataset_id>.<table_id>' string using given table.""" |
| 291 | + return "{}.{}".format(table.reference.dataset_id, table.table_id) |
| 292 | + |
286 | 293 | @staticmethod |
287 | 294 | def _add_default_dataset_to_job_config(job_config, project_id, dataset_id): |
288 | 295 | # If dataset_id is set, then we know the job_config isn't None |
@@ -349,6 +356,26 @@ def _json_deserializer(self, row): |
349 | 356 | """ |
350 | 357 | return row |
351 | 358 |
|
| 359 | + def _get_table_or_view_names(self, connection, table_type, schema=None): |
| 360 | + current_schema = schema or self.dataset_id |
| 361 | + get_table_name = self._build_formatted_table_id \ |
| 362 | + if self.dataset_id is None else \ |
| 363 | + operator.attrgetter("table_id") |
| 364 | + |
| 365 | + client = connection.connection._client |
| 366 | + datasets = client.list_datasets() |
| 367 | + |
| 368 | + result = [] |
| 369 | + for dataset in datasets: |
| 370 | + if current_schema is not None and current_schema != dataset.dataset_id: |
| 371 | + continue |
| 372 | + |
| 373 | + tables = client.list_tables(dataset.reference) |
| 374 | + for table in tables: |
| 375 | + if table_type == table.table_type: |
| 376 | + result.append(get_table_name(table)) |
| 377 | + return result |
| 378 | + |
352 | 379 | @staticmethod |
353 | 380 | def _split_table_name(full_table_name): |
354 | 381 | # Split full_table_name to get project, dataset and table name |
@@ -464,23 +491,13 @@ def get_table_names(self, connection, schema=None, **kw): |
464 | 491 | if isinstance(connection, Engine): |
465 | 492 | connection = connection.connect() |
466 | 493 |
|
467 | | - datasets = connection.connection._client.list_datasets() |
468 | | - result = [] |
469 | | - for d in datasets: |
470 | | - if schema is not None and d.dataset_id != schema: |
471 | | - continue |
| 494 | + return self._get_table_or_view_names(connection, "TABLE", schema) |
472 | 495 |
|
473 | | - if self.dataset_id is not None and d.dataset_id != self.dataset_id: |
474 | | - continue |
| 496 | + def get_view_names(self, connection, schema=None, **kw): |
| 497 | + if isinstance(connection, Engine): |
| 498 | + connection = connection.connect() |
475 | 499 |
|
476 | | - tables = connection.connection._client.list_tables(d.reference) |
477 | | - for t in tables: |
478 | | - if self.dataset_id is None: |
479 | | - table_name = d.dataset_id + '.' + t.table_id |
480 | | - else: |
481 | | - table_name = t.table_id |
482 | | - result.append(table_name) |
483 | | - return result |
| 500 | + return self._get_table_or_view_names(connection, "VIEW", schema) |
484 | 501 |
|
485 | 502 | def do_rollback(self, dbapi_connection): |
486 | 503 | # BigQuery has no support for transactions. |
|
0 commit comments