|
3 | 3 | from __future__ import absolute_import |
4 | 4 | from __future__ import unicode_literals |
5 | 5 |
|
| 6 | +import operator |
| 7 | + |
6 | 8 | from google import auth |
7 | 9 | from google.cloud import bigquery |
8 | 10 | from google.cloud.bigquery import dbapi, QueryJobConfig |
@@ -293,6 +295,11 @@ def __init__( |
293 | 295 | def dbapi(cls): |
294 | 296 | return dbapi |
295 | 297 |
|
| 298 | + @staticmethod |
| 299 | + def _build_formatted_table_id(table): |
| 300 | + """Build '<dataset_id>.<table_id>' string using given table.""" |
| 301 | + return "{}.{}".format(table.reference.dataset_id, table.table_id) |
| 302 | + |
296 | 303 | @staticmethod |
297 | 304 | def _add_default_dataset_to_job_config(job_config, project_id, dataset_id): |
298 | 305 | # If dataset_id is set, then we know the job_config isn't None |
@@ -359,6 +366,26 @@ def _json_deserializer(self, row): |
359 | 366 | """ |
360 | 367 | return row |
361 | 368 |
|
| 369 | + def _get_table_or_view_names(self, connection, table_type, schema=None): |
| 370 | + current_schema = schema or self.dataset_id |
| 371 | + get_table_name = self._build_formatted_table_id \ |
| 372 | + if self.dataset_id is None else \ |
| 373 | + operator.attrgetter("table_id") |
| 374 | + |
| 375 | + client = connection.connection._client |
| 376 | + datasets = client.list_datasets() |
| 377 | + |
| 378 | + result = [] |
| 379 | + for dataset in datasets: |
| 380 | + if current_schema is not None and current_schema != dataset.dataset_id: |
| 381 | + continue |
| 382 | + |
| 383 | + tables = client.list_tables(dataset.reference) |
| 384 | + for table in tables: |
| 385 | + if table_type == table.table_type: |
| 386 | + result.append(get_table_name(table)) |
| 387 | + return result |
| 388 | + |
362 | 389 | @staticmethod |
363 | 390 | def _split_table_name(full_table_name): |
364 | 391 | # Split full_table_name to get project, dataset and table name |
def get_table_names(self, connection, schema=None, **kw):
    """Return the names of BigQuery tables (type ``"TABLE"``).

    :param connection: a SQLAlchemy ``Engine`` or connection; an Engine
        is first turned into a connection via ``connect()``.
    :param schema: optional dataset id to restrict the listing to.
    :return: list of table names, delegated to
        ``_get_table_or_view_names`` with type ``"TABLE"``.
    """
    # Accept a bare Engine for convenience, as the other inspection
    # methods in this dialect do.
    conn = connection.connect() if isinstance(connection, Engine) else connection
    return self._get_table_or_view_names(conn, "TABLE", schema)
482 | 505 |
|
483 | | - if self.dataset_id is not None and d.dataset_id != self.dataset_id: |
484 | | - continue |
def get_view_names(self, connection, schema=None, **kw):
    """Return the names of BigQuery views (type ``"VIEW"``).

    :param connection: a SQLAlchemy ``Engine`` or connection; an Engine
        is first turned into a connection via ``connect()``.
    :param schema: optional dataset id to restrict the listing to.
    :return: list of view names, delegated to
        ``_get_table_or_view_names`` with type ``"VIEW"``.
    """
    # Mirror get_table_names: accept a bare Engine and delegate to the
    # shared helper with the VIEW type filter.
    conn = connection.connect() if isinstance(connection, Engine) else connection
    return self._get_table_or_view_names(conn, "VIEW", schema)
494 | 511 |
|
495 | 512 | def do_rollback(self, dbapi_connection): |
496 | 513 | # BigQuery has no support for transactions. |
|
0 commit comments