diff --git a/materializationengine/blueprints/client/api2.py b/materializationengine/blueprints/client/api2.py
index 917a337e..6b2962fe 100644
--- a/materializationengine/blueprints/client/api2.py
+++ b/materializationengine/blueprints/client/api2.py
@@ -71,7 +71,7 @@
 from materializationengine.schemas import AnalysisTableSchema, AnalysisVersionSchema
 from materializationengine.utils import check_read_permission
 
-__version__ = "5.14.0"
+__version__ = "5.13.5"
 
 
 authorizations = {
@@ -651,6 +651,13 @@ def combine_queries(
     if (prod_df is None) and (mat_df is None):
         abort(400, f"This query on table {user_data['table']} returned no results")
 
+    # if there is nothing to combine, just return the prod table to reflect
+    # schema with no rows
+    if (mat_df is None) and len(prod_df)==0:
+        cut_prod_df = prod_df.drop(crud_columns, axis=1,errors="ignore")
+        if len(created_columns) > 0:
+            cut_prod_df = cut_prod_df.drop(created_columns, axis=1,errors="ignore")
+        return cut_prod_df.reset_index()
     if prod_df is not None:
         # if we are moving forward in time
diff --git a/materializationengine/blueprints/client/query.py b/materializationengine/blueprints/client/query.py
index a6b1a0d2..0b46de63 100644
--- a/materializationengine/blueprints/client/query.py
+++ b/materializationengine/blueprints/client/query.py
@@ -85,37 +85,38 @@ def fix_columns_with_query(
 ):
     """Use a query object to suggest how to convert columns imported from csv to correct types."""
-    if len(df) > 0:
-        n_tables = len(query.column_descriptions)
+
+    n_tables = len(query.column_descriptions)
+    if n_tables == 1:
+        schema_model = query.column_descriptions[0]["type"]
+    for colname in df.columns:
         if n_tables == 1:
-            schema_model = query.column_descriptions[0]["type"]
-            for colname in df.columns:
-                if n_tables == 1:
-                    coltype = type(getattr(schema_model, colname).type)
-                else:
-                    coltype = type(
-                        next(
-                            col["type"]
-                            for col in query.column_descriptions
-                            if col["name"] == colname
-                        )
-                    )
-                if coltype is Boolean:
-                    pass
-                    # df[colname] = _fix_boolean_column(df[colname])
-                elif coltype is DateTime:
-                    # if the first entry for this column has a decimal point,
-                    # then it is one format and we want to convert it using that format
-                    df[colname] = pd.to_datetime(
-                        df[colname], utc=True, format='ISO8601'
-                    )
-
-                elif coltype is Geometry and fix_wkb is True:
-                    df[colname] = fix_wkb_column(
-                        df[colname],
-                        wkb_data_start_ind=wkb_data_start_ind,
-                        n_threads=n_threads,
+            coltype = type(getattr(schema_model, colname).type)
+        else:
+            coltype = type(
+                next(
+                    col["type"]
+                    for col in query.column_descriptions
+                    if col["name"] == colname
                 )
+            )
+        if coltype is Boolean:
+            pass
+            # df[colname] = _fix_boolean_column(df[colname])
+        elif coltype is DateTime:
+            # if the first entry for this column has a decimal point,
+            # then it is one format and we want to convert it using that format
+            df[colname] = pd.to_datetime(
+                df[colname], utc=True, format='ISO8601'
+            )
+        if len(df) > 0:
+            if coltype is Geometry and fix_wkb is True:
+
+                df[colname] = fix_wkb_column(
+                    df[colname],
+                    wkb_data_start_ind=wkb_data_start_ind,
+                    n_threads=n_threads,
+                )
             elif isinstance(df[colname].loc[0], Decimal) and fix_decimal is True:
                 df[colname] = _fix_decimal_column(df[colname])
     return df
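
The api2.py hunk adds an early return in combine_queries so that a query with no materialized frame and an empty production frame still hands the caller a DataFrame carrying the table's schema, just with zero rows. A minimal sketch of that behaviour, using plain pandas and hypothetical column names in place of the real crud_columns/created_columns built further up in the function:

import pandas as pd

# Hypothetical stand-ins for the bookkeeping columns combine_queries strips out;
# the real values come from the query-building code, not from this sketch.
crud_columns = ["created", "deleted", "superceded_id"]
created_columns = ["valid"]

# Empty production frame that still carries the table schema.
prod_df = pd.DataFrame(columns=["id", "pt_position"] + crud_columns + created_columns)
mat_df = None

if (mat_df is None) and len(prod_df) == 0:
    # Drop CRUD bookkeeping columns; errors="ignore" tolerates columns that are absent.
    cut_prod_df = prod_df.drop(crud_columns, axis=1, errors="ignore")
    if len(created_columns) > 0:
        cut_prod_df = cut_prod_df.drop(created_columns, axis=1, errors="ignore")
    result = cut_prod_df.reset_index()

# The caller sees the schema columns (plus the reset index) with zero rows.
print(list(result.columns), len(result))  # ['index', 'id', 'pt_position'] 0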
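
The query.py hunk restructures fix_columns_with_query so that column-type resolution and dtype coercion run even when the query returned zero rows, while the fixes that must inspect actual cell values (WKB geometry decoding, Decimal detection via .loc[0]) stay behind the len(df) > 0 guard. A small sketch of why that split matters, assuming pandas 2.x (where to_datetime accepts format="ISO8601"); the column name here is made up:

import pandas as pd

# An empty result frame that still has a datetime column in its schema.
df = pd.DataFrame({"created": pd.Series([], dtype="object")})

# Safe on zero rows: only the dtype changes, no values are touched.
df["created"] = pd.to_datetime(df["created"], utc=True, format="ISO8601")
print(df["created"].dtype)  # datetime64[ns, UTC]

# Value-dependent fixes would fail on an empty frame, so they only run
# when there is at least one row.
if len(df) > 0:
    first_value = df["created"].loc[0]  # .loc[0] raises KeyError when df is empty

Keeping the dtype coercion outside the guard means an empty result no longer comes back with object-dtype timestamp columns just because the query happened to match nothing.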