From 6599e192460883b3521b84775bfde20e8642438c Mon Sep 17 00:00:00 2001 From: victor Date: Wed, 3 Sep 2025 14:43:34 +0800 Subject: [PATCH 01/19] column_ts support nanos --- c-questdb-client | 2 +- src/questdb/ingress.pyx | 16 ++++++++++++---- test/test.py | 7 ++++--- test/test_dataframe.py | 40 ++++++++++++++++++++-------------------- 4 files changed, 37 insertions(+), 28 deletions(-) diff --git a/c-questdb-client b/c-questdb-client index 924bc390..107a3eb9 160000 --- a/c-questdb-client +++ b/c-questdb-client @@ -1 +1 @@ -Subproject commit 924bc3905388d24dbebb31dfe326fd64123cf52f +Subproject commit 107a3eb95958d0857326e35c6dbd2940c01616bf diff --git a/src/questdb/ingress.pyx b/src/questdb/ingress.pyx index 0c620f17..c8d4e4e8 100644 --- a/src/questdb/ingress.pyx +++ b/src/questdb/ingress.pyx @@ -648,7 +648,7 @@ cdef class SenderTransaction: symbols: Optional[Dict[str, Optional[str]]]=None, columns: Optional[Dict[ str, - Union[None, bool, int, float, str, TimestampMicros, datetime.datetime, numpy.ndarray]] + Union[None, bool, int, float, str, TimestampMicros, TimestampNanos, datetime.datetime, numpy.ndarray]] ]=None, at: Union[ServerTimestampType, TimestampNanos, datetime.datetime]): """ @@ -962,12 +962,18 @@ cdef class Buffer: if not line_sender_buffer_column_str(self._impl, c_name, c_value, &err): raise c_err_to_py(err) - cdef inline void_int _column_ts( + cdef inline void_int _column_ts_micros( self, line_sender_column_name c_name, TimestampMicros ts) except -1: cdef line_sender_error* err = NULL if not line_sender_buffer_column_ts_micros(self._impl, c_name, ts._value, &err): raise c_err_to_py(err) + cdef inline void_int _column_ts_nanos( + self, line_sender_column_name c_name, TimestampNanos ts) except -1: + cdef line_sender_error* err = NULL + if not line_sender_buffer_column_ts_nanos(self._impl, c_name, ts._value, &err): + raise c_err_to_py(err) + cdef inline void_int _column_numpy( self, line_sender_column_name c_name, cnp.ndarray arr) except -1: if cnp.PyArray_TYPE(arr) != cnp.NPY_FLOAT64: @@ -1020,11 +1026,13 @@ cdef class Buffer: elif PyUnicode_CheckExact(value): self._column_str(c_name, value) elif isinstance(value, TimestampMicros): - self._column_ts(c_name, value) + self._column_ts_micros(c_name, value) elif PyArray_CheckExact( value): self._column_numpy(c_name, value) elif isinstance(value, cp_datetime): self._column_dt(c_name, value) + elif isinstance(value, TimestampNanos): + self._column_ts_nanos(c_name, value) else: valid = ', '.join(( 'bool', @@ -1115,7 +1123,7 @@ cdef class Buffer: symbols: Optional[Dict[str, Optional[str]]]=None, columns: Optional[Dict[ str, - Union[None, bool, int, float, str, TimestampMicros, datetime.datetime, numpy.ndarray]] + Union[None, bool, int, float, str, TimestampMicros, TimestampNanos, datetime.datetime, numpy.ndarray]] ]=None, at: Union[ServerTimestampType, TimestampNanos, datetime.datetime]): """ diff --git a/test/test.py b/test/test.py index cb0267aa..3d6fe7f4 100755 --- a/test/test.py +++ b/test/test.py @@ -149,11 +149,12 @@ def test_column(self): 'col4': 0.5, 'col5': 'val', 'col6': qi.TimestampMicros(12345), - 'col7': two_h_after_epoch, - 'col8': None}, at=qi.ServerTimestamp) + 'col7': qi.TimestampNanos(12345678), + 'col8': two_h_after_epoch, + 'col9': None}, at=qi.ServerTimestamp) exp = ( b'tbl1 col1=t,col2=f,col3=-1i,col4' + _float_binary_bytes(0.5, self.version == 1) + - b',col5="val",col6=12345t,col7=7200000000t\n') + b',col5="val",col6=12345000n,col7=12345678n,col8=7200000000000n\n') self.assertEqual(bytes(buf), exp) def 
test_none_symbol(self): diff --git a/test/test_dataframe.py b/test/test_dataframe.py index df1822e2..dcdeb856 100644 --- a/test/test_dataframe.py +++ b/test/test_dataframe.py @@ -839,12 +839,12 @@ def test_datetime64_numpy_col(self): buf = _dataframe(self.version, df, table_name='tbl1', at=qi.ServerTimestamp) self.assertEqual( buf, - b'tbl1 a=1546300800000000t,b="a"\n' + - b'tbl1 a=1546300801000000t,b="b"\n' + - b'tbl1 a=1546300802000000t,b="c"\n' + - b'tbl1 a=1546300803000000t,b="d"\n' + - b'tbl1 a=1546300804000000t,b="e"\n' + - b'tbl1 a=1546300805000000t,b="f"\n' + + b'tbl1 a=1546300800000000000n,b="a"\n' + + b'tbl1 a=1546300801000000000n,b="b"\n' + + b'tbl1 a=1546300802000000000n,b="c"\n' + + b'tbl1 a=1546300803000000000n,b="d"\n' + + b'tbl1 a=1546300804000000000n,b="e"\n' + + b'tbl1 a=1546300805000000000n,b="f"\n' + b'tbl1 b="g"\n' + b'tbl1 b="h"\n' + b'tbl1 b="i"\n') @@ -856,9 +856,9 @@ def test_datetime64_numpy_col(self): buf = _dataframe(self.version, df, table_name='tbl1', at=qi.ServerTimestamp) self.assertEqual( buf, - b'tbl1 a=0t\n' + - b'tbl1 a=1000000t\n' + - b'tbl1 a=2000000t\n') + b'tbl1 a=0n\n' + + b'tbl1 a=1000000000n\n' + + b'tbl1 a=2000000000n\n') def test_datetime64_tz_arrow_col(self): df = pd.DataFrame({ @@ -878,10 +878,10 @@ def test_datetime64_tz_arrow_col(self): self.assertEqual( buf, # Note how these are 5hr offset from `test_datetime64_numpy_col`. - b'tbl1,b=sym1 a=1546318800000000t\n' + - b'tbl1,b=sym2 a=1546318801000000t\n' + + b'tbl1,b=sym1 a=1546318800000000000n\n' + + b'tbl1,b=sym2 a=1546318801000000000n\n' + b'tbl1,b=sym3\n' + - b'tbl1,b=sym4 a=1546318803000000t\n') + b'tbl1,b=sym4 a=1546318803000000000n\n') # Not epoch 0. df = pd.DataFrame({ @@ -900,9 +900,9 @@ def test_datetime64_tz_arrow_col(self): self.assertEqual( buf, # Note how these are 5hr offset from `test_datetime64_numpy_col`. - b'tbl1,b=sym1 a=18000000000t\n' + - b'tbl1,b=sym2 a=18001000000t\n' + - b'tbl1,b=sym3 a=18002000000t\n') + b'tbl1,b=sym1 a=18000000000000n\n' + + b'tbl1,b=sym2 a=18001000000000n\n' + + b'tbl1,b=sym3 a=18002000000000n\n') # Actual epoch 0. df = pd.DataFrame({ @@ -920,9 +920,9 @@ def test_datetime64_tz_arrow_col(self): buf = _dataframe(self.version, df, table_name='tbl1', symbols=['b'], at=qi.ServerTimestamp) self.assertEqual( buf, - b'tbl1,b=sym1 a=0t\n' + - b'tbl1,b=sym2 a=1000000t\n' + - b'tbl1,b=sym3 a=2000000t\n') + b'tbl1,b=sym1 a=0n\n' + + b'tbl1,b=sym2 a=1000000000n\n' + + b'tbl1,b=sym3 a=2000000000n\n') df2 = pd.DataFrame({ 'a': [ @@ -936,8 +936,8 @@ def test_datetime64_tz_arrow_col(self): # Mostly, here assert that negative timestamps are allowed. self.assertIn( buf, - [b'tbl1,b=sym1 a=-2208970800000000t\n', - b'tbl1,b=sym1 a=-2208971040000000t\n']) + [b'tbl1,b=sym1 a=-2208970800000000000n\n', + b'tbl1,b=sym1 a=-2208971040000000000n\n']) def test_datetime64_numpy_at(self): df = pd.DataFrame({ From fe6b3c91a28548934c8dea75d03bf8e34a16aa48 Mon Sep 17 00:00:00 2001 From: victor Date: Wed, 3 Sep 2025 14:50:27 +0800 Subject: [PATCH 02/19] update c lib. 
--- c-questdb-client | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/c-questdb-client b/c-questdb-client index 107a3eb9..fdb62619 160000 --- a/c-questdb-client +++ b/c-questdb-client @@ -1 +1 @@ -Subproject commit 107a3eb95958d0857326e35c6dbd2940c01616bf +Subproject commit fdb62619f5e1390333fadc9de9077608492a2786 From ce5373de15542221b79bab66b1400fdd02cfe93e Mon Sep 17 00:00:00 2001 From: victor Date: Fri, 12 Sep 2025 11:38:48 +0800 Subject: [PATCH 03/19] add support for TimestampNanos in ingress and update test cases --- c-questdb-client | 2 +- src/questdb/ingress.pyx | 4 ++-- test/test.py | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/c-questdb-client b/c-questdb-client index fdb62619..87a17ca2 160000 --- a/c-questdb-client +++ b/c-questdb-client @@ -1 +1 @@ -Subproject commit fdb62619f5e1390333fadc9de9077608492a2786 +Subproject commit 87a17ca22ef92d7cb6f229b362e8e61b63af543d diff --git a/src/questdb/ingress.pyx b/src/questdb/ingress.pyx index c8d4e4e8..0179c59c 100644 --- a/src/questdb/ingress.pyx +++ b/src/questdb/ingress.pyx @@ -1027,12 +1027,12 @@ cdef class Buffer: self._column_str(c_name, value) elif isinstance(value, TimestampMicros): self._column_ts_micros(c_name, value) + elif isinstance(value, TimestampNanos): + self._column_ts_nanos(c_name, value) elif PyArray_CheckExact( value): self._column_numpy(c_name, value) elif isinstance(value, cp_datetime): self._column_dt(c_name, value) - elif isinstance(value, TimestampNanos): - self._column_ts_nanos(c_name, value) else: valid = ', '.join(( 'bool', diff --git a/test/test.py b/test/test.py index 3d6fe7f4..49abeacf 100755 --- a/test/test.py +++ b/test/test.py @@ -154,7 +154,7 @@ def test_column(self): 'col9': None}, at=qi.ServerTimestamp) exp = ( b'tbl1 col1=t,col2=f,col3=-1i,col4' + _float_binary_bytes(0.5, self.version == 1) + - b',col5="val",col6=12345000n,col7=12345678n,col8=7200000000000n\n') + b',col5="val",col6=12345t,col7=12345678n,col8=7200000000t\n') self.assertEqual(bytes(buf), exp) def test_none_symbol(self): From 3e885bc558fdab23628c7d24b9b8b9d6c6b145ae Mon Sep 17 00:00:00 2001 From: Adam Cimarosti Date: Tue, 30 Sep 2025 16:16:39 +0100 Subject: [PATCH 04/19] repointed submodule --- c-questdb-client | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/c-questdb-client b/c-questdb-client index 87a17ca2..b3f08faa 160000 --- a/c-questdb-client +++ b/c-questdb-client @@ -1 +1 @@ -Subproject commit 87a17ca22ef92d7cb6f229b362e8e61b63af543d +Subproject commit b3f08faafd10bf555d873080a8ac4604a807c011 From ad1a02846263b192ac1a2a1f114816b0d35951b7 Mon Sep 17 00:00:00 2001 From: Adam Cimarosti Date: Tue, 30 Sep 2025 16:41:08 +0100 Subject: [PATCH 05/19] changelog --- CHANGELOG.rst | 36 ++++++++++++++++++++++++++++++++++++ 1 file changed, 36 insertions(+) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 656b4177..693b3391 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -5,6 +5,42 @@ Changelog ========= +3.1.0 (2025-09-30) +------------------ + +Features +~~~~~~~~ + +From QuestDB 9.1.0 you can use ``CREATE TABLE`` SQL statements with +``TIMESTAMP_NANO`` column types, and/or configure the database to use nanosecond +precision designated timestamp columns by setting the +``line.timestamp.default.column.type=TIMESTAMP_NANO`` config option in +``server.conf``. + +This client release adds support for sending nanoseconds timestamps to the +server without loss of precision. 
+ +The change is backwards compatible with older QuestDB releases and does not +introduce new APIs, but the sender/buffer's ``.row()`` API can now additionally +accept nanosecond precision. + +.. code-block:: python + +    conf = f'http::addr=localhost:9000;' +    with Sender.from_conf(conf) as sender: +        sender.row( +            'trade_executions', +            symbols={ +                'product': 'VOD.L', +                'parent_order': '65d1ba36-390e-49a2-93e3-a05ef004b5ff', +                'side': 'buy'}, +            columns={ +                'order_sent': TimestampNanos(1759246702031355012)}, +            at=TimestampNanos(1759246702909423071)) + +If you're using dataframes, nanosecond timestamps are now also transferred with +full precision. + 3.0.0 (2025-07-07) ------------------ From aa6818cf07bcc04097bde69e8e4ab803188f1474 Mon Sep 17 00:00:00 2001 From: Adam Cimarosti Date: Tue, 30 Sep 2025 17:05:21 +0100 Subject: [PATCH 06/19] readme tweak --- README.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.rst b/README.rst index 11a989d4..7fcc18dd 100644 --- a/README.rst +++ b/README.rst @@ -18,7 +18,7 @@ and full-connection encryption with Install ======= -The latest version of the library is **3.0.0** (`changelog `_). +The latest version of the library is 3.0.0 (`changelog `_). :: From f094349876e3f61c66abb7ca72d0bbc520ee0bd7 Mon Sep 17 00:00:00 2001 From: Adam Cimarosti Date: Tue, 30 Sep 2025 17:05:49 +0100 Subject: [PATCH 07/19] version bump --- .bumpversion.toml | 2 +- README.rst | 2 +- docs/conf.py | 2 +- pyproject.toml | 2 +- setup.py | 2 +- src/questdb/__init__.py | 2 +- src/questdb/ingress.pyx | 2 +- 7 files changed, 7 insertions(+), 7 deletions(-) diff --git a/.bumpversion.toml b/.bumpversion.toml index afd41c23..4fcf859d 100644 --- a/.bumpversion.toml +++ b/.bumpversion.toml @@ -1,5 +1,5 @@ [tool.bumpversion] -current_version = "3.0.0" +current_version = "3.1.0" commit = false tag = false diff --git a/README.rst b/README.rst index 7fcc18dd..91ddb06d 100644 --- a/README.rst +++ b/README.rst @@ -18,7 +18,7 @@ and full-connection encryption with Install ======= -The latest version of the library is 3.0.0 (`changelog `_). +The latest version of the library is 3.1.0 (`changelog `_). 
:: diff --git a/docs/conf.py b/docs/conf.py index 3fce6413..dedf4b01 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -28,7 +28,7 @@ year = '2024' author = 'QuestDB' copyright = '{0}, {1}'.format(year, author) -version = release = '3.0.0' +version = release = '3.1.0' github_repo_url = 'https://github.com/questdb/py-questdb-client' diff --git a/pyproject.toml b/pyproject.toml index 72487a56..fead5853 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -2,7 +2,7 @@ # See: https://packaging.python.org/en/latest/specifications/declaring-project-metadata/ name = "questdb" requires-python = ">=3.9" -version = "3.0.0" +version = "3.1.0" description = "QuestDB client library for Python" readme = "README.rst" classifiers = [ diff --git a/setup.py b/setup.py index da7b9ecb..65aaab11 100755 --- a/setup.py +++ b/setup.py @@ -171,7 +171,7 @@ def readme(): setup( name='questdb', - version='3.0.0', + version='3.1.0', platforms=['any'], python_requires='>=3.8', install_requires=[], diff --git a/src/questdb/__init__.py b/src/questdb/__init__.py index 4eb28e38..7f5601d9 100644 --- a/src/questdb/__init__.py +++ b/src/questdb/__init__.py @@ -1 +1 @@ -__version__ = '3.0.0' +__version__ = '3.1.0' diff --git a/src/questdb/ingress.pyx b/src/questdb/ingress.pyx index 0179c59c..04d39634 100644 --- a/src/questdb/ingress.pyx +++ b/src/questdb/ingress.pyx @@ -104,7 +104,7 @@ cnp.import_array() # This value is automatically updated by the `bump2version` tool. # If you need to update it, also update the search definition in # .bumpversion.cfg. -VERSION = '3.0.0' +VERSION = '3.1.0' WARN_HIGH_RECONNECTS = True From a861d98fbc0a593639e49fd8d9a7da226ef50dad Mon Sep 17 00:00:00 2001 From: Adam Cimarosti Date: Wed, 1 Oct 2025 09:03:55 +0100 Subject: [PATCH 08/19] date bump in changelog --- CHANGELOG.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 693b3391..dd427767 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -5,7 +5,7 @@ Changelog ========= -3.1.0 (2025-09-30) +3.1.0 (2025-10-01) ------------------ Features From e172cea303dc8b3ea3fce1b80bddf19bbfa41582 Mon Sep 17 00:00:00 2001 From: Adam Cimarosti Date: Wed, 1 Oct 2025 11:13:57 +0100 Subject: [PATCH 09/19] attempting to add macos arm runner --- ci/cibuildwheel.yaml | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/ci/cibuildwheel.yaml b/ci/cibuildwheel.yaml index 2f684b5d..88599e33 100644 --- a/ci/cibuildwheel.yaml +++ b/ci/cibuildwheel.yaml @@ -141,6 +141,21 @@ stages: - task: PublishBuildArtifacts@1 inputs: {pathtoPublish: wheelhouse} + - job: macos_arm64 + pool: {vmImage: 'macOS-15'} + timeoutInMinutes: 90 + steps: + - task: UsePythonVersion@0 + - bash: | + set -o errexit + python3 -m pip install --upgrade pip + python3 -m pip install cibuildwheel + displayName: Install dependencies + - bash: cibuildwheel --output-dir wheelhouse . 
+ displayName: Build wheels + - task: PublishBuildArtifacts@1 + inputs: {pathtoPublish: wheelhouse} + - job: windows_i686 pool: {vmImage: 'windows-2019'} timeoutInMinutes: 90 From c092a71a3d462da60a7b69186f5b21125e039dae Mon Sep 17 00:00:00 2001 From: Adam Cimarosti Date: Wed, 1 Oct 2025 15:26:56 +0100 Subject: [PATCH 10/19] revert change, arm mac ci still unavailable --- ci/cibuildwheel.yaml | 15 --------------- 1 file changed, 15 deletions(-) diff --git a/ci/cibuildwheel.yaml b/ci/cibuildwheel.yaml index 88599e33..2f684b5d 100644 --- a/ci/cibuildwheel.yaml +++ b/ci/cibuildwheel.yaml @@ -141,21 +141,6 @@ stages: - task: PublishBuildArtifacts@1 inputs: {pathtoPublish: wheelhouse} - - job: macos_arm64 - pool: {vmImage: 'macOS-15'} - timeoutInMinutes: 90 - steps: - - task: UsePythonVersion@0 - - bash: | - set -o errexit - python3 -m pip install --upgrade pip - python3 -m pip install cibuildwheel - displayName: Install dependencies - - bash: cibuildwheel --output-dir wheelhouse . - displayName: Build wheels - - task: PublishBuildArtifacts@1 - inputs: {pathtoPublish: wheelhouse} - - job: windows_i686 pool: {vmImage: 'windows-2019'} timeoutInMinutes: 90 From 2c06fca38ccb84210a054aa9ccaa289fc3df9243 Mon Sep 17 00:00:00 2001 From: Adam Cimarosti Date: Thu, 2 Oct 2025 10:37:41 +0100 Subject: [PATCH 11/19] changelog --- CHANGELOG.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index dd427767..716e2c31 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -5,7 +5,7 @@ Changelog ========= -3.1.0 (2025-10-01) +3.1.0 (2025-10-02) ------------------ Features From 84bbbf989032f2b8f8819320620d209cac423d9b Mon Sep 17 00:00:00 2001 From: Adam Cimarosti Date: Thu, 2 Oct 2025 11:26:41 +0100 Subject: [PATCH 12/19] bumping timeouts to compensate for very slow mac ci --- ci/run_tests_pipeline.yaml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/ci/run_tests_pipeline.yaml b/ci/run_tests_pipeline.yaml index 713816e2..cc2d5e5f 100644 --- a/ci/run_tests_pipeline.yaml +++ b/ci/run_tests_pipeline.yaml @@ -28,7 +28,7 @@ stages: pool: name: $(poolName) vmImage: $(imageName) - timeoutInMinutes: 45 + timeoutInMinutes: 60 steps: - checkout: self fetchDepth: 1 @@ -74,7 +74,7 @@ stages: pool: name: "Azure Pipelines" vmImage: "ubuntu-latest" - timeoutInMinutes: 45 + timeoutInMinutes: 60 steps: - checkout: self fetchDepth: 1 @@ -98,7 +98,7 @@ stages: pool: name: "Azure Pipelines" vmImage: "ubuntu-latest" - timeoutInMinutes: 45 + timeoutInMinutes: 60 steps: - checkout: self fetchDepth: 1 From a3235fd31f09a3cd0d50ec4afb3998602401dfb0 Mon Sep 17 00:00:00 2001 From: Adam Cimarosti Date: Thu, 2 Oct 2025 13:52:50 +0100 Subject: [PATCH 13/19] so slow --- ci/run_tests_pipeline.yaml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/ci/run_tests_pipeline.yaml b/ci/run_tests_pipeline.yaml index cc2d5e5f..230b4873 100644 --- a/ci/run_tests_pipeline.yaml +++ b/ci/run_tests_pipeline.yaml @@ -28,7 +28,7 @@ stages: pool: name: $(poolName) vmImage: $(imageName) - timeoutInMinutes: 60 + timeoutInMinutes: 90 steps: - checkout: self fetchDepth: 1 @@ -74,7 +74,7 @@ stages: pool: name: "Azure Pipelines" vmImage: "ubuntu-latest" - timeoutInMinutes: 60 + timeoutInMinutes: 90 steps: - checkout: self fetchDepth: 1 @@ -98,7 +98,7 @@ stages: pool: name: "Azure Pipelines" vmImage: "ubuntu-latest" - timeoutInMinutes: 60 + timeoutInMinutes: 90 steps: - checkout: self fetchDepth: 1 From 029d50fba8e50acad49f8a10add6bbc499f0ac3d Mon Sep 17 00:00:00 2001 From: Adam 
Cimarosti Date: Fri, 3 Oct 2025 17:23:54 +0100 Subject: [PATCH 14/19] changelog --- CHANGELOG.rst | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 716e2c31..30150488 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -5,16 +5,16 @@ Changelog ========= -3.1.0 (2025-10-02) +3.1.0 (2025-10-06) ------------------ Features ~~~~~~~~ From QuestDB 9.1.0 you can use ``CREATE TABLE`` SQL statements with -``TIMESTAMP_NANO`` column types, and/or configure the database to use nanosecond +``TIMESTAMP_NS`` column types, and/or configure the database to use nanosecond precision designated timestamp columns by setting the -``line.timestamp.default.column.type=TIMESTAMP_NANO`` config option in +``line.timestamp.default.column.type=TIMESTAMP_NS`` config option in ``server.conf``. This client release adds support for sending nanoseconds timestamps to the From f8b1a5d1c82a27960138ac2531754a1f856670c9 Mon Sep 17 00:00:00 2001 From: Adam Cimarosti Date: Wed, 8 Oct 2025 17:52:00 +0100 Subject: [PATCH 15/19] updated submodule dep and fixed up test cases --- c-questdb-client | 2 +- src/questdb/ingress.pyx | 2 + test/test.py | 77 ++++++++++++++----------- test/test_dataframe.py | 121 +++++++++++++++++++++------------------- test/test_tools.py | 52 ++++++++++++++++- 5 files changed, 163 insertions(+), 91 deletions(-) diff --git a/c-questdb-client b/c-questdb-client index b3f08faa..a1c0a581 160000 --- a/c-questdb-client +++ b/c-questdb-client @@ -1 +1 @@ -Subproject commit b3f08faafd10bf555d873080a8ac4604a807c011 +Subproject commit a1c0a581ab315fb1ffd5d3d4b0d886d88d487c36 diff --git a/src/questdb/ingress.pyx b/src/questdb/ingress.pyx index 04d39634..2b1d0807 100644 --- a/src/questdb/ingress.pyx +++ b/src/questdb/ingress.pyx @@ -1010,6 +1010,8 @@ cdef class Buffer: cdef inline void_int _column_dt( self, line_sender_column_name c_name, cp_datetime dt) except -1: cdef line_sender_error* err = NULL + # We limit ourselves to micros, since this is the maximum precision + # exposed by the datetime library in Python. if not line_sender_buffer_column_ts_micros( self._impl, c_name, datetime_to_micros(dt), &err): raise c_err_to_py(err) diff --git a/test/test.py b/test/test.py index 49abeacf..39a02e2e 100755 --- a/test/test.py +++ b/test/test.py @@ -14,7 +14,10 @@ import patch_path -from test_tools import _float_binary_bytes, _array_binary_bytes +from test_tools import ( +    _float_binary_bytes, +    _array_binary_bytes, +    TimestampEncodingMixin) PROJ_ROOT = patch_path.PROJ_ROOT sys.path.append(str(PROJ_ROOT / 'c-questdb-client' / 'system_test')) @@ -72,7 +75,7 @@ class TestBases: The discoverable subclasses can drive extra parameters.
""" - class TestBuffer(unittest.TestCase): + class TestBuffer(unittest.TestCase, TimestampEncodingMixin): def test_buffer_row_at_disallows_none(self): with self.assertRaisesRegex( qi.IngressError, @@ -152,9 +155,11 @@ def test_column(self): 'col7': qi.TimestampNanos(12345678), 'col8': two_h_after_epoch, 'col9': None}, at=qi.ServerTimestamp) + et = self.enc_ts_t + en = self.enc_ts_n exp = ( b'tbl1 col1=t,col2=f,col3=-1i,col4' + _float_binary_bytes(0.5, self.version == 1) + - b',col5="val",col6=12345t,col7=12345678n,col8=7200000000t\n') + f',col5="val",col6={et(12345)},col7={en(12345678)},col8={et(7200000000)}\n'.encode()) self.assertEqual(bytes(buf), exp) def test_none_symbol(self): @@ -301,7 +306,7 @@ def test_int_range(self): with self.assertRaises(OverflowError): buf.row('tbl1', columns={'num': -2 ** 63 - 1}, at=qi.ServerTimestamp) - class TestSender(unittest.TestCase): + class TestSender(unittest.TestCase, TimestampEncodingMixin): def test_transaction_row_at_disallows_none(self): with HttpServer() as server, self.builder('http', '127.0.0.1', server.port) as sender: with self.assertRaisesRegex( @@ -385,8 +390,8 @@ def test_basic(self): msgs = server.recv() self.assertEqual(msgs, [ (b'tab1,t1=val1,t2=val2 ' - b'f1=t,f2=12345i,f3' + _float_binary_bytes(10.75) + b',f4="val3" ' - b'111222233333'), + b'f1=t,f2=12345i,f3' + _float_binary_bytes(10.75) + b',f4="val3" ' + + self.enc_des_ts_n(111222233333, v=2).encode()), b'tab1,tag3=value\\ 3,tag4=value:4 field5=f']) def test_bad_protocol_versions(self): @@ -514,8 +519,8 @@ def test_two_rows_explicit_buffer(self): columns={'price': '111222233343i', 'qty': 2.5}, at=qi.TimestampNanos(111222233343)) exp = ( - b'line_sender_buffer_example2,id=Hola price="111222233333i",qty' + _float_binary_bytes(3.5) + b' 111222233333\n' - b'line_sender_example,id=Adios price="111222233343i",qty' + _float_binary_bytes(2.5) + b' 111222233343\n') + b'line_sender_buffer_example2,id=Hola price="111222233333i",qty' + _float_binary_bytes(3.5) + b' 111222233333n\n' + b'line_sender_example,id=Adios price="111222233343i",qty' + _float_binary_bytes(2.5) + b' 111222233343n\n') self.assertEqual(bytes(buffer), exp) sender.flush(buffer) msgs = server.recv() @@ -756,9 +761,10 @@ def test_transaction_over_tcp(self): def test_transaction_basic(self): ts = qi.TimestampNanos.now() + e = lambda ts: self.enc_des_ts(ts, v=2) expected = ( - f'table_name,sym1=val1 {ts.value}\n' + - f'table_name,sym2=val2 {ts.value}\n').encode('utf-8') + f'table_name,sym1=val1 {e(ts)}\n' + + f'table_name,sym2=val2 {e(ts)}\n').encode('utf-8') with HttpServer() as server, self.builder('http', '127.0.0.1', server.port) as sender: with sender.transaction('table_name') as txn: self.assertIs(txn.row(symbols={'sym1': 'val1'}, at=ts), txn) @@ -769,9 +775,10 @@ def test_transaction_basic(self): @unittest.skipIf(not pd, 'pandas not installed') def test_transaction_basic_df(self): ts = qi.TimestampNanos.now() + e = lambda num: self.enc_des_ts(num, v=2) expected = ( - f'table_name,sym1=val1 {ts.value}\n' + - f'table_name,sym2=val2 {ts.value}\n').encode('utf-8') + f'table_name,sym1=val1 {e(ts)}\n' + + f'table_name,sym2=val2 {e(ts)}\n').encode('utf-8') with HttpServer() as server, self.builder('http', '127.0.0.1', server.port) as sender: with sender.transaction('table_name') as txn: df = pd.DataFrame({'sym1': ['val1', None], 'sym2': [None, 'val2']}) @@ -781,9 +788,10 @@ def test_transaction_basic_df(self): def test_transaction_no_auto_flush(self): ts = qi.TimestampNanos.now() + e = lambda ts: self.enc_des_ts(ts, v=2) 
expected = ( - f'table_name,sym1=val1 {ts.value}\n' + - f'table_name,sym2=val2 {ts.value}\n').encode('utf-8') + f'table_name,sym1=val1 {e(ts)}\n' + + f'table_name,sym2=val2 {e(ts)}\n').encode('utf-8') with HttpServer() as server, self.builder('http', '127.0.0.1', server.port, auto_flush=False) as sender: with sender.transaction('table_name') as txn: txn.row(symbols={'sym1': 'val1'}, at=ts) @@ -794,9 +802,10 @@ def test_transaction_no_auto_flush(self): @unittest.skipIf(not pd, 'pandas not installed') def test_transaction_no_auto_flush_df(self): ts = qi.TimestampNanos.now() + e = lambda ts: self.enc_des_ts(ts, v=2) expected = ( - f'table_name,sym1=val1 {ts.value}\n' + - f'table_name,sym2=val2 {ts.value}\n').encode('utf-8') + f'table_name,sym1=val1 {e(ts)}\n' + + f'table_name,sym2=val2 {e(ts)}\n').encode('utf-8') with HttpServer() as server, self.builder('http', '127.0.0.1', server.port, auto_flush=False) as sender: with sender.transaction('table_name') as txn: df = pd.DataFrame({'sym1': ['val1', None], 'sym2': [None, 'val2']}) @@ -806,12 +815,13 @@ def test_transaction_no_auto_flush_df(self): def test_transaction_auto_flush_pending_buf(self): ts = qi.TimestampNanos.now() + e = lambda ts: self.enc_des_ts(ts, v=2) expected1 = ( - f'tbl1,sym1=val1 {ts.value}\n' + - f'tbl1,sym2=val2 {ts.value}\n').encode('utf-8') + f'tbl1,sym1=val1 {e(ts)}\n' + + f'tbl1,sym2=val2 {e(ts)}\n').encode('utf-8') expected2 = ( - f'tbl2,sym3=val3 {ts.value}\n' + - f'tbl2,sym4=val4 {ts.value}\n').encode('utf-8') + f'tbl2,sym3=val3 {e(ts)}\n' + + f'tbl2,sym4=val4 {e(ts)}\n').encode('utf-8') with HttpServer() as server, self.builder('http', '127.0.0.1', server.port, auto_flush=True) as sender: self.assertIs(sender.row('tbl1', symbols={'sym1': 'val1'}, at=ts), sender) self.assertIs(sender.row('tbl1', symbols={'sym2': 'val2'}, at=ts), sender) @@ -836,11 +846,12 @@ def test_transaction_no_auto_flush_pending_buf(self): def test_transaction_immediate_auto_flush(self): ts = qi.TimestampNanos.now() - expected1 = f'tbl1,sym1=val1 {ts.value}\n'.encode('utf-8') - expected2 = f'tbl2,sym2=val2 {ts.value}\n'.encode('utf-8') + e = lambda num: self.enc_des_ts(num, v=2) + expected1 = f'tbl1,sym1=val1 {e(ts)}\n'.encode('utf-8') + expected2 = f'tbl2,sym2=val2 {e(ts)}\n'.encode('utf-8') expected3 = ( - f'tbl3,sym3=val3 {ts.value}\n' + - f'tbl3,sym4=val4 {ts.value}\n').encode('utf-8') + f'tbl3,sym3=val3 {e(ts)}\n' + + f'tbl3,sym4=val4 {e(ts)}\n').encode('utf-8') with HttpServer() as server, self.builder('http', '127.0.0.1', server.port, auto_flush_rows=1) as sender: self.assertIs(sender.row('tbl1', symbols={'sym1': 'val1'}, at=ts), sender) self.assertIs(sender.row('tbl2', symbols={'sym2': 'val2'}, at=ts), sender) @@ -856,11 +867,12 @@ def test_transaction_immediate_auto_flush(self): @unittest.skipIf(not pd, 'pandas not installed') def test_transaction_immediate_auto_flush_df(self): ts = qi.TimestampNanos.now() - expected1 = f'tbl1,sym1=val1 {ts.value}\n'.encode('utf-8') - expected2 = f'tbl2,sym2=val2 {ts.value}\n'.encode('utf-8') + e = lambda ts: self.enc_des_ts(ts, v=2) + expected1 = f'tbl1,sym1=val1 {e(ts)}\n'.encode('utf-8') + expected2 = f'tbl2,sym2=val2 {e(ts)}\n'.encode('utf-8') expected3 = ( - f'tbl3,sym3=val3 {ts.value}\n' + - f'tbl3,sym4=val4 {ts.value}\n').encode('utf-8') + f'tbl3,sym3=val3 {e(ts)}\n' + + f'tbl3,sym4=val4 {e(ts)}\n').encode('utf-8') with HttpServer() as server, self.builder('http', '127.0.0.1', server.port, auto_flush_rows=1) as sender: self.assertIs(sender.row('tbl1', symbols={'sym1': 'val1'}, at=ts), sender) 
self.assertIs(sender.row('tbl2', symbols={'sym2': 'val2'}, at=ts), sender) @@ -1133,8 +1145,9 @@ def _test_sender_http_auto_protocol_version(self, settings, expected_version: in symbols={'id': 'Hola'}, columns={'price': '111222233333i', 'qty': 3.5}, at=qi.TimestampNanos(111222233333)) + e = lambda num: self.enc_des_ts_n(num, v=expected_version) exp = b'line_sender_buffer_old_server2,id=Hola price="111222233333i",qty' + _float_binary_bytes( - 3.5, expected_version == 1) + b' 111222233333\n' + 3.5, expected_version == 1) + f' {e(111222233333)}\n'.encode() self.assertEqual(bytes(buffer), exp) sender.flush(buffer) self.assertEqual(len(server.requests), 1) @@ -1184,7 +1197,7 @@ def test_line_protocol_version_on_tcp(self): symbols={'id': 'Hola'}, columns={'qty': 3.5}, at=qi.TimestampNanos(111222233333)) - exp = b'line_sender_buffer_tcp_v1,id=Hola qty' + _float_binary_bytes(3.5) + b' 111222233333\n' + exp = b'line_sender_buffer_tcp_v1,id=Hola qty' + _float_binary_bytes(3.5) + b' 111222233333n\n' self.assertEqual(bytes(buffer), exp) sender.flush(buffer) self.assertEqual(server.recv()[0] + b'\n', exp) @@ -1210,7 +1223,7 @@ def _test_array_basic(self, arr: np.ndarray): 'array_test', columns={'array': arr}, at=qi.TimestampNanos(11111)) - exp = b'array_test array=' + _array_binary_bytes(arr) + b' 11111\n' + exp = b'array_test array=' + _array_binary_bytes(arr) + b' 11111n\n' sender.flush() self.assertEqual(len(server.requests), 1) self.assertEqual(server.requests[0], exp) @@ -1223,7 +1236,7 @@ def _test_array_basic(self, arr: np.ndarray): 'array_test', columns={'array': arr}, at=qi.TimestampNanos(11111)) - exp = b'array_test array=' + _array_binary_bytes(arr) + b' 11111\n' + exp = b'array_test array=' + _array_binary_bytes(arr) + b' 11111n\n' self.assertEqual(bytes(sender), exp) sender.flush() self.assertEqual(server.recv()[0] + b'\n', exp) diff --git a/test/test_dataframe.py b/test/test_dataframe.py index dcdeb856..66bbd71d 100644 --- a/test/test_dataframe.py +++ b/test/test_dataframe.py @@ -8,7 +8,7 @@ import functools import tempfile import pathlib -from test_tools import _float_binary_bytes, _array_binary_bytes +from test_tools import _float_binary_bytes, _array_binary_bytes, TimestampEncodingMixin BROKEN_TIMEZONES = True @@ -89,7 +89,7 @@ def wrapper(self, *args, **kwargs): return wrapper class TestPandasBase: - class TestPandas(unittest.TestCase): + class TestPandas(unittest.TestCase, TimestampEncodingMixin): def test_mandatory_at_dataframe(self): with self.assertRaisesRegex(TypeError, "needs keyword-only argument at"): _dataframe(self.version, []) @@ -204,11 +204,12 @@ def test_basic(self): table_name_col='T', symbols=['A', 'B', 'C', 'D'], at=-1) - self.assertEqual( - buf, - b't1,A=a1,B=b1,C=b1,D=a1 E' + _float_binary_bytes(1.0, self.version == 1) + b',F=1i 1520640000000000000\n' + - b't2,A=a2,D=a2 E' + _float_binary_bytes(2.0, self.version == 1) + b',F=2i 1520726400000000000\n' + - b't1,A=a3,B=b3,C=b3,D=a3 E' + _float_binary_bytes(3.0, self.version == 1) + b',F=3i 1520812800000000000\n') + e = self.enc_des_ts_n + exp = ( + b't1,A=a1,B=b1,C=b1,D=a1 E' + _float_binary_bytes(1.0, self.version == 1) + f',F=1i {e(1520640000000000000)}\n'.encode() + + b't2,A=a2,D=a2 E' + _float_binary_bytes(2.0, self.version == 1) + f',F=2i {e(1520726400000000000)}\n'.encode() + + b't1,A=a3,B=b3,C=b3,D=a3 E' + _float_binary_bytes(3.0, self.version == 1) + f',F=3i {e(1520812800000000000)}\n'.encode()) + self.assertEqual(buf, exp) def test_basic_with_arrays(self): if self.version == 1: @@ -219,11 +220,12 @@ def 
test_basic_with_arrays(self): table_name_col='T', symbols=['A', 'B', 'C', 'D'], at=-1) - self.assertEqual( - buf, - b't1,A=a1,B=b1,C=b1,D=a1 E' + _float_binary_bytes(1.0, self.version == 1) + b',F=1i,G=' + _array_binary_bytes(np.array([1.0])) + b' 1520640000000000000\n' + - b't2,A=a2,D=a2 E' + _float_binary_bytes(2.0, self.version == 1) + b',F=2i,G=' + _array_binary_bytes(np.array([10.0])) + b' 1520726400000000000\n' + - b't1,A=a3,B=b3,C=b3,D=a3 E' + _float_binary_bytes(3.0, self.version == 1) + b',F=3i,G=' + _array_binary_bytes(np.array([100.0])) + b' 1520812800000000000\n') + e = self.enc_des_ts_n + exp = ( + b't1,A=a1,B=b1,C=b1,D=a1 E' + _float_binary_bytes(1.0, self.version == 1) + b',F=1i,G=' + _array_binary_bytes(np.array([1.0])) + f' {e(1520640000000000000)}\n'.encode() + + b't2,A=a2,D=a2 E' + _float_binary_bytes(2.0, self.version == 1) + b',F=2i,G=' + _array_binary_bytes(np.array([10.0])) + f' {e(1520726400000000000)}\n'.encode() + + b't1,A=a3,B=b3,C=b3,D=a3 E' + _float_binary_bytes(3.0, self.version == 1) + b',F=3i,G=' + _array_binary_bytes(np.array([100.0])) + f' {e(1520812800000000000)}\n'.encode()) + self.assertEqual(buf, exp) def test_named_dataframe(self): df = pd.DataFrame({ @@ -276,13 +278,14 @@ def test_at_good(self): t6 = qi.TimestampNanos.from_datetime(t2) t7 = qi.TimestampNanos.from_datetime(t3) timestamps = [t1, t2, t3, t4, t5, t6, t7] + e = self.enc_des_ts_n for ts in timestamps: buf = _dataframe(self.version, df, table_name='tbl1', at=ts) self.assertEqual( buf, - b'tbl1 a=1i,b="a" 1520640000000000000\n' + - b'tbl1 a=2i,b="b" 1520640000000000000\n' + - b'tbl1 a=3i,b="c" 1520640000000000000\n') + f'tbl1 a=1i,b="a" {e(1520640000000000000)}\n'.encode() + + f'tbl1 a=2i,b="b" {e(1520640000000000000)}\n'.encode() + + f'tbl1 a=3i,b="c" {e(1520640000000000000)}\n'.encode()) @unittest.skipIf(BROKEN_TIMEZONES, 'requires accurate timezones') def test_at_neg(self): @@ -313,13 +316,14 @@ def test_at_ts_0(self): e7 = qi.TimestampNanos.from_datetime(e3) edge_timestamps = [e1, e2, e3, e4, e5, e6, e7] + e = self.enc_des_ts_n for ts in edge_timestamps: buf = _dataframe(self.version, df, table_name='tbl1', at=ts) self.assertEqual( buf, - b'tbl1 a=1i,b="a" 0\n' + - b'tbl1 a=2i,b="b" 0\n' + - b'tbl1 a=3i,b="c" 0\n') + f'tbl1 a=1i,b="a" {e(0)}\n'.encode() + + f'tbl1 a=2i,b="b" {e(0)}\n'.encode() + + f'tbl1 a=3i,b="c" {e(0)}\n'.encode()) def test_single_at_col(self): df = pd.DataFrame({'timestamp': pd.to_datetime(['2023-01-01'])}) @@ -837,17 +841,18 @@ def test_datetime64_numpy_col(self): dtype='datetime64[ns]'), 'b': ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i']}) buf = _dataframe(self.version, df, table_name='tbl1', at=qi.ServerTimestamp) - self.assertEqual( - buf, - b'tbl1 a=1546300800000000000n,b="a"\n' + - b'tbl1 a=1546300801000000000n,b="b"\n' + - b'tbl1 a=1546300802000000000n,b="c"\n' + - b'tbl1 a=1546300803000000000n,b="d"\n' + - b'tbl1 a=1546300804000000000n,b="e"\n' + - b'tbl1 a=1546300805000000000n,b="f"\n' + + e = self.enc_ts_n + exp = ( + f'tbl1 a={e(1546300800000000000)},b="a"\n'.encode() + + f'tbl1 a={e(1546300801000000000)},b="b"\n'.encode() + + f'tbl1 a={e(1546300802000000000)},b="c"\n'.encode() + + f'tbl1 a={e(1546300803000000000)},b="d"\n'.encode() + + f'tbl1 a={e(1546300804000000000)},b="e"\n'.encode() + + f'tbl1 a={e(1546300805000000000)},b="f"\n'.encode() + b'tbl1 b="g"\n' + b'tbl1 b="h"\n' + b'tbl1 b="i"\n') + self.assertEqual(buf, exp) df = pd.DataFrame({'a': pd.Series([ pd.Timestamp('1970-01-01 00:00:00'), @@ -856,9 +861,9 @@ def test_datetime64_numpy_col(self): buf 
= _dataframe(self.version, df, table_name='tbl1', at=qi.ServerTimestamp) self.assertEqual( buf, - b'tbl1 a=0n\n' + - b'tbl1 a=1000000000n\n' + - b'tbl1 a=2000000000n\n') + f'tbl1 a={e(0)}\n'.encode() + + f'tbl1 a={e(1000000000)}\n'.encode() + + f'tbl1 a={e(2000000000)}\n'.encode()) def test_datetime64_tz_arrow_col(self): df = pd.DataFrame({ @@ -875,13 +880,14 @@ def test_datetime64_tz_arrow_col(self): hour=0, minute=0, second=3, tz=_TZ)], 'b': ['sym1', 'sym2', 'sym3', 'sym4']}) buf = _dataframe(self.version, df, table_name='tbl1', symbols=['b'], at=qi.ServerTimestamp) + e = self.enc_ts_n self.assertEqual( buf, # Note how these are 5hr offset from `test_datetime64_numpy_col`. - b'tbl1,b=sym1 a=1546318800000000000n\n' + - b'tbl1,b=sym2 a=1546318801000000000n\n' + + f'tbl1,b=sym1 a={e(1546318800000000000)}\n'.encode() + + f'tbl1,b=sym2 a={e(1546318801000000000)}\n'.encode() + b'tbl1,b=sym3\n' + - b'tbl1,b=sym4 a=1546318803000000000n\n') + f'tbl1,b=sym4 a={e(1546318803000000000)}\n'.encode()) # Not epoch 0. df = pd.DataFrame({ @@ -900,9 +906,9 @@ def test_datetime64_tz_arrow_col(self): self.assertEqual( buf, # Note how these are 5hr offset from `test_datetime64_numpy_col`. - b'tbl1,b=sym1 a=18000000000000n\n' + - b'tbl1,b=sym2 a=18001000000000n\n' + - b'tbl1,b=sym3 a=18002000000000n\n') + f'tbl1,b=sym1 a={e(18000000000000)}\n'.encode() + + f'tbl1,b=sym2 a={e(18001000000000)}\n'.encode() + + f'tbl1,b=sym3 a={e(18002000000000)}\n'.encode()) # Actual epoch 0. df = pd.DataFrame({ @@ -920,9 +926,9 @@ def test_datetime64_tz_arrow_col(self): buf = _dataframe(self.version, df, table_name='tbl1', symbols=['b'], at=qi.ServerTimestamp) self.assertEqual( buf, - b'tbl1,b=sym1 a=0n\n' + - b'tbl1,b=sym2 a=1000000000n\n' + - b'tbl1,b=sym3 a=2000000000n\n') + f'tbl1,b=sym1 a={e(0)}\n'.encode() + + f'tbl1,b=sym2 a={e(1000000000)}\n'.encode() + + f'tbl1,b=sym3 a={e(2000000000)}\n'.encode()) df2 = pd.DataFrame({ 'a': [ @@ -936,8 +942,8 @@ def test_datetime64_tz_arrow_col(self): # Mostly, here assert that negative timestamps are allowed. 
self.assertIn( buf, - [b'tbl1,b=sym1 a=-2208970800000000000n\n', - b'tbl1,b=sym1 a=-2208971040000000000n\n']) + [f'tbl1,b=sym1 a={e(-2208970800000000000)}\n'.encode(), + f'tbl1,b=sym1 a={e(-2208971040000000000)}\n'.encode()]) def test_datetime64_numpy_at(self): df = pd.DataFrame({ @@ -954,18 +960,18 @@ def test_datetime64_numpy_at(self): dtype='datetime64[ns]'), 'b': [1, 2, 3, 4, 5, 6, 7, 8, 9]}) buf = _dataframe(self.version, df, table_name='tbl1', at='a') - self.assertEqual( - buf, - b'tbl1 b=1i 1546300800000000000\n' + - b'tbl1 b=2i 1546300801000000000\n' + - b'tbl1 b=3i 1546300802000000000\n' + - b'tbl1 b=4i 1546300803000000000\n' + - b'tbl1 b=5i 1546300804000000000\n' + - b'tbl1 b=6i 1546300805000000000\n' + + e = self.enc_des_ts_n + exp = ( + f'tbl1 b=1i {e(1546300800000000000)}\n'.encode() + + f'tbl1 b=2i {e(1546300801000000000)}\n'.encode() + + f'tbl1 b=3i {e(1546300802000000000)}\n'.encode() + + f'tbl1 b=4i {e(1546300803000000000)}\n'.encode() + + f'tbl1 b=5i {e(1546300804000000000)}\n'.encode() + + f'tbl1 b=6i {e(1546300805000000000)}\n'.encode() + b'tbl1 b=7i\n' + b'tbl1 b=8i\n' + b'tbl1 b=9i\n') - + self.assertEqual(buf, exp) df = pd.DataFrame({ 'a': pd.Series([ pd.Timestamp('1970-01-01 00:00:00'), @@ -976,9 +982,9 @@ def test_datetime64_numpy_at(self): buf = _dataframe(self.version, df, table_name='tbl1', at='a') self.assertEqual( buf, - b'tbl1 b=1i 0\n' + - b'tbl1 b=2i 1000000000\n' + - b'tbl1 b=3i 2000000000\n') + f'tbl1 b=1i {e(0)}\n'.encode() + + f'tbl1 b=2i {e(1000000000)}\n'.encode() + + f'tbl1 b=3i {e(2000000000)}\n'.encode()) def test_datetime64_tz_arrow_at(self): df = pd.DataFrame({ @@ -995,13 +1001,14 @@ def test_datetime64_tz_arrow_at(self): hour=0, minute=0, second=3, tz=_TZ)], 'b': ['sym1', 'sym2', 'sym3', 'sym4']}) buf = _dataframe(self.version, df, table_name='tbl1', symbols=['b'], at='a') - self.assertEqual( - buf, + e = self.enc_des_ts_n + exp = ( # Note how these are 5hr offset from `test_datetime64_numpy_col`. 
- b'tbl1,b=sym1 1546318800000000000\n' + - b'tbl1,b=sym2 1546318801000000000\n' + + f'tbl1,b=sym1 {e(1546318800000000000)}\n'.encode() + + f'tbl1,b=sym2 {e(1546318801000000000)}\n'.encode() + b'tbl1,b=sym3\n' + - b'tbl1,b=sym4 1546318803000000000\n') + f'tbl1,b=sym4 {e(1546318803000000000)}\n'.encode()) + self.assertEqual(buf, exp) df2 = pd.DataFrame({ 'a': [ diff --git a/test/test_tools.py b/test/test_tools.py index 69da3ae2..f5cc5380 100644 --- a/test/test_tools.py +++ b/test/test_tools.py @@ -1,6 +1,7 @@ import struct import numpy as np +import questdb.ingress as qi ARRAY_TYPE_TAGS = { np.float64: 10, @@ -50,4 +51,53 @@ def _array_binary_bytes(value: np.ndarray) -> bytes: ndim + shape_bytes + data_body - ) \ No newline at end of file + ) + + +class TimestampEncodingMixin: + def enc_ts_t(self, num): + return f'{num}t' + + def enc_ts_n(self, num, v=None): + protocol_version = v or self.version + if protocol_version == 1: + num = num // 1000 + suffix = 't' + else: + suffix = 'n' + return f'{num}{suffix}' + + def enc_ts(self, ts, v=None): + """encode a non-designated timestamp in ILP""" + if isinstance(ts, qi.TimestampMicros): + return self.enc_ts_u(ts.value, v=v) + elif isinstance(ts, qi.TimestampNanos): + return self.enc_ts_n(ts.value, v=v) + else: + raise ValueError(f'unsupported ts {ts!r}') + + def enc_des_ts_t(self, v=None): + protocol_version = v or self.version + if protocol_version == 1: + num = num * 1000 + suffix = '' + else: + suffix = 't' + return f'{num}{suffix}' + + def enc_des_ts_n(self, num, v=None): + protocol_version = v or self.version + if protocol_version == 1: + suffix = '' + else: + suffix = 'n' + return f'{num}{suffix}' + + def enc_des_ts(self, ts, v=None): + """encode a designated timestamp in ILP""" + if isinstance(ts, qi.TimestampMicros): + return self.enc_des_ts_t(ts.value, v=v) + elif isinstance(ts, qi.TimestampNanos): + return self.enc_des_ts_n(ts.value, v=v) + else: + raise ValueError(f'unsupported ts {ts!r}') From 9e83310a029ec9326e96add485b7579b325f5598 Mon Sep 17 00:00:00 2001 From: Adam Cimarosti Date: Thu, 9 Oct 2025 11:33:23 +0100 Subject: [PATCH 16/19] changelog --- CHANGELOG.rst | 25 +++++++++++++++---------- 1 file changed, 15 insertions(+), 10 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 30150488..7cd0a464 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -5,24 +5,20 @@ Changelog ========= -3.1.0 (2025-10-06) +4.0.0 (2025-10-09) ------------------ -Features -~~~~~~~~ +New Breaking Change Feature +~~~~~~~~~~~~~~~~~~~~~~~~~~~ From QuestDB 9.1.0 you can use ``CREATE TABLE`` SQL statements with -``TIMESTAMP_NS`` column types, and/or configure the database to use nanosecond -precision designated timestamp columns by setting the -``line.timestamp.default.column.type=TIMESTAMP_NS`` config option in -``server.conf``. +``TIMESTAMP_NS`` column types, or rely on column auto-creation. This client release adds support for sending nanoseconds timestamps to the server without loss of precision. -The change is backwards compatible with older QuestDB releases and does not -introduce new APIs, but the sender/buffer's ``.row()`` API can now additionally -accept nanosecond precision. +This release does not introduce new APIs, instead enhancing the sender/buffer's +``.row()`` API to additionally accept nanosecond precision. .. code-block:: python @@ -41,6 +37,15 @@ accept nanosecond precision. If you're using dataframes, nanosecond timestamps are now also transferred with full precision. 
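For example (an illustrative sketch only: the table and column names are invented, and ``pandas`` is assumed to be installed alongside the client), a ``datetime64[ns]`` column keeps its nanosecond component end to end:

.. code-block:: python

    import pandas as pd
    from questdb.ingress import Sender

    df = pd.DataFrame({
        'product': ['VOD.L'],
        'order_sent': [pd.Timestamp('2025-10-01 12:00:00.000000123')],
        'ts': [pd.Timestamp('2025-10-01 12:00:00.000000456')]})

    with Sender.from_conf('http::addr=localhost:9000;') as sender:
        # 'ts' is used as the designated timestamp column; both timestamp
        # columns are sent with their nanosecond component intact.
        sender.dataframe(
            df,
            table_name='trade_executions',
            symbols=['product'],
            at='ts')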
+The change is backwards compatible with older QuestDB releases, which will simply +continue using the ``TIMESTAMP`` column, even when nanoseconds are specified in +the client. + +This is a breaking change because it introduces new timestamp +`column auto-creation ` +behaviour. For full details and upgrade advice, see the +`nanosecond PR on GitHub `_. + 3.0.0 (2025-07-07) ------------------ From 7f79680f0c27252994026f0fcb820405a5deec74 Mon Sep 17 00:00:00 2001 From: Adam Cimarosti Date: Thu, 9 Oct 2025 13:20:33 +0100 Subject: [PATCH 17/19] additional tests --- src/questdb/ingress.pyx | 15 +++++++++++---- test/test.py | 17 +++++++++++++++++ test/test_tools.py | 15 +++++++++++---- 3 files changed, 39 insertions(+), 8 deletions(-) diff --git a/src/questdb/ingress.pyx b/src/questdb/ingress.pyx index 2b1d0807..93e06fe5 100644 --- a/src/questdb/ingress.pyx +++ b/src/questdb/ingress.pyx @@ -1055,15 +1055,20 @@ cdef class Buffer: if sender != NULL: may_flush_on_row_complete(self, sender) - cdef inline void_int _at_ts(self, TimestampNanos ts) except -1: + cdef inline void_int _at_ts_us(self, TimestampMicros ts) except -1: + cdef line_sender_error* err = NULL + if not line_sender_buffer_at_micros(self._impl, ts._value, &err): + raise c_err_to_py(err) + + cdef inline void_int _at_ts_ns(self, TimestampNanos ts) except -1: cdef line_sender_error* err = NULL if not line_sender_buffer_at_nanos(self._impl, ts._value, &err): raise c_err_to_py(err) cdef inline void_int _at_dt(self, cp_datetime dt) except -1: - cdef int64_t value = datetime_to_nanos(dt) + cdef int64_t value = datetime_to_micros(dt) cdef line_sender_error* err = NULL - if not line_sender_buffer_at_nanos(self._impl, value, &err): + if not line_sender_buffer_at_micros(self._impl, value, &err): raise c_err_to_py(err) cdef inline void_int _at_now(self) except -1: @@ -1074,8 +1079,10 @@ cdef class Buffer: cdef inline void_int _at(self, object ts) except -1: if ts is None: self._at_now() + elif isinstance(ts, TimestampMicros): + self._at_ts_us(ts) elif isinstance(ts, TimestampNanos): - self._at_ts(ts) + self._at_ts_ns(ts) elif isinstance(ts, cp_datetime): self._at_dt(ts) else: diff --git a/test/test.py b/test/test.py index 39a02e2e..1ef5c05a 100755 --- a/test/test.py +++ b/test/test.py @@ -76,6 +76,23 @@ class TestBases: """ class TestBuffer(unittest.TestCase, TimestampEncodingMixin): + def _test_buffer_row_ts(self, ts): + buffer = qi.Buffer(protocol_version=self.version) + buffer.row('trades', columns={'t': ts}, at=ts) + ec = self.enc_ts + ed = self.enc_des_ts + exp = f'trades t={ec(ts)} {ed(ts)}\n'.encode() + self.assertEqual(bytes(buffer), exp) + + def test_buffer_row_ts_micros(self): + self._test_buffer_row_ts(qi.TimestampMicros(10001)) + + def test_buffer_row_ts_nanos(self): + self._test_buffer_row_ts(qi.TimestampNanos(10000333)) + + def test_buffer_row_ts_datetime(self): + self._test_buffer_row_ts(datetime.datetime.now()) + def test_buffer_row_at_disallows_none(self): with self.assertRaisesRegex( qi.IngressError, diff --git a/test/test_tools.py b/test/test_tools.py index f5cc5380..09857c4a 100644 --- a/test/test_tools.py +++ b/test/test_tools.py @@ -1,5 +1,6 @@ import struct +import datetime import numpy as np import questdb.ingress as qi @@ -69,14 +70,17 @@ def enc_ts_n(self, num, v=None): def enc_ts(self, ts, v=None): """encode a non-designated timestamp in ILP""" - if isinstance(ts, qi.TimestampMicros): - return self.enc_ts_u(ts.value, v=v) + if isinstance(ts, datetime.datetime): + return self.enc_ts_t(
qi.TimestampMicros.from_datetime(ts).value) + elif isinstance(ts, qi.TimestampMicros): + return self.enc_ts_t(ts.value) elif isinstance(ts, qi.TimestampNanos): return self.enc_ts_n(ts.value, v=v) else: raise ValueError(f'unsupported ts {ts!r}') - def enc_des_ts_t(self, v=None): + def enc_des_ts_t(self, num, v=None): protocol_version = v or self.version if protocol_version == 1: num = num * 1000 @@ -95,7 +99,10 @@ def enc_des_ts_n(self, num, v=None): def enc_des_ts(self, ts, v=None): """encode a designated timestamp in ILP""" - if isinstance(ts, qi.TimestampMicros): + if isinstance(ts, datetime.datetime): + return self.enc_des_ts_t( + qi.TimestampMicros.from_datetime(ts).value, v=v) + elif isinstance(ts, qi.TimestampMicros): return self.enc_des_ts_t(ts.value, v=v) elif isinstance(ts, qi.TimestampNanos): return self.enc_des_ts_n(ts.value, v=v) From 58fe19941200d0f0f4e09ea4429e64a12d46a326 Mon Sep 17 00:00:00 2001 From: Adam Cimarosti Date: Thu, 9 Oct 2025 14:48:15 +0100 Subject: [PATCH 18/19] updated system tests, inc for upcoming version --- test/system_test.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/test/system_test.py b/test/system_test.py index 4de3254f..79fedbc6 100755 --- a/test/system_test.py +++ b/test/system_test.py @@ -27,7 +27,7 @@ import questdb.ingress as qi -QUESTDB_VERSION = '8.3.2' +QUESTDB_VERSION = '9.1.0' QUESTDB_PLAIN_INSTALL_PATH = None QUESTDB_AUTH_INSTALL_PATH = None FIRST_ARRAY_RELEASE = (8, 4, 0) @@ -211,13 +211,14 @@ def test_http(self): return resp = self.qdb_plain.retry_check_table(table_name, min_rows=3) + exp_ts_type = 'TIMESTAMP' if self.qdb_plain.version <= (9, 1, 0) else 'TIMESTAMP_NS' exp_columns = [ {'name': 'name_a', 'type': 'SYMBOL'}, {'name': 'name_b', 'type': 'BOOLEAN'}, {'name': 'name_c', 'type': 'LONG'}, {'name': 'name_d', 'type': 'DOUBLE'}, {'name': 'name_e', 'type': 'VARCHAR'}, - {'name': 'timestamp', 'type': 'TIMESTAMP'}] + {'name': 'timestamp', 'type': exp_ts_type}] self.assertEqual(resp['columns'], exp_columns) exp_dataset = [ # Comparison excludes timestamp column. From ea55206fdc0e6477254de924f396987692f06928 Mon Sep 17 00:00:00 2001 From: Adam Cimarosti Date: Thu, 9 Oct 2025 16:38:36 +0100 Subject: [PATCH 19/19] updated submodule --- CHANGELOG.rst | 7 ++++--- c-questdb-client | 2 +- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 7cd0a464..fc23722a 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -5,13 +5,13 @@ Changelog ========= -4.0.0 (2025-10-09) +4.0.0 (2025-10-10) ------------------ New Breaking Change Feature ~~~~~~~~~~~~~~~~~~~~~~~~~~~ -From QuestDB 9.1.0 you can use ``CREATE TABLE`` SQL statements with +From QuestDB 9.1.0 onwards you can use ``CREATE TABLE`` SQL statements with ``TIMESTAMP_NS`` column types, or rely on column auto-creation. This client release adds support for sending nanoseconds timestamps to the @@ -22,7 +22,8 @@ This release does not introduce new APIs, instead enhancing the sender/buffer's .. code-block:: python - conf = f'http::addr=localhost:9000;' + conf = 'http::addr=localhost:9000;' + # or `conf = 'tcp::addr=localhost:9009;protocol_version=2;'` with Sender.from_conf(conf) as sender: sender.row( 'trade_executions', diff --git a/c-questdb-client b/c-questdb-client index a1c0a581..db91ac72 160000 --- a/c-questdb-client +++ b/c-questdb-client @@ -1 +1 @@ -Subproject commit a1c0a581ab315fb1ffd5d3d4b0d886d88d487c36 +Subproject commit db91ac724c400d3bad9a2e2acb8b2ddb5bb61ceb