From 14c5e152274eced327fdf0ae1dd1f00313f1ee8c Mon Sep 17 00:00:00 2001 From: Fernando Date: Tue, 24 Mar 2026 15:16:38 -0300 Subject: [PATCH 01/60] ENH: (CLTARCH) Remove use of thread to run async methods. this code was developed with the help of Github Copilot. --- siriuspy/siriuspy/clientarch/client.py | 69 ++++++++------------------ 1 file changed, 22 insertions(+), 47 deletions(-) diff --git a/siriuspy/siriuspy/clientarch/client.py b/siriuspy/siriuspy/clientarch/client.py index dbd7387d5..ae2cbdb7e 100644 --- a/siriuspy/siriuspy/clientarch/client.py +++ b/siriuspy/siriuspy/clientarch/client.py @@ -11,9 +11,9 @@ import ssl as _ssl import urllib as _urllib from datetime import timedelta as _timedelta -from threading import Thread as _Thread from urllib.parse import quote as _quote +import nest_asyncio import numpy as _np import urllib3 as _urllib3 from aiohttp import ClientSession as _ClientSession @@ -40,7 +40,6 @@ def __init__(self, server_url=None, timeout=None): self.session = None self._timeout = timeout self._url = server_url or self.SERVER_URL - self._ret = None self._request_url = None # print('urllib3 InsecureRequestWarning disabled!') _urllib3.disable_warnings(_urllib3.exceptions.InsecureRequestWarning) @@ -97,13 +96,10 @@ def login(self, username, password): headers = {'User-Agent': 'Mozilla/5.0'} payload = {'username': username, 'password': password} url = self._create_url(method='login') - ret = self._run_async_event_loop( - self._create_session, - url, - headers=headers, - payload=payload, - ssl=False, + coro = self._create_session( + url, headers=headers, payload=payload, ssl=False ) + ret = self._run_sync_coro(coro) if ret is not None: self.session, authenticated = ret if authenticated: @@ -119,7 +115,8 @@ def login(self, username, password): def logout(self): """Close login session.""" if self.session: - resp = self._run_async_event_loop(self._close_session) + coro = self._close_session() + resp = self._run_sync_coro(coro) self.session = None 
return resp return None @@ -458,13 +455,12 @@ def _process_url_link_args(pvnames, pvoptnrpts, pvcolors, pvusediff): def _make_request(self, url, need_login=False, return_json=False): """Make request.""" self._request_url = url - response = self._run_async_event_loop( - self._handle_request, + coro = self._handle_request( url, return_json=return_json, need_login=need_login, ) - return response + return self._run_sync_coro(coro) def _create_url(self, method, **kwargs): """Create URL.""" @@ -480,40 +476,19 @@ def _create_url(self, method, **kwargs): return url # ---------- async methods ---------- - - def _run_async_event_loop(self, *args, **kwargs): - # NOTE: Run the asyncio commands in a separated Thread to isolate - # their EventLoop from the external environment (important for class - # to work within jupyter notebook environment). - _thread = _Thread( - target=self._thread_run_async_event_loop, - daemon=True, - args=args, - kwargs=kwargs, - ) - _thread.start() - _thread.join() - return self._ret - - def _thread_run_async_event_loop(self, func, *args, **kwargs): - """Get event loop.""" - close = False - try: - loop = _asyncio.get_event_loop() - except RuntimeError as error: - if 'no current event loop' in str(error): - loop = _asyncio.new_event_loop() - _asyncio.set_event_loop(loop) - close = True - else: - raise error + def _run_sync_coro(self, coro): + """Run an async coroutine synchronously, compatible with Jupyter.""" try: - self._ret = loop.run_until_complete(func(*args, **kwargs)) - except _asyncio.TimeoutError: - raise _exceptions.TimeoutError - - if close: - loop.close() + loop = _asyncio.get_running_loop() + try: + return loop.run_until_complete(coro) + except RuntimeError: + # Event loop already running (typical in Jupyter notebooks). 
+ nest_asyncio.apply(loop) + return loop.run_until_complete(coro) + except RuntimeError: + # No running loop, create a new one + return _asyncio.run(coro) async def _handle_request(self, url, return_json=False, need_login=False): """Handle request.""" @@ -562,8 +537,8 @@ async def _get_request_response(self, url, session, return_json): except ValueError: _log.error(f'Error with URL {response.url}') response = None - except _asyncio.TimeoutError as err_msg: - raise _exceptions.TimeoutError(err_msg) + except _asyncio.TimeoutError as err: + raise _exceptions.TimeoutError from err return response async def _create_session(self, url, headers, payload, ssl): From 6299426af03a5ae71017ca67e669dd8a96f56375 Mon Sep 17 00:00:00 2001 From: Fernando Date: Tue, 24 Mar 2026 15:24:50 -0300 Subject: [PATCH 02/60] BUG: update requirements.txt with nest_asyncio. --- siriuspy/requirements.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/siriuspy/requirements.txt b/siriuspy/requirements.txt index d7d73e713..3ac3aa082 100644 --- a/siriuspy/requirements.txt +++ b/siriuspy/requirements.txt @@ -1,6 +1,7 @@ aiohttp>=3.7.4 bottleneck>=1.3.2 mathphys +nest_asyncio numpy<=1.23 scipy<=1.13 pyepics>=3.4.0 From eb1a25e1d60f8ec82d9e7cb585bd2e34971e15ac Mon Sep 17 00:00:00 2001 From: Fernando Date: Wed, 25 Mar 2026 11:24:06 -0300 Subject: [PATCH 03/60] STY: (CLTARCH) format code. --- siriuspy/siriuspy/clientarch/client.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/siriuspy/siriuspy/clientarch/client.py b/siriuspy/siriuspy/clientarch/client.py index ae2cbdb7e..c81645735 100644 --- a/siriuspy/siriuspy/clientarch/client.py +++ b/siriuspy/siriuspy/clientarch/client.py @@ -349,7 +349,7 @@ def gen_archviewer_url_link( time_ref=None, pvoptnrpts=None, pvcolors=None, - pvusediff=False + pvusediff=False, ): """Generate a Archiver Viewer URL for the given PVs. @@ -393,7 +393,8 @@ def gen_archviewer_url_link( # Thanks to Rafael Lyra for the basis of this implementation! 
archiver_viewer_url = _envars.SRVURL_ARCHIVER_VIEWER + '/?pvConfig=' args = ClientArchiver._process_url_link_args( - pvnames, pvoptnrpts, pvcolors, pvusediff) + pvnames, pvoptnrpts, pvcolors, pvusediff + ) pvoptnrpts, pvcolors, pvusediff = args pv_search = '' for idx in range(len(pvnames)): @@ -456,9 +457,7 @@ def _make_request(self, url, need_login=False, return_json=False): """Make request.""" self._request_url = url coro = self._handle_request( - url, - return_json=return_json, - need_login=need_login, + url, return_json=return_json, need_login=need_login ) return self._run_sync_coro(coro) From 7227a31cf953f2b06ed8157cad90ea832b89ae32 Mon Sep 17 00:00:00 2001 From: Fernando Date: Wed, 25 Mar 2026 11:30:21 -0300 Subject: [PATCH 04/60] MNT: (CLTARCH) Remove nest_asyncio; use thread-based method. This code was done with the help of Google AI Mode. Since the nest_asyncio package was archived, we implemented again a solution based on running a different event loop in a new thread. However, this thread is long-lived and all tasks are submitted using the asyncio.run_coroutine_threadsafe function, which guarantees that exceptions will be passed to the end user and that there will be no infinite hangs. 
--- siriuspy/requirements.txt | 1 - siriuspy/siriuspy/clientarch/client.py | 91 ++++++++++++++++++++------ 2 files changed, 72 insertions(+), 20 deletions(-) diff --git a/siriuspy/requirements.txt b/siriuspy/requirements.txt index 3ac3aa082..d7d73e713 100644 --- a/siriuspy/requirements.txt +++ b/siriuspy/requirements.txt @@ -1,7 +1,6 @@ aiohttp>=3.7.4 bottleneck>=1.3.2 mathphys -nest_asyncio numpy<=1.23 scipy<=1.13 pyepics>=3.4.0 diff --git a/siriuspy/siriuspy/clientarch/client.py b/siriuspy/siriuspy/clientarch/client.py index c81645735..cf5c8ce03 100644 --- a/siriuspy/siriuspy/clientarch/client.py +++ b/siriuspy/siriuspy/clientarch/client.py @@ -8,15 +8,15 @@ import asyncio as _asyncio import logging as _log -import ssl as _ssl import urllib as _urllib from datetime import timedelta as _timedelta +from threading import Thread as _Thread from urllib.parse import quote as _quote -import nest_asyncio import numpy as _np import urllib3 as _urllib3 from aiohttp import ClientSession as _ClientSession + try: from lzstring import LZString as _LZString except: @@ -34,6 +34,10 @@ class ClientArchiver: SERVER_URL = _envars.SRVURL_ARCHIVER ENDPOINT = '/mgmt/bpl' + def __delete__(self): + """Turn off thread when deleting.""" + self.shutdown() + def __init__(self, server_url=None, timeout=None): """Initialize.""" timeout = timeout or ClientArchiver.DEFAULT_TIMEOUT @@ -41,17 +45,47 @@ def __init__(self, server_url=None, timeout=None): self._timeout = timeout self._url = server_url or self.SERVER_URL self._request_url = None - # print('urllib3 InsecureRequestWarning disabled!') + self._thread = self._loop = None + self.connect() _urllib3.disable_warnings(_urllib3.exceptions.InsecureRequestWarning) + def connect(self): + """Starts bg. event loop in a separate thread. + + Raises: + RuntimeError: when library is alread connected. 
+ """ + if self._loop_alive(): + return + + self._loop = _asyncio.new_event_loop() + self._thread = _Thread(target=self._run_event_loop, daemon=True) + self._thread.start() + + def shutdown(self, timeout=5): + """Safely stops the bg. loop and waits for the thread to exit.""" + if not self._loop_alive(): + return + + # 1. Cancel all pending tasks in the loop (to avoid ResourceWarnings) + self._loop.call_soon_threadsafe(self._cancel_all_tasks) + + # 2. Schedule the loop to stop processing + self._loop.call_soon_threadsafe(self._loop.stop) + + # 3. Wait for the thread to actually finish + self._thread.join(timeout=timeout) + if self._thread.is_alive(): + print('Warning: Background thread did not stop in time.') + @property def connected(self): """Connected.""" + if not self._loop_alive(): + return False try: - status = _urllib.request.urlopen( - self._url, timeout=self._timeout, context=_ssl.SSLContext() - ).status - return status == 200 + resp = self._make_request(self._url, return_json=False) + return resp.status == 200 except _urllib.error.URLError: return False @@ -453,6 +487,31 @@ def _process_url_link_args(pvnames, pvoptnrpts, pvcolors, pvusediff): pvusediff = [pvusediff] * len(pvnames) return pvoptnrpts, pvcolors, pvusediff + def _loop_alive(self): + """Check if thread is alive and loop is running.""" + return ( + self._thread is not None + and self._thread.is_alive() + and self._loop.is_running() + ) + + def _cancel_all_tasks(self): + """Helper to cancel tasks (must be called from the loop's thread).""" + if hasattr(_asyncio, 'all_tasks'): + all_tasks = _asyncio.all_tasks(loop=self._loop) + else: # python 3.6 + all_tasks = _asyncio.Task.all_tasks(loop=self._loop) + + for task in all_tasks: + task.cancel() + + def _run_event_loop(self): + _asyncio.set_event_loop(self._loop) + try: + self._loop.run_forever() + finally: + self._loop.close() + def _make_request(self, url, need_login=False, return_json=False): """Make request.""" self._request_url = url @@ -474,20 
+533,14 @@ def _create_url(self, method, **kwargs): url += '&'.join(['{}={}'.format(k, v) for k, v in kwargs.items()]) return url - # ---------- async methods ---------- def _run_sync_coro(self, coro): """Run an async coroutine synchronously, compatible with Jupyter.""" - try: - loop = _asyncio.get_running_loop() - try: - return loop.run_until_complete(coro) - except RuntimeError: - # Event loop already running (typical in Jupyter notebooks). - nest_asyncio.apply(loop) - return loop.run_until_complete(coro) - except RuntimeError: - # No running loop, create a new one - return _asyncio.run(coro) + if not self._thread.is_alive(): + raise RuntimeError('Library is shut down') + future = _asyncio.run_coroutine_threadsafe(coro, self._loop) + return future.result(timeout=self._timeout) + + # ---------- async methods ---------- async def _handle_request(self, url, return_json=False, need_login=False): """Handle request.""" From 949f9cac86d3ebfe603ec72de5013f62bdd02c0d Mon Sep 17 00:00:00 2001 From: Fernando Date: Thu, 26 Mar 2026 08:37:51 -0300 Subject: [PATCH 05/60] STY: (CLTARC.TIME) import only TypeError. Only one being used. --- siriuspy/siriuspy/clientarch/time.py | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/siriuspy/siriuspy/clientarch/time.py b/siriuspy/siriuspy/clientarch/time.py index ef5472b17..12543552c 100644 --- a/siriuspy/siriuspy/clientarch/time.py +++ b/siriuspy/siriuspy/clientarch/time.py @@ -3,7 +3,7 @@ from calendar import timegm as _timegm from datetime import datetime as _datetime, timedelta as _timedelta -from . 
import exceptions as _exceptions +from .exceptions import TypeError as _TypeError class Time(_datetime): @@ -46,7 +46,7 @@ class Time(_datetime): def __new__(cls, *args, **kwargs): """New object.""" if not args and not kwargs: - raise _exceptions.TypeError( + raise _TypeError( 'no arguments found to build Time object' ) if len(args) == 1: @@ -59,9 +59,10 @@ def __new__(cls, *args, **kwargs): else Time._DEFAULT_TIMESTAMP_FORMAT ) return Time.strptime(args[0], timestamp_format) - raise _exceptions.TypeError( + raise _TypeError( f'argument of unexpected type {type(args[0])}' ) + if len(kwargs) == 1: if 'timestamp' in kwargs: return Time.fromtimestamp(kwargs['timestamp']) @@ -70,20 +71,21 @@ def __new__(cls, *args, **kwargs): kwargs['timestamp_string'], Time._DEFAULT_TIMESTAMP_FORMAT ) if set(kwargs.keys()) & Time._DATETIME_ARGS: - raise _exceptions.TypeError( + raise _TypeError( 'missing input arguments, verify usage options.' ) - raise _exceptions.TypeError(f'unexpected key argument {kwargs}') + raise _TypeError(f'unexpected key argument {kwargs}') + if len(kwargs) == 2: if set(kwargs.keys()) == {'timestamp_string', 'timestamp_format'}: return Time.strptime( kwargs['timestamp_string'], kwargs['timestamp_format'] ) if set(kwargs.keys()) & Time._DATETIME_ARGS: - raise _exceptions.TypeError( + raise _TypeError( 'missing input arguments, verify usage options.' ) - raise _exceptions.TypeError( + raise _TypeError( f'unexpected key arguments {list(kwargs.keys())}' ) return super().__new__(cls, *args, **kwargs) From 2f8ce32e43963d3daff9253893c741ac30373840 Mon Sep 17 00:00:00 2001 From: Fernando Date: Thu, 26 Mar 2026 08:58:45 -0300 Subject: [PATCH 06/60] ENH: (CLTARC.TIME) Re-implement get_time_intervals. previous implementation was obscure and had a bug. last interval had duration larger than specified by `interval` input. 
--- siriuspy/siriuspy/clientarch/time.py | 52 +++++++++++++++------------- 1 file changed, 27 insertions(+), 25 deletions(-) diff --git a/siriuspy/siriuspy/clientarch/time.py b/siriuspy/siriuspy/clientarch/time.py index 12543552c..504981679 100644 --- a/siriuspy/siriuspy/clientarch/time.py +++ b/siriuspy/siriuspy/clientarch/time.py @@ -3,6 +3,8 @@ from calendar import timegm as _timegm from datetime import datetime as _datetime, timedelta as _timedelta +import numpy as _np + from .exceptions import TypeError as _TypeError @@ -123,29 +125,29 @@ def conv_to_epoch(time, datetime_format): def get_time_intervals( - time_start, time_stop, interval, return_isoformat=False + time_start: Time, time_stop: Time, interval: int, return_isoformat=False ): - """Return intervals of 'interval' duration from time_start to time_stop.""" - if time_start + interval >= time_stop: - timestamp_start = ( - time_start.get_iso8601() if return_isoformat else time_start - ) - timestamp_stop = ( - time_stop.get_iso8601() if return_isoformat else time_stop - ) - else: - t_start = time_start - t_stop = t_start + interval - timestamp_start = [t_start] - timestamp_stop = [t_stop] - while t_stop < time_stop: - t_start += interval - t_stop = t_stop + interval - if t_stop + interval > time_stop: - t_stop = time_stop - timestamp_start.append(t_start) - timestamp_stop.append(t_stop) - if return_isoformat: - timestamp_start = [t.get_iso8601() for t in timestamp_start] - timestamp_stop = [t.get_iso8601() for t in timestamp_stop] - return timestamp_start, timestamp_stop + """Break `time_start` to `time_stop` in intervals of `interval` seconds. + + Args: + time_start (Time): start time. + time_stop (Time): stop time. + interval (int): interval duration in seconds. + return_isoformat (bool): return in iso8601 format. + + Returns: + start_time (Time|str | list[Time|str]): start times. + stop_time (Time|str | list[Time|str]): stop times. 
+ """ + t_start = time_start.timestamp() + t_stop = time_stop.timestamp() + t_start = _np.arange(t_start, t_stop, interval) + t_stop = _np.r_[t_start[1:], t_stop] + t_start = [Time(t) for t in t_start] + t_stop = [Time(t) for t in t_stop] + if return_isoformat: + t_start = [t.get_iso8601() for t in t_start] + t_stop = [t.get_iso8601() for t in t_stop] + if len(t_start) == 1: + return t_start[0], t_stop[0] + return t_start, t_stop From b5031474514dc0b1dd4b8d2de0c1422fd9d5ddec Mon Sep 17 00:00:00 2001 From: Fernando Date: Thu, 26 Mar 2026 14:08:38 -0300 Subject: [PATCH 07/60] ENH: (CLTARC.TIME) Improve and standardize constructor of Time class. previous implementation of constructor was obscure and didn't accept some standard inputs, such as the result from calling Time.get_iso8601(). --- siriuspy/siriuspy/clientarch/time.py | 131 +++++++++++++++------------ 1 file changed, 75 insertions(+), 56 deletions(-) diff --git a/siriuspy/siriuspy/clientarch/time.py b/siriuspy/siriuspy/clientarch/time.py index 504981679..da22f9b7d 100644 --- a/siriuspy/siriuspy/clientarch/time.py +++ b/siriuspy/siriuspy/clientarch/time.py @@ -5,21 +5,27 @@ import numpy as _np -from .exceptions import TypeError as _TypeError - class Time(_datetime): - """Time conversion class. + """Time class.""" + + _DEFAULT_TIMESTAMP_FORMAT = '%Y-%m-%d %H:%M:%S.%f' - Usage options: + def __new__(cls, *args, **kwargs): # noqa: D417, C901 + """Create Time object. + + Usage options: + + Time(datetime) + datetime is a keyword/positional argument of datetime|Time class. Time(timestamp) - timestamp is a float/int keyword/positional argument. + timestamp is a float|int keyword/positional argument. Time(timestamp_string) Time(timestamp_string, timestamp_format='%Y-%m-%d %H:%M:%S.%f') - timestamp_string is a str keyword/positional argument. - timestamp_format is an optional keyword argument for string - formating. Defaults to '%Y-%m-%d %H:%M:%S.%f'. + `timestamp_string` is a str keyword/positional argument. 
+ `timestamp_format` is an optional keyword argument for string + formating. Defaults to '%Y-%m-%d %H:%M:%S.%f' or iso8601. Time(year, month, day) Time(year, month, day, hour) @@ -31,66 +37,79 @@ class Time(_datetime): are integer keyword/positional arguments. tzinfo must be None or of a tzinfo subclass keyword/positional argument. - """ - _DEFAULT_TIMESTAMP_FORMAT = '%Y-%m-%d %H:%M:%S.%f' - _DATETIME_ARGS = { - 'year', - 'month', - 'day', - 'hour', - 'minute', - 'second', - 'microsecond', - 'tzinfo', - } - - def __new__(cls, *args, **kwargs): - """New object.""" + Any of the above options (apart from the last) can be used with an + additional keyword argument for `tzinfo`. + + Args: + datetime (datetime|Time): keyword/positional argument. + timestamp (float|int): keyword/positional argument. + timestamp_string (str): keyword/positional argument. + timestamp_format (str): keyword argument for string formating. + year (int): keyword/positional argument. + month (int): keyword/positional argument. + day (int): keyword/positional argument. + hour (int): keyword/positional argument. + minute (int): keyword/positional argument. + second (int): keyword/positional argument. + microsecond (int): keyword/positional argument. + tzinfo (tzinfo): keyword/positional argument. Defaults to None. 
+ """ if not args and not kwargs: - raise _TypeError( + raise TypeError( 'no arguments found to build Time object' ) if len(args) == 1: - if isinstance(args[0], (float, int)): - return Time.fromtimestamp(args[0]) - if isinstance(args[0], str): - timestamp_format = ( - kwargs['timestamp_format'] - if 'timestamp_format' in kwargs - else Time._DEFAULT_TIMESTAMP_FORMAT + arg = args[0] + dic_ = { + 'timestamp': (int, float), + 'timestamp_string': (str, ), + 'datetime': (_datetime, ), + } + if not isinstance(arg, sum(dic_.values(), ())): + raise TypeError( + f'Argument of unexpected type {type(arg)}' ) - return Time.strptime(args[0], timestamp_format) - raise _TypeError( - f'argument of unexpected type {type(args[0])}' - ) - if len(kwargs) == 1: - if 'timestamp' in kwargs: - return Time.fromtimestamp(kwargs['timestamp']) - if 'timestamp_string' in kwargs: - return Time.strptime( - kwargs['timestamp_string'], Time._DEFAULT_TIMESTAMP_FORMAT - ) - if set(kwargs.keys()) & Time._DATETIME_ARGS: - raise _TypeError( - 'missing input arguments, verify usage options.' + for key, typ in dic_.items(): + if isinstance(arg, typ) and key not in kwargs: + kwargs[key] = arg + break + else: + raise TypeError( + 'Conflicting positional and keyword arguments.' ) - raise _TypeError(f'unexpected key argument {kwargs}') - if len(kwargs) == 2: - if set(kwargs.keys()) == {'timestamp_string', 'timestamp_format'}: - return Time.strptime( - kwargs['timestamp_string'], kwargs['timestamp_format'] + if not {'timestamp', 'timestamp_string'} - kwargs.keys(): + raise TypeError( + 'Conflicting positional and keyword arguments.' ) - if set(kwargs.keys()) & Time._DATETIME_ARGS: - raise _TypeError( - 'missing input arguments, verify usage options.' + elif len(args) == 8: + if 'tzinfo' in kwargs: + raise TypeError( + 'Conflicting positional and keyword arguments.' 
) - raise _TypeError( - f'unexpected key arguments {list(kwargs.keys())}' + kwargs['tzinfo'] = args[7] + args = args[:7] + + tim = None + if 'datetime' in kwargs: + tim = Time.fromtimestamp(kwargs['datetime'].timestamp()) + elif 'timestamp' in kwargs: + tim = Time.fromtimestamp(kwargs['timestamp']) + elif 'timestamp_string' in kwargs: + ts_fmt = kwargs.get( + 'timestamp_format', Time._DEFAULT_TIMESTAMP_FORMAT ) - return super().__new__(cls, *args, **kwargs) + ts_str = kwargs['timestamp_string'] + try: + tim = Time.strptime(ts_str, ts_fmt) + except ValueError: + return Time.fromisoformat(ts_str) + else: + tim = super().__new__(cls, *args, **kwargs) + + return tim.replace(kwargs.get('tzinfo', tim.tzinfo)) def get_iso8601(self): """Get iso8601 format.""" From 2b4ba541d13159a83f4392f6e879a8620f0e0cea Mon Sep 17 00:00:00 2001 From: Fernando Date: Thu, 26 Mar 2026 14:09:18 -0300 Subject: [PATCH 08/60] TST: (CLTARC.TIME) Implement tests for Time and get_time_intervals. --- siriuspy/tests/clientarch/__init__.py | 0 siriuspy/tests/clientarch/test_clientarch.py | 165 +++++++++++++++++++ 2 files changed, 165 insertions(+) create mode 100644 siriuspy/tests/clientarch/__init__.py create mode 100644 siriuspy/tests/clientarch/test_clientarch.py diff --git a/siriuspy/tests/clientarch/__init__.py b/siriuspy/tests/clientarch/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/siriuspy/tests/clientarch/test_clientarch.py b/siriuspy/tests/clientarch/test_clientarch.py new file mode 100644 index 000000000..6fcbd86cb --- /dev/null +++ b/siriuspy/tests/clientarch/test_clientarch.py @@ -0,0 +1,165 @@ + + + + + + + + + +#!/usr/bin/env python-sirius + +"""Test the archiver client class.""" +from unittest import TestCase +import datetime + +from siriuspy.clientarch.time import Time, get_time_intervals + +import siriuspy.util as util + + +class TestClientArchTime(TestCase): + """Test update and delete config meets requirements.""" + + def test_constructor(self): + """Test 
api.""" + tz_local = datetime.datetime.now().astimezone().tzinfo + tim_dt = datetime.datetime(2025, 1, 8, 10, 13, 14, 4587, tz_local) + tim_dt_naive = datetime.datetime(2025, 1, 8, 10, 13, 14, 4587) + try: + + tim_naive = Time(2025, 1, 8) + tim_naive = Time(2025, 1, 8, 10) + tim_naive = Time(2025, 1, 8, 10, 13) + tim_naive = Time(2025, 1, 8, 10, 13, 14) + tim_naive = Time(2025, 1, 8, 10, 13, 14, 4587) + + tim_ts1 = Time(tim_naive.timestamp()) + tim_ts2 = Time(tim_naive.strftime( + tim_naive._DEFAULT_TIMESTAMP_FORMAT) + ) + tim_ts3 = Time(tim_naive.get_iso8601()) + tim_ts4 = Time(tim_dt_naive) + tim_ts5 = Time(tim_dt) + except Exception as err: + self.fail(err) + + self.assertEqual(tim_ts1, tim_naive) + self.assertEqual(tim_ts2, tim_naive) + self.assertNotEqual(tim_ts3, tim_naive) + self.assertEqual(tim_ts4, tim_naive) + self.assertNotEqual(tim_ts5, tim_naive) + + tz_info = datetime.timezone(datetime.timedelta(seconds=-3600)) + try: + tim = Time(2025, 1, 8, 10, 13, 14, 4587, tz_info) + tim = Time(2025, 1, 8, tzinfo=tz_info) + tim = Time(2025, 1, 8, 10, tzinfo=tz_info) + tim = Time(2025, 1, 8, 10, 13, tzinfo=tz_info) + tim = Time(2025, 1, 8, 10, 13, 14, tzinfo=tz_info) + tim = Time(2025, 1, 8, 10, 13, 14, 4587, tzinfo=tz_info) + + tim_ts1 = Time(tim_naive.timestamp(), tzinfo=tz_info) + tim_ts2 = Time( + tim_naive.strftime(tim_naive._DEFAULT_TIMESTAMP_FORMAT), + tzinfo=tz_info + ) + tim_ts3 = Time(tim_naive.get_iso8601(), tzinfo=tz_info) + tim_ts4 = Time(tim_dt) + tim_ts5 = Time(tim_dt, tzinfo=tz_info) + except Exception as err: + self.fail(err) + + self.assertNotEqual(tim, tim_naive) + self.assertEqual(tim_ts1, tim) + self.assertEqual(tim_ts2, tim) + self.assertEqual(tim_ts3, tim) + self.assertNotEqual(tim_ts4, tim) + self.assertEqual(tim_ts5, tim) + + with self.assertRaises(ValueError): + Time('2025-01-ladieno') + with self.assertRaises(TypeError): + Time((tim, )) + ts_int = tim.timestamp() + ts_str = tim.get_iso8601() + with self.assertRaises(TypeError): + 
Time(ts_int, timestamp=ts_int) + with self.assertRaises(TypeError): + Time(ts_str, timestamp=ts_str) + with self.assertRaises(TypeError): + Time(ts_int, timestamp_string=ts_int) + with self.assertRaises(TypeError): + Time(ts_str, timestamp=ts_int) + with self.assertRaises(TypeError): + Time(timestamp=ts_int, timestamp_string=ts_str) + with self.assertRaises(TypeError): + Time(timestamp_string=ts_int) + with self.assertRaises(TypeError): + Time(timestamp=ts_str) + + def test_get_time_intervals(self): + """Test get_time_intervals.""" + time_start = Time(2026, 1, 13, 0, 0, 0, 345) + time_stop = time_start + 24*3600 + interval = 3600*10 + + tst_corr = [ + '2026-01-13T00:00:00.000345-03:00', + '2026-01-13T10:00:00.000345-03:00', + '2026-01-13T20:00:00.000345-03:00' + ] + tsp_corr = [ + '2026-01-13T10:00:00.000345-03:00', + '2026-01-13T20:00:00.000345-03:00', + '2026-01-14T00:00:00.000345-03:00' + ] + tst, tsp = get_time_intervals( + time_start, time_stop, interval, return_isoformat=True + ) + self.assertEqual(tst, tst_corr) + self.assertEqual(tsp, tsp_corr) + + tst_corr = [Time(t) for t in tst_corr] + tsp_corr = [Time(t) for t in tsp_corr] + tst, tsp = get_time_intervals( + time_start, time_stop, interval, return_isoformat=False + ) + self.assertEqual(tst, tst_corr) + self.assertEqual(tsp, tsp_corr) + + time_stop = time_start + 4*3600 + tst, tsp = get_time_intervals( + time_start, time_stop, interval, return_isoformat=False + ) + self.assertEqual(tst, time_start) + self.assertEqual(tsp, time_stop) + + +class TestConfigServiceConTimestamp(TestCase): + """Test response error handling.""" + + def _test_conv_timestamp(self): + """Test timestamp conversion.""" + # TODO: NOT WORKING ON TRAVIS + samples = { + ("Dec 11, 2017", 1512957600.0), + ("12/11/2017", 1512957600.0), + ("2017/12/11", 1512957600.0), + ("2017-12-11", 1512957600.0), + ("Dec 11 2017 14:00:00", 1513008000.0), + ("12/11/2017 14:00:00", 1513008000.0), + ("2017/12/11 14:00:00", 1513008000.0), + ("2017-12-11 
14:00:00", 1513008000.0), + ("2017-12-11T14:00:00", 1513008000.0), + ("2017-12-11 14:00:00+01:00", 1512997200.0), + ("2017-12-11T14:00:00+01:00", 1512997200.0), + ("2017-12-11T14:00:00.45", 1513008000.45), + } + + for sample in samples: + date_string = sample[0] + timestamp = sample[1] + self.assertEqual( + ConfigDBClient.conv_timestamp_txt_2_flt(date_string), + timestamp) From 33545ff05daaad81def882149514f8b6566112c6 Mon Sep 17 00:00:00 2001 From: Fernando Date: Thu, 26 Mar 2026 14:20:28 -0300 Subject: [PATCH 09/60] ENH: (CLTARC.EXEP) Add PayloadError to the list of exceptions. --- siriuspy/siriuspy/clientarch/exceptions.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/siriuspy/siriuspy/clientarch/exceptions.py b/siriuspy/siriuspy/clientarch/exceptions.py index 9d5939069..9a7e35957 100644 --- a/siriuspy/siriuspy/clientarch/exceptions.py +++ b/siriuspy/siriuspy/clientarch/exceptions.py @@ -2,6 +2,8 @@ import asyncio as _asyncio +import aiohttp.client_exceptions as _aio_excep + class ClientArchError(Exception): """ClientArch Abstract Exception.""" @@ -15,6 +17,10 @@ class TimeoutError(ClientArchError, _asyncio.TimeoutError): """ClientArch Timeout Exception.""" +class PayloadError(ClientArchError, _aio_excep.ClientPayloadError): + """ClientArch Payload Exception.""" + + class RuntimeError(ClientArchError, RuntimeError): """ClientArch Runtime Exception.""" From 23b0a0be83a4d65de07ec6db7498a0669a36c6d8 Mon Sep 17 00:00:00 2001 From: Fernando Date: Thu, 26 Mar 2026 14:24:35 -0300 Subject: [PATCH 10/60] API: (CLTARC.CLT) Now the option get_request_url will return all urls created by getData, not only a simplified one. 
--- siriuspy/siriuspy/clientarch/client.py | 18 ++++-------------- 1 file changed, 4 insertions(+), 14 deletions(-) diff --git a/siriuspy/siriuspy/clientarch/client.py b/siriuspy/siriuspy/clientarch/client.py index cf5c8ce03..b0bac8836 100644 --- a/siriuspy/siriuspy/clientarch/client.py +++ b/siriuspy/siriuspy/clientarch/client.py @@ -282,24 +282,11 @@ def getData( process_str += '_' + str(int(stddev)) pvname = [process_str + '(' + pvn + ')' for pvn in pvname] - if get_request_url: - tstart = _urllib.parse.quote(timestamp_start[0]) - tstop = _urllib.parse.quote(timestamp_stop[-1]) - url = [ - self._create_url( - method='getData.json', - pv=pvn, - **{'from': tstart, 'to': tstop}, - ) - for pvn in pvname - ] - return url[0] if len(pvname) == 1 else url - pvn2idcs = dict() all_urls = list() for i, pvn in enumerate(pvname): urls = [] - for tstart, tstop in zip(timestamp_start, timestamp_stop): + for tstart, tstop in zip(tstamps_start, tstamps_stop): urls.append( self._create_url( method='getData.json', @@ -315,6 +302,9 @@ def getData( end = len(all_urls) pvn2idcs[pvname_orig[i]] = _np.arange(ini, end) + if get_request_url: + return all_urls[0] if len(all_urls) == 1 else all_urls + resps = self._make_request(all_urls, return_json=True) if not resps: return None From 22ab6e905944fb66060eb9af933e23be05ad676d Mon Sep 17 00:00:00 2001 From: Fernando Date: Thu, 26 Mar 2026 14:47:03 -0300 Subject: [PATCH 11/60] API: (CLTARC.CLT) Move property query_bin_interval to ClientArchiver. now the properties in PVData and PVDataSet are called `query_bin_interval` instead of `parallel_query_bin_interval` because all interaction with server will be parallel. If a single request is desired, than query_bin_interval should be modified accordingly. 
--- siriuspy/siriuspy/clientarch/client.py | 56 ++++++++++++++---- siriuspy/siriuspy/clientarch/pvarch.py | 79 ++++++-------------------- 2 files changed, 62 insertions(+), 73 deletions(-) diff --git a/siriuspy/siriuspy/clientarch/client.py b/siriuspy/siriuspy/clientarch/client.py index b0bac8836..9b1507dfa 100644 --- a/siriuspy/siriuspy/clientarch/client.py +++ b/siriuspy/siriuspy/clientarch/client.py @@ -15,7 +15,10 @@ import numpy as _np import urllib3 as _urllib3 -from aiohttp import ClientSession as _ClientSession +from aiohttp import ( + client_exceptions as _aio_exceptions, + ClientSession as _ClientSession +) try: from lzstring import LZString as _LZString @@ -24,12 +27,13 @@ from .. import envars as _envars from . import exceptions as _exceptions -from .time import Time as _Time +from .time import get_time_intervals as _get_time_intervals, Time as _Time class ClientArchiver: """Archiver Data Fetcher class.""" + DEF_QUERY_BIN_INTERVAL = 12 * 60 * 60 # 12h DEFAULT_TIMEOUT = 5.0 # [s] SERVER_URL = _envars.SRVURL_ARCHIVER ENDPOINT = '/mgmt/bpl' @@ -46,6 +50,7 @@ def __init__(self, server_url=None, timeout=None): self._url = server_url or self.SERVER_URL self._request_url = None self._thread = self._loop = None + self._query_bin_interval = self.DEF_QUERY_BIN_INTERVAL self.connect() _urllib3.disable_warnings(_urllib3.exceptions.InsecureRequestWarning) @@ -120,6 +125,20 @@ def server_url(self, url): self.logout() self._url = url + @property + def query_bin_interval(self): + """Parallel query bin interval.""" + return self._query_bin_interval + + @query_bin_interval.setter + def query_bin_interval(self, new_intvl): + if not isinstance(new_intvl, (float, int)): + raise _exceptions.TypeError( + 'expected argument of type float or int, got ' + + str(type(new_intvl)) + ) + self._query_bin_interval = new_intvl + @property def last_requested_url(self): """.""" @@ -261,17 +280,34 @@ def getData( """ if isinstance(pvname, str): pvname = [pvname] - if 
isinstance(timestamp_start, str): + + if not isinstance(timestamp_start, (list, tuple)): timestamp_start = [timestamp_start] - if isinstance(timestamp_stop, str): + if not isinstance(timestamp_stop, (list, tuple)): timestamp_stop = [timestamp_stop] - if not isinstance(timestamp_start, (list, tuple)) or not isinstance( - timestamp_stop, (list, tuple) - ): - raise _exceptions.TypeError( - "'timestampstart' and 'timestamp_stop' arguments must be " - 'timestamp strings or iterable.' + + if len(timestamp_start) != len(timestamp_stop): + raise _exceptions.IndexError( + '`timestamp_start` and `timestamp_stop` must have same length.' + ) + + tstamps_start = [] + tstamps_stop = [] + for tst, tsp in zip(timestamp_start, timestamp_stop): # noqa: B905 + try: + tst = _Time(tst) + tsp = _Time(tsp) + except (TypeError, ValueError) as err: + raise _exceptions.TypeError( + '`timestamp_start` and `timestamp_stop` must be either ' + 'timestamp string, integer timestamp or Time objects. ' + 'Or an iterable of these objects.' + ) from err + tstarts, tstops = _get_time_intervals( + tst, tsp, self.query_bin_interval, return_isoformat=True ) + tstamps_start.extend(tstarts) + tstamps_stop.extend(tstops) pvname_orig = list(pvname) if process_type: diff --git a/siriuspy/siriuspy/clientarch/pvarch.py b/siriuspy/siriuspy/clientarch/pvarch.py index ff89062c0..0cff51e72 100644 --- a/siriuspy/siriuspy/clientarch/pvarch.py +++ b/siriuspy/siriuspy/clientarch/pvarch.py @@ -9,7 +9,7 @@ from .. import envars as _envars from . 
import exceptions as _exceptions from .client import ClientArchiver as _ClientArchiver -from .time import get_time_intervals as _get_time_intervals, Time as _Time +from .time import Time as _Time class _Base: @@ -77,6 +77,15 @@ def connected(self): return False return self.connector.connected + @property + def query_bin_interval(self): + """Parallel query bin interval.""" + return self.connector._query_bin_interval + + @query_bin_interval.setter + def query_bin_interval(self, new_intvl): + self.connector.query_bin_interval = new_intvl + def switch_to_online_data(self): """.""" if self.connector: @@ -335,20 +344,6 @@ def request_url(self): ) return url - @property - def parallel_query_bin_interval(self): - """Parallel query bin interval.""" - return self._parallel_query_bin_interval - - @parallel_query_bin_interval.setter - def parallel_query_bin_interval(self, new_intvl): - if not isinstance(new_intvl, (float, int)): - raise _exceptions.TypeError( - 'expected argument of type float or int, got ' - + str(type(new_intvl)) - ) - self._parallel_query_bin_interval = new_intvl - @property def timestamp(self): """Timestamp data.""" @@ -369,7 +364,7 @@ def severity(self): """Severity data.""" return self._severity - def update(self, mean_sec=None, parallel=True, timeout=None): + def update(self, mean_sec=None, timeout=None): """Update.""" self.connect() if timeout is not None: @@ -379,22 +374,10 @@ def update(self, mean_sec=None, parallel=True, timeout=None): return process_type = 'mean' if mean_sec is not None else '' - interval = self.parallel_query_bin_interval - if parallel: - timestamp_start, timestamp_stop = _get_time_intervals( - self.time_start, - self.time_stop, - interval, - return_isoformat=True, - ) - else: - timestamp_start = self.time_start.get_iso8601() - timestamp_stop = self.time_stop.get_iso8601() - data = self.connector.getData( self._pvname, - timestamp_start, - timestamp_stop, + self.time_start, + self.time_stop, process_type=process_type, 
interval=mean_sec, ) @@ -585,25 +568,7 @@ def archived(self): archived = set(self._pvnames) - set(self.not_archived) return list(archived) - @property - def parallel_query_bin_interval(self): - """Parallel query bin interval.""" - return self._parallel_query_bin_interval - - @parallel_query_bin_interval.setter - def parallel_query_bin_interval(self, new_intvl): - if not isinstance(new_intvl, (float, int)): - raise _exceptions.TypeError( - 'expected argument of type float or int, got ' - + str(type(new_intvl)) - ) - self._parallel_query_bin_interval = new_intvl - for pvname in self._pvnames: - self._pvdata[ - pvname - ].parallel_query_bin_interval = self._parallel_query_bin_interval - - def update(self, mean_sec=None, parallel=True, timeout=None): + def update(self, mean_sec=None, timeout=None): """Update.""" self.connect() if timeout is not None: @@ -613,22 +578,10 @@ def update(self, mean_sec=None, parallel=True, timeout=None): return process_type = 'mean' if mean_sec is not None else '' - interval = self.parallel_query_bin_interval - if parallel: - timestamp_start, timestamp_stop = _get_time_intervals( - self.time_start, - self.time_stop, - interval, - return_isoformat=True, - ) - else: - timestamp_start = self.time_start.get_iso8601() - timestamp_stop = self.time_stop.get_iso8601() - data = self.connector.getData( self._pvnames, - timestamp_start, - timestamp_stop, + self.time_start, + self.time_stop, process_type=process_type, interval=mean_sec, ) From a6dd441682e80ff8c641b9bb2584e08bcf0a601c Mon Sep 17 00:00:00 2001 From: Fernando Date: Thu, 26 Mar 2026 14:52:51 -0300 Subject: [PATCH 12/60] MNT: (CLTARC) Remove unused code relate to old parallel_query_bin_interval. 
--- siriuspy/siriuspy/clientarch/client.py | 2 +- siriuspy/siriuspy/clientarch/pvarch.py | 12 +----------- 2 files changed, 2 insertions(+), 12 deletions(-) diff --git a/siriuspy/siriuspy/clientarch/client.py b/siriuspy/siriuspy/clientarch/client.py index 9b1507dfa..284e1ba2f 100644 --- a/siriuspy/siriuspy/clientarch/client.py +++ b/siriuspy/siriuspy/clientarch/client.py @@ -127,7 +127,7 @@ def server_url(self, url): @property def query_bin_interval(self): - """Parallel query bin interval.""" + """Query bin interval.""" return self._query_bin_interval @query_bin_interval.setter diff --git a/siriuspy/siriuspy/clientarch/pvarch.py b/siriuspy/siriuspy/clientarch/pvarch.py index 0cff51e72..a94c4b052 100644 --- a/siriuspy/siriuspy/clientarch/pvarch.py +++ b/siriuspy/siriuspy/clientarch/pvarch.py @@ -13,7 +13,6 @@ class _Base: - DEF_PARALLEL_QUERY_BIN_INTERVAL = 12 * 60 * 60 # 12h def __init__(self, connector=None, offline_data=False): self._connector = None @@ -79,7 +78,7 @@ def connected(self): @property def query_bin_interval(self): - """Parallel query bin interval.""" + """Query bin interval.""" return self.connector._query_bin_interval @query_bin_interval.setter @@ -323,9 +322,6 @@ def __init__(self, pvname, connector=None, offline_data=False): self._value = None self._status = None self._severity = None - self._parallel_query_bin_interval = ( - _Base.DEF_PARALLEL_QUERY_BIN_INTERVAL - ) @property def pvname(self): @@ -528,9 +524,6 @@ def __init__(self, pvnames, connector=None, offline_data=False): """Initialize.""" super().__init__(connector, offline_data=offline_data) self._pvnames = pvnames - self._parallel_query_bin_interval = ( - _Base.DEF_PARALLEL_QUERY_BIN_INTERVAL - ) self._pvdata = self._init_pvdatas(pvnames, self.connector) @property @@ -653,9 +646,6 @@ def _init_pvdatas(self, pvnames, connector): pvdata = dict() for pvname in pvnames: pvdata[pvname] = PVData(pvname, connector) - pvdata[ - pvname - ].parallel_query_bin_interval = 
self._parallel_query_bin_interval if self.time_start is not None: pvdata[pvname].time_start = self.time_start if self.time_stop is not None: From 4745a5dcb0a6a7649a46e855ed74056f77907eea Mon Sep 17 00:00:00 2001 From: Fernando Date: Thu, 26 Mar 2026 14:55:27 -0300 Subject: [PATCH 13/60] ENH: (CLTARC.CLT) Improve error messages in _get_request_response. --- siriuspy/siriuspy/clientarch/client.py | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/siriuspy/siriuspy/clientarch/client.py b/siriuspy/siriuspy/clientarch/client.py index 284e1ba2f..21afe0283 100644 --- a/siriuspy/siriuspy/clientarch/client.py +++ b/siriuspy/siriuspy/clientarch/client.py @@ -616,7 +616,16 @@ async def _get_request_response(self, url, session, return_json): _log.error(f'Error with URL {response.url}') response = None except _asyncio.TimeoutError as err: - raise _exceptions.TimeoutError from err + raise _exceptions.TimeoutError( + 'Timeout reached. Try to increase `timeout`.' + ) from err + except _aio_exceptions.ClientPayloadError as err: + raise _exceptions.PayloadError( + "Payload Error. Increasing `timeout` won't help. " + 'Try decreasing query_bin_interval, or decrease the ' + 'time interval for the acquisition.' + ) from err + return response async def _create_session(self, url, headers, payload, ssl): From f1f4a581d632a5518d76d7fcf2832c959ad3d90a Mon Sep 17 00:00:00 2001 From: Fernando Date: Thu, 26 Mar 2026 14:59:39 -0300 Subject: [PATCH 14/60] STY: (CLTARC) Apply ruff formatter to files.
--- siriuspy/siriuspy/clientarch/client.py | 2 +- siriuspy/siriuspy/clientarch/pvarch.py | 20 ++++++++++++-------- siriuspy/siriuspy/clientarch/time.py | 12 ++++-------- 3 files changed, 17 insertions(+), 17 deletions(-) diff --git a/siriuspy/siriuspy/clientarch/client.py b/siriuspy/siriuspy/clientarch/client.py index 21afe0283..9e0d404a5 100644 --- a/siriuspy/siriuspy/clientarch/client.py +++ b/siriuspy/siriuspy/clientarch/client.py @@ -22,7 +22,7 @@ try: from lzstring import LZString as _LZString -except: +except ModuleNotFoundError: _LZString = None from .. import envars as _envars diff --git a/siriuspy/siriuspy/clientarch/pvarch.py b/siriuspy/siriuspy/clientarch/pvarch.py index a94c4b052..e12e1efbf 100644 --- a/siriuspy/siriuspy/clientarch/pvarch.py +++ b/siriuspy/siriuspy/clientarch/pvarch.py @@ -3,8 +3,10 @@ from copy import deepcopy as _dcopy import numpy as _np -from mathphys.functions import load_pickle as _load_pickle, \ +from mathphys.functions import ( + load_pickle as _load_pickle, save_pickle as _save_pickle +) from .. import envars as _envars from . import exceptions as _exceptions @@ -13,7 +15,6 @@ class _Base: - def __init__(self, connector=None, offline_data=False): self._connector = None self._offline_data = offline_data @@ -161,7 +162,7 @@ def gen_archviewer_url_link( time_ref=None, pvoptnrpts=None, pvcolors=None, - pvusediff=False + pvusediff=False, ): """Generate a Archiver Viewer URL for the given PVs. @@ -199,7 +200,8 @@ def gen_archviewer_url_link( time_ref=time_ref, pvoptnrpts=pvoptnrpts, pvcolors=pvcolors, - pvusediff=pvusediff) + pvusediff=pvusediff, + ) return url @@ -389,7 +391,7 @@ def gen_archviewer_url_link( time_ref=None, pvoptnrpts=None, pvcolors=None, - pvusediff=False + pvusediff=False, ): """Generate a Archiver Viewer URL for the given PVs. 
@@ -431,7 +433,8 @@ def gen_archviewer_url_link( time_ref=time_ref, pvoptnrpts=pvoptnrpts, pvcolors=pvcolors, - pvusediff=pvusediff) + pvusediff=pvusediff, + ) return url def set_data(self, timestamp, value, status, severity): @@ -595,7 +598,7 @@ def gen_archviewer_url_link( time_ref=None, pvoptnrpts=None, pvcolors=None, - pvusediff=False + pvusediff=False, ): """Generate a Archiver Viewer URL for the given PVs. @@ -639,7 +642,8 @@ def gen_archviewer_url_link( time_ref=time_ref, pvoptnrpts=pvoptnrpts, pvcolors=pvcolors, - pvusediff=pvusediff) + pvusediff=pvusediff, + ) return url def _init_pvdatas(self, pvnames, connector): diff --git a/siriuspy/siriuspy/clientarch/time.py b/siriuspy/siriuspy/clientarch/time.py index da22f9b7d..94ca72fef 100644 --- a/siriuspy/siriuspy/clientarch/time.py +++ b/siriuspy/siriuspy/clientarch/time.py @@ -56,20 +56,16 @@ def __new__(cls, *args, **kwargs): # noqa: D417, C901 tzinfo (tzinfo): keyword/positional argument. Defaults to None. """ if not args and not kwargs: - raise TypeError( - 'no arguments found to build Time object' - ) + raise TypeError('no arguments found to build Time object') if len(args) == 1: arg = args[0] dic_ = { 'timestamp': (int, float), - 'timestamp_string': (str, ), - 'datetime': (_datetime, ), + 'timestamp_string': (str,), + 'datetime': (_datetime,), } if not isinstance(arg, sum(dic_.values(), ())): - raise TypeError( - f'Argument of unexpected type {type(arg)}' - ) + raise TypeError(f'Argument of unexpected type {type(arg)}') for key, typ in dic_.items(): if isinstance(arg, typ) and key not in kwargs: From d7bf29b45c79773a38026d35e249a0ddb5d91f5a Mon Sep 17 00:00:00 2001 From: Fernando Date: Thu, 26 Mar 2026 15:41:51 -0300 Subject: [PATCH 15/60] BUG: (CLTARC.OTHER) Adapt other siriuspy subpackages to new clientarch interface. 
--- siriuspy/siriuspy/clientarch/devices.py | 6 +++--- siriuspy/siriuspy/currinfo/main.py | 2 +- siriuspy/siriuspy/machshift/macreport.py | 14 ++++++++++---- 3 files changed, 14 insertions(+), 8 deletions(-) diff --git a/siriuspy/siriuspy/clientarch/devices.py b/siriuspy/siriuspy/clientarch/devices.py index e01969fc1..801aca0e6 100644 --- a/siriuspy/siriuspy/clientarch/devices.py +++ b/siriuspy/siriuspy/clientarch/devices.py @@ -40,7 +40,7 @@ def __init__(self, devname, propty='', connector=None): self._times = None self._values = None super().__init__(pvnames, connector=connector) - self._parallel_query_bin_interval = 3600 + self.query_bin_interval = 3600 @property def devnames(self): @@ -57,9 +57,9 @@ def values(self): """Return retrieved orbit interpolated values.""" return self._values - def update(self, mean_sec=None, parallel=True): + def update(self, mean_sec=None): """Update state by retrieving data.""" - super().update(mean_sec=mean_sec, parallel=parallel) + super().update(mean_sec=mean_sec) # interpolate data self._times, self._values = self._interpolate_data(mean_sec) diff --git a/siriuspy/siriuspy/currinfo/main.py b/siriuspy/siriuspy/currinfo/main.py index c85e58e64..c4a9823f4 100644 --- a/siriuspy/siriuspy/currinfo/main.py +++ b/siriuspy/siriuspy/currinfo/main.py @@ -55,7 +55,7 @@ def close(self): @staticmethod def _get_value_from_arch(pvname): carch = _ClientArch() - datetime = _datetime.now().isoformat() + '-03:00' + datetime = _datetime.now().astimezone().isoformat() return carch.getData(pvname, datetime, datetime) diff --git a/siriuspy/siriuspy/machshift/macreport.py b/siriuspy/siriuspy/machshift/macreport.py index 258b5faba..1356ea04d 100644 --- a/siriuspy/siriuspy/machshift/macreport.py +++ b/siriuspy/siriuspy/machshift/macreport.py @@ -1053,6 +1053,9 @@ def update(self): pvds.time_start = self._time_start pvds.time_stop = self._time_stop + init_bin_interval = self._connector.query_bin_interval + bin_interval = (self._time_start - 
self._time_stop).total_seconds() + 1 + self._update_log( 'Collecting archiver data ' f'({self.time_start.get_iso8601()} to' @@ -1062,26 +1065,29 @@ def update(self): # current _t0 = _time.time() - self._pvdata[self._current_pv].parallel_query_bin_interval = 60*60*6 + self._connector.query_bin_interval = 60 * 60 * 6 self._pvdata[self._current_pv].update(MacReport.QUERY_AVG_TIME) self._update_log(log_msg.format(self._current_pv, _time.time()-_t0)) + self._connector.query_bin_interval = bin_interval + # macshift, interlock and stability indicators for pvn in self._pvnames: if pvn == self._current_pv: continue - interval, parallel = None, False _t0 = _time.time() - self._pvdata[pvn].update(mean_sec=interval, parallel=parallel) + self._pvdata[pvn].update() self._update_log(log_msg.format(pvn, _time.time()-_t0)) # ps for group, pvdataset in self._pvdataset.items(): _t0 = _time.time() - pvdataset.update(parallel=False) + pvdataset.update() self._update_log(log_msg.format( 'SI PS '+group.capitalize(), _time.time()-_t0)) + self._connector.query_bin_interval = init_bin_interval + self._compute_stats() def plot_raw_data(self): From 908492163a33f82fa34be19b3cf4dfa7f9b48aa3 Mon Sep 17 00:00:00 2001 From: Fernando Date: Thu, 26 Mar 2026 15:42:26 -0300 Subject: [PATCH 16/60] TST: (CLTARC.TIME) fix tests. 
--- siriuspy/tests/clientarch/test_clientarch.py | 44 +------------------- 1 file changed, 2 insertions(+), 42 deletions(-) diff --git a/siriuspy/tests/clientarch/test_clientarch.py b/siriuspy/tests/clientarch/test_clientarch.py index 6fcbd86cb..71d79f702 100644 --- a/siriuspy/tests/clientarch/test_clientarch.py +++ b/siriuspy/tests/clientarch/test_clientarch.py @@ -1,21 +1,10 @@ - - - - - - - - - #!/usr/bin/env python-sirius """Test the archiver client class.""" -from unittest import TestCase import datetime +from unittest import TestCase -from siriuspy.clientarch.time import Time, get_time_intervals - -import siriuspy.util as util +from siriuspy.clientarch.time import get_time_intervals, Time class TestClientArchTime(TestCase): @@ -134,32 +123,3 @@ def test_get_time_intervals(self): ) self.assertEqual(tst, time_start) self.assertEqual(tsp, time_stop) - - -class TestConfigServiceConTimestamp(TestCase): - """Test response error handling.""" - - def _test_conv_timestamp(self): - """Test timestamp conversion.""" - # TODO: NOT WORKING ON TRAVIS - samples = { - ("Dec 11, 2017", 1512957600.0), - ("12/11/2017", 1512957600.0), - ("2017/12/11", 1512957600.0), - ("2017-12-11", 1512957600.0), - ("Dec 11 2017 14:00:00", 1513008000.0), - ("12/11/2017 14:00:00", 1513008000.0), - ("2017/12/11 14:00:00", 1513008000.0), - ("2017-12-11 14:00:00", 1513008000.0), - ("2017-12-11T14:00:00", 1513008000.0), - ("2017-12-11 14:00:00+01:00", 1512997200.0), - ("2017-12-11T14:00:00+01:00", 1512997200.0), - ("2017-12-11T14:00:00.45", 1513008000.45), - } - - for sample in samples: - date_string = sample[0] - timestamp = sample[1] - self.assertEqual( - ConfigDBClient.conv_timestamp_txt_2_flt(date_string), - timestamp) From db9ed8ca1a37bb106c39289de664f439fa9ff405 Mon Sep 17 00:00:00 2001 From: Fernando Date: Thu, 26 Mar 2026 17:22:25 -0300 Subject: [PATCH 17/60] BUG: (CLTARC) Fix bugs from last commits.. 
--- siriuspy/siriuspy/clientarch/client.py | 10 +++++++--- siriuspy/siriuspy/clientarch/time.py | 2 +- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/siriuspy/siriuspy/clientarch/client.py b/siriuspy/siriuspy/clientarch/client.py index 9e0d404a5..83032b2d7 100644 --- a/siriuspy/siriuspy/clientarch/client.py +++ b/siriuspy/siriuspy/clientarch/client.py @@ -306,10 +306,14 @@ def getData( tstarts, tstops = _get_time_intervals( tst, tsp, self.query_bin_interval, return_isoformat=True ) - tstamps_start.extend(tstarts) - tstamps_stop.extend(tstops) + if isinstance(tstarts, (list, tuple)): + tstamps_start.extend(tstarts) + tstamps_stop.extend(tstops) + else: + tstamps_start.append(tstarts) + tstamps_stop.append(tstops) - pvname_orig = list(pvname) + pvname_orig = list(pvnames) if process_type: process_str = process_type if interval is not None: diff --git a/siriuspy/siriuspy/clientarch/time.py b/siriuspy/siriuspy/clientarch/time.py index 94ca72fef..be1780082 100644 --- a/siriuspy/siriuspy/clientarch/time.py +++ b/siriuspy/siriuspy/clientarch/time.py @@ -105,7 +105,7 @@ def __new__(cls, *args, **kwargs): # noqa: D417, C901 else: tim = super().__new__(cls, *args, **kwargs) - return tim.replace(kwargs.get('tzinfo', tim.tzinfo)) + return tim.replace(tzinfo=kwargs.get('tzinfo', tim.tzinfo)) def get_iso8601(self): """Get iso8601 format.""" From 11f098997a5fcc69c9becb9a03c3168bf0fbd787 Mon Sep 17 00:00:00 2001 From: Fernando Date: Fri, 27 Mar 2026 18:12:22 -0300 Subject: [PATCH 18/60] API: (CLTARC.CLT) Major improvements in subpackage. 
Please, check change log at description of #1231 --- siriuspy/siriuspy/clientarch/client.py | 511 +++++++++++++----- siriuspy/siriuspy/clientarch/devices.py | 11 +- siriuspy/siriuspy/clientarch/pvarch.py | 449 +++++++++++---- siriuspy/siriuspy/currinfo/main.py | 2 +- siriuspy/siriuspy/machshift/macreport.py | 18 +- .../siriuspy/pwrsupply/tests/Untitled.ipynb | 8 +- siriuspy/tests/currinfo/test_main.py | 2 +- 7 files changed, 760 insertions(+), 241 deletions(-) diff --git a/siriuspy/siriuspy/clientarch/client.py b/siriuspy/siriuspy/clientarch/client.py index 83032b2d7..77d4a9fb0 100644 --- a/siriuspy/siriuspy/clientarch/client.py +++ b/siriuspy/siriuspy/clientarch/client.py @@ -8,6 +8,7 @@ import asyncio as _asyncio import logging as _log +import math as _math import urllib as _urllib from datetime import timedelta as _timedelta from threading import Thread as _Thread @@ -19,6 +20,7 @@ client_exceptions as _aio_exceptions, ClientSession as _ClientSession ) +from mathphys.functions import get_namedtuple as _get_namedtuple try: from lzstring import LZString as _LZString @@ -38,6 +40,53 @@ class ClientArchiver: SERVER_URL = _envars.SRVURL_ARCHIVER ENDPOINT = '/mgmt/bpl' + _REPORTS = { + 'DisconnectedPVs': 'getCurrentlyDisconnectedPVs', + 'PausedPVs': 'getPausedPVsReport', + 'EventRate': 'getEventRateReport', + 'StorageRate': 'getStorageRateReport', + 'RecentlyAddedPVs': 'getRecentlyAddedPVs', + 'RecentlyModifiedPVs': 'getRecentlyModifiedPVs', + 'LostConnections': 'getLostConnectionsReport', + 'LastKnownTimestamps': 'getSilentPVsReport', + 'DroppedEventsWrongTimestamp': 'getPVsByDroppedEventsTimestamp', + 'DroppedEventsBufferOverflow': 'getPVsByDroppedEventsBuffer', + 'DroppedEventsTypeChange': 'getPVsByDroppedEventsTypeChange', + } + ReportTypes = _get_namedtuple( + 'ReportTypes', _REPORTS.keys(), _REPORTS.values() + ) + _PROC_TYPES = { + 'None_': '', + 'TotalCount': 'ncount', + 'Mean': 'mean', + 'Median': 'median', + 'STD': 'std', + 'Variance': 'variance', + 
'Popvariance': 'popvariance', + 'Kurtosis': 'kurtosis', + 'Skewness': 'skewness', + 'Min': 'mini', + 'Max': 'maxi', + 'STDoverMean': 'jitter', + 'Count': 'count', + 'FirstSample': 'firstSample', + 'LastSample': 'lastSample', + 'FirstFill': 'firstFill', + 'LastFill': 'lastFill', + 'Linear': 'linear', + 'Loess': 'loess', + 'Optimized': 'optimized', + 'OptimLastSample': 'optimLastSample', + 'NthSample': 'nth', + 'SelectByChange': 'deadBand', + 'IgnoreOutliers': 'ignoreflyers', + 'Outliers': 'flyers', + } + ProcessingTypes = _get_namedtuple( + 'ProcessingTypes', _PROC_TYPES.keys(), _PROC_TYPES.values() + ) + def __delete__(self): """Turn off thread when deleting.""" self.shutdown() @@ -89,7 +138,7 @@ def connected(self): if not self._loop_alive(): return False try: - resp = self._make_request(self._url, return_json=False) + resp = self.make_request(self._url, return_json=False) return resp.status == 200 except _urllib.error.URLError: return False @@ -174,50 +223,92 @@ def logout(self): return resp return None - def getPVsInfo(self, pvnames): - """Get PVs Info.""" - if isinstance(pvnames, (list, tuple)): - pvnames = ','.join(pvnames) - url = self._create_url(method='getPVStatus', pv=pvnames) - resp = self._make_request(url, return_json=True) + def get_pvs_info(self, wildcards='*', max_num_pvs=-1): + """Get PVs Info. + + Args: + wildcards (str|list|tuple): Wildcards to match. + max_num_pvs (int): Maximum number of PVs to return. + + Returns: + list: List of dictionary with PVs details. 
+ + """ + if isinstance(wildcards, (list, tuple)): + wildcards = ','.join(wildcards) + + max_num_pvs = f'{int(max_num_pvs)}' + url = self._create_url( + method='getPVStatus', pv=wildcards, limit=max_num_pvs + ) + resp = self.make_request(url, return_json=True) return None if not resp else resp - def getAllPVs(self, pvnames): - """Get All PVs.""" - if isinstance(pvnames, (list, tuple)): - pvnames = ','.join(pvnames) - url = self._create_url(method='getAllPVs', pv=pvnames, limit='-1') - resp = self._make_request(url, return_json=True) + def get_all_pvs(self, wildcards='*', max_num_pvs=-1): + """Get All PVs matching wildcards. + + Args: + wildcards (str|list|tuple): Wildcards to match. + max_num_pvs (int): Maximum number of PVs to return. + + Returns: + list: List of dictionary with PVs details. + """ + if isinstance(wildcards, (list, tuple)): + wildcards = ','.join(wildcards) + + max_num_pvs = f'{int(max_num_pvs)}' + url = self._create_url( + method='getAllPVs', pv=wildcards, limit=max_num_pvs + ) + resp = self.make_request(url, return_json=True) return None if not resp else resp - def deletePVs(self, pvnames): + def delete_pvs(self, pvnames, delete_data=False): """Delete PVs.""" if not isinstance(pvnames, (list, tuple)): pvnames = (pvnames,) + + delete_data = 'true' if delete_data else 'false' for pvname in pvnames: url = self._create_url( - method='deletePV', pv=pvname, deleteData='true' + method='deletePV', pv=pvname, deleteData=delete_data ) - self._make_request(url, need_login=True) + self.make_request(url, need_login=True) + + def get_report(self, report_name='PausedPVs', max_num_pvs=None): + """Get Paused PVs Report. + + Args: + report_name (str): Report name. Use self.ReportTypes to get + all available reports. + max_num_pvs (int): Maximum number of PVs to return. 
- def getPausedPVsReport(self): - """Get Paused PVs Report.""" - url = self._create_url(method='getPausedPVsReport') - resp = self._make_request(url, return_json=True) + Returns: + dict: Report results. + + """ + method = getattr(self.ReportTypes, report_name) + if max_num_pvs is not None: + max_num_pvs = f'{int(max_num_pvs)}' + url = self._create_url(method=method, limit=max_num_pvs) + else: + url = self._create_url(method=method) + + resp = self.make_request(url, return_json=True) return None if not resp else resp - def getRecentlyModifiedPVs(self, limit=None, epoch_time=True): + def get_recently_modified_pvs(self, max_num_pvs=None, epoch_time=True): """Get list of PVs with recently modified PVTypeInfo. Currently version of the epics archiver appliance returns pvname list from oldest to newest modified timestamps. """ - method = 'getRecentlyModifiedPVs' - # get data - if limit is not None: - method += f'?limit={str(limit)}' - url = self._create_url(method=method) - resp = self._make_request(url, return_json=True) + resp = self.get_report( + self, + report_name=self.ReportTypes.RecentlyModifiedPVs, + max_num_pvs=max_num_pvs, + ) # convert to epoch, if the case if resp and epoch_time: @@ -230,56 +321,220 @@ def getRecentlyModifiedPVs(self, limit=None, epoch_time=True): return None if not resp else resp - def pausePVs(self, pvnames): + def pause_pvs(self, pvnames): """Pause PVs.""" if not isinstance(pvnames, (list, tuple)): pvnames = (pvnames,) for pvname in pvnames: url = self._create_url(method='pauseArchivingPV', pv=pvname) - self._make_request(url, need_login=True) + self.make_request(url, need_login=True) - def renamePV(self, oldname, newname): + def rename_pv(self, oldname, newname): """Rename PVs.""" url = self._create_url(method='renamePV', pv=oldname, newname=newname) - return self._make_request(url, need_login=True) + return self.make_request(url, need_login=True) - def resumePVs(self, pvnames): + def resume_pvs(self, pvnames): """Resume PVs.""" if not 
isinstance(pvnames, (list, tuple)): pvnames = (pvnames,) for pvname in pvnames: url = self._create_url(method='resumeArchivingPV', pv=pvname) - self._make_request(url, need_login=True) + self.make_request(url, need_login=True) - def getData( + def get_data( # noqa: D417 self, - pvname, + pvnames, timestamp_start, timestamp_stop, - process_type='', - interval=None, - stddev=None, - get_request_url=False, + query_bin_interval=None, + proc_type='', + proc_type_param1=None, + proc_type_param2=3.0, ): """Get archiver data. - pvname -- name of pv. - timestamp_start -- timestamp of interval start - Example: '2019-05-23T13:32:27.570Z' - timestamp_stop -- timestamp of interval stop - Example: '2019-05-23T13:32:27.570Z' - process_type -- data processing type to use. Can be: - '', 'mean', 'median', 'std', 'variance', - 'popvariance', 'kurtosis', 'skewness' - 'mini', 'maxi', 'jitter', 'count', 'ncount', - 'firstSample', 'lastSample', 'firstFill', 'lastFill', - 'nth', 'ignoreflyers' or 'flyers' - interval -- interval of the bin of data, in seconds - stddev -- number of standard deviations. - argument used in processing 'ignoreflyers' and 'flyers'. + Args: + pvnames (str|list|tuple): names of the PVs. + timestamp_start (str|int|Time|list|tuple): start time for query. + If it is a list or tuple, all PVs will be queried for each of + the time intervals. In this case, it must have the same length + as `timestamp_stop`. + timestamp_stop (str|int|Time|list|tuple): stop time for query. + If it is a list or tuple, all PVs will be queried for each of + the time intervals. In this case, it must have the same length + as `timestamp_start`. + query_bin_interval (float): overwrites `self.query_bin_interval`. + Defaults to `self.query_bin_interval`. Maximum interval for + queries. If + `timestamp_stop - timestamp_start > query_bin_interval`, + it will be split into parallel queries. + proc_type (str): data processing type to use for query. Defaults to + ''. 
For details on each operator, please, refer to the section + Processing of data of the following page: + https://epicsarchiver.readthedocs.io/en/latest/user/userguide.html + + The options implemented here are: + + The options below do not take any aditional parameter: + '' --> No processing, raw data is returned. + 'ncount' --> total number of updates in the whole interval. + + All types of processing below, require an aditional parameter, + controlled by the input `proc_type_param1`. Then the + refered statistics will be performed within this interval: + 'mean' + 'median' + 'std' + 'variance' + 'popvariance' --> population variance. + 'kurtosis' + 'skewness' + 'mini' --> same as min, which is also accepted by the archiver. + 'maxi' --> same as max, which is also accepted by the archiver. + 'jitter' --> std / mean for each bin. + 'count' --> number of updates in each bin. + 'firstSample' + 'lastSample' + 'firstFill' --> see url for difference to `'firstSample'`. + 'lastFill' --> see url for difference to `'lastSample'`. + 'linear' --> not sure, look at the archiver docs. + 'loess' --> not sure, look at the archiver docs. + + The processing below also use an aditional parameter, but its + meaning is different from the statistics above: + 'optimized' --> the parameter means the total number of points + to be returned, instead of the time interval. + 'optimLastSample' --> close to 'opimized'. See docs for diff. + 'nth' --> return every nth sample. + 'deadBand' --> similar to ADEL. Only return when values change + by a certain amount. + + For both statistics below a second parameter is needed to configure + acquisition, controlled by `proc_type_param2`. This + parameter controls the number of standard deviations to consider + in the filtering bellow. The default of this parameter is 3.0: + 'ignoreflyers' --> whether to ignore outliers + 'flyers' --> only return outliers + + proc_type_param1 (int): First parameter for data processing. See + `proc_type` for more details. 
+ proc_type_param2 (int): Second parameter for data processing. See + `proc_type` for more details. + + Returns: + dict: a dictionary with PV names as keys and data as values. + """ - if isinstance(pvname, str): - pvname = [pvname] + if isinstance(pvnames, str): + pvnames = [pvnames] + + urls, pvn2idcs = self.get_request_url_for_get_data( + pvnames, + timestamp_start, + timestamp_stop, + query_bin_interval=query_bin_interval, + proc_type=proc_type, + proc_type_param1=proc_type_param1, + proc_type_param2=proc_type_param2, + return_pv2indcs_dict=True, + ) + urls = [urls] if isinstance(urls, str) else urls + + resps = self.make_request(urls, return_json=True) + if not resps: + return None + + return self.process_resquest_of_get_data(pvnames, resps, pvn2idcs) + + def get_request_url_for_get_data( # noqa: C901, D417 + self, + pvnames, + timestamp_start, + timestamp_stop, + query_bin_interval=None, + proc_type=None, + proc_type_param1=None, + proc_type_param2=None, + return_pvn2idcs_dict=False, + ): + """Get url for data request in `get_data` function. + + Args: + pvnames (str|list|tuple): names of the PVs. + timestamp_start (str|int|Time|list|tuple): start time for query. + If it is a list or tuple, all PVs will be queried for each of + the time intervals. In this case, it must have the same length + as `timestamp_stop`. + timestamp_stop (str|int|Time|list|tuple): stop time for query. + If it is a list or tuple, all PVs will be queried for each of + the time intervals. In this case, it must have the same length + as `timestamp_start`. + query_bin_interval (float): overwrites `self.query_bin_interval`. + Defaults to `self.query_bin_interval`. Maximum interval for + queries. If + `timestamp_stop - timestamp_start > query_bin_interval`, + it will be split into parallel queries. + proc_type (str): data processing type to use for query. Defaults to + ''. 
For details on each operator, please, refer to the section + Processing of data of the following page: + https://epicsarchiver.readthedocs.io/en/latest/user/userguide.html + + The options implemented here are: + + The options below do not take any aditional parameter: + '' --> No processing, raw data is returned. + 'ncount' --> total number of updates in the whole interval. + + All types of processing below, require an aditional parameter, + controlled by the input `proc_type_param1`. Then the + refered statistics will be performed within this interval: + 'mean' + 'median' + 'std' + 'variance' + 'popvariance' --> population variance. + 'kurtosis' + 'skewness' + 'mini' --> same as min, which is also accepted by the archiver. + 'maxi' --> same as max, which is also accepted by the archiver. + 'jitter' --> std / mean for each bin. + 'count' --> number of updates in each bin. + 'firstSample' + 'lastSample' + 'firstFill' --> see url for difference to `'firstSample'`. + 'lastFill' --> see url for difference to `'lastSample'`. + 'linear' --> not sure, look at the archiver docs. + 'loess' --> not sure, look at the archiver docs. + + The processing below also use an aditional parameter, but its + meaning is different from the statistics above: + 'optimized' --> the parameter means the total number of points + to be returned, instead of the time interval. + 'optimLastSample' --> close to 'opimized'. See docs for diff. + 'nth' --> return every nth sample. + 'deadBand' --> similar to ADEL. Only return when values change + by a certain amount. + + For both statistics below a second parameter is needed to configure + acquisition, controlled by `proc_type_param2`. This + parameter controls the number of standard deviations to consider + in the filtering bellow. The default of this parameter is 3.0: + 'ignoreflyers' --> whether to ignore outliers + 'flyers' --> only return outliers + + proc_type_param1 (int): First parameter for data processing. See + `proc_type` for more details. 
+ proc_type_param2 (int): Second parameter for data processing. See + `proc_type` for more details. + return_pvn2idcs_dict (bool): whether to return a dictionary with + PV names as keys and indices as values. Defaults to False. + + Returns: + str|list|tuple: url or list of urls. + """ + if isinstance(pvnames, str): + pvnames = [pvnames] if not isinstance(timestamp_start, (list, tuple)): timestamp_start = [timestamp_start] @@ -291,6 +546,8 @@ def getData( '`timestamp_start` and `timestamp_stop` must have same length.' ) + bin_interval = query_bin_interval or self.query_bin_interval + tstamps_start = [] tstamps_stop = [] for tst, tsp in zip(timestamp_start, timestamp_stop): # noqa: B905 @@ -304,7 +561,7 @@ def getData( 'Or an iterable of these objects.' ) from err tstarts, tstops = _get_time_intervals( - tst, tsp, self.query_bin_interval, return_isoformat=True + tst, tsp, bin_interval, return_isoformat=True ) if isinstance(tstarts, (list, tuple)): tstamps_start.extend(tstarts) @@ -314,26 +571,30 @@ def getData( tstamps_stop.append(tstops) pvname_orig = list(pvnames) - if process_type: - process_str = process_type - if interval is not None: - process_str += '_' + str(int(interval)) - if 'flyers' in process_type and stddev is not None: - process_str += '_' + str(int(stddev)) - pvname = [process_str + '(' + pvn + ')' for pvn in pvname] + if proc_type: + process_str = proc_type + if proc_type != 'ncount' and proc_type_param1 is not None: + if 'deadBand' in process_str: + decim = -int(_math.log10(abs(proc_type_param1))) + 1 + process_str += f'_{proc_type_param1:{max(0, decim)}f}' + else: + process_str += f'_{int(proc_type_param1):d}' + if 'flyers' in proc_type and proc_type_param2 is not None: + process_str += f'_{proc_type_param2:.2f}' + pvnames = [process_str + '(' + pvn + ')' for pvn in pvnames] pvn2idcs = dict() all_urls = list() - for i, pvn in enumerate(pvname): + for i, pvn in enumerate(pvnames): urls = [] - for tstart, tstop in zip(tstamps_start, tstamps_stop): + 
for tst, tsp in zip(tstamps_start, tstamps_stop): # noqa: B905 urls.append( self._create_url( method='getData.json', pv=pvn, **{ - 'from': _urllib.parse.quote(tstart), - 'to': _urllib.parse.quote(tstop), + 'from': _urllib.parse.quote(tst), + 'to': _urllib.parse.quote(tsp), }, ) ) @@ -342,13 +603,23 @@ def getData( end = len(all_urls) pvn2idcs[pvname_orig[i]] = _np.arange(ini, end) - if get_request_url: - return all_urls[0] if len(all_urls) == 1 else all_urls + all_urls = all_urls[0] if len(all_urls) == 1 else all_urls + if return_pvn2idcs_dict: + return all_urls, pvn2idcs + return all_urls - resps = self._make_request(all_urls, return_json=True) - if not resps: - return None + def process_resquest_of_get_data(self, pvnames, resps, pvn2idcs): + """Process result of `self.get_data` request. + Args: + pvnames (list): list of PV names envolved in request. + resps (dict): output of `self.make_request` called from + `self.get_data`. + pvn2idcs (dict): list of pvnames to indices in `resps`. + + Returns: + pvn2resp (dict): dictionary with PVs data. 
+ """ pvn2resp = dict() for pvn, idcs in pvn2idcs.items(): _ts, _vs = _np.array([]), list() @@ -366,14 +637,12 @@ def getData( _st = _np.r_[_st, [v['status'] for v in data]] _sv = _np.r_[_sv, [v['severity'] for v in data]] if not _ts.size: - timestamp, value, status, severity = [None, None, None, None] + timestamp = value = status = severity = None else: _, _tsidx = _np.unique(_ts, return_index=True) - timestamp, status, severity = ( - _ts[_tsidx], - _st[_tsidx], - _sv[_tsidx], - ) + timestamp = _ts[_tsidx], + status = _st[_tsidx], + severity = _sv[_tsidx], value = [_vs[i] for i in _tsidx] pvn2resp[pvn] = dict( @@ -383,16 +652,16 @@ def getData( severity=severity, ) - if len(pvname) == 1: - return pvn2resp[pvname_orig[0]] + if len(pvnames) == 1: + return pvn2resp[pvnames[0]] return pvn2resp - def getPVDetails(self, pvname, get_request_url=False): + def get_pv_details(self, pvname, get_request_url=False): """Get PV Details.""" url = self._create_url(method='getPVDetails', pv=pvname) if get_request_url: return url - resp = self._make_request(url, return_json=True) + resp = self.make_request(url, return_json=True) return None if not resp else resp def switch_to_online_data(self): @@ -405,6 +674,23 @@ def switch_to_offline_data(self): self.server_url = _envars.SRVURL_ARCHIVER_OFFLINE_DATA self.session = None + def make_request(self, url, need_login=False, return_json=False): + """Make request. + + Args: + url (str|list|tuple): url or list of urls to request. + need_login (bool): whether request requires login. + return_json (bool): whether to return json response. + + Returns: + dict: dictionary with response. 
+ """ + self._request_url = url + coro = self._handle_request( + url, return_json=return_json, need_login=need_login + ) + return self._run_sync_coro(coro) + @staticmethod def gen_archviewer_url_link( pvnames, @@ -542,25 +828,16 @@ def _run_event_loop(self): finally: self._loop.close() - def _make_request(self, url, need_login=False, return_json=False): - """Make request.""" - self._request_url = url - coro = self._handle_request( - url, return_json=return_json, need_login=need_login - ) - return self._run_sync_coro(coro) - def _create_url(self, method, **kwargs): """Create URL.""" - url = self._url + url = self._url + self.ENDPOINT if method.startswith('getData.json'): - url += '/retrieval/data' - else: - url += self.ENDPOINT + url = self._url + '/retrieval/data' + url += '/' + method if kwargs: url += '?' - url += '&'.join(['{}={}'.format(k, v) for k, v in kwargs.items()]) + url += '&'.join([f'{k}={v}' for k, v in kwargs.items()]) return url def _run_sync_coro(self, coro): @@ -589,36 +866,24 @@ async def _handle_request(self, url, return_json=False, need_login=False): async def _get_request_response(self, url, session, return_json): """Get request response.""" + url = [url] if isinstance(url, str) else url try: - if isinstance(url, list): - response = await _asyncio.gather(*[ - session.get(u, ssl=False, timeout=self._timeout) - for u in url - ]) - if any([not r.ok for r in response]): - return None - if return_json: - jsons = list() - for res in response: - try: - data = await res.json() - jsons.append(data) - except ValueError: - _log.error(f'Error with URL {res.url}') - jsons.append(None) - response = jsons - else: - response = await session.get( - url, ssl=False, timeout=self._timeout - ) - if not response.ok: - return None - if return_json: + response = await _asyncio.gather(*[ + session.get(u, ssl=False, timeout=self._timeout) + for u in url + ]) + if any([not r.ok for r in response]): + return None + if return_json: + jsons = list() + for res in response: 
try: - response = await response.json() + data = await res.json() + jsons.append(data) except ValueError: - _log.error(f'Error with URL {response.url}') - response = None + _log.error('Error with URL %s', res.url) + jsons.append(None) + response = jsons except _asyncio.TimeoutError as err: raise _exceptions.TimeoutError( 'Timeout reached. Try to increase `timeout`.' @@ -630,6 +895,8 @@ async def _get_request_response(self, url, session, return_json): 'time interval for the aquisition.' ) from err + if len(url) == 1: + return response[0] return response async def _create_session(self, url, headers, payload, ssl): diff --git a/siriuspy/siriuspy/clientarch/devices.py b/siriuspy/siriuspy/clientarch/devices.py index 801aca0e6..bc911a10c 100644 --- a/siriuspy/siriuspy/clientarch/devices.py +++ b/siriuspy/siriuspy/clientarch/devices.py @@ -41,6 +41,8 @@ def __init__(self, devname, propty='', connector=None): self._values = None super().__init__(pvnames, connector=connector) self.query_bin_interval = 3600 + self.processing_type = self.ProcessingTypes.Mean + self.processing_type_param1 = 1 @property def devnames(self): @@ -57,12 +59,12 @@ def values(self): """Return retrieved orbit interpolated values.""" return self._values - def update(self, mean_sec=None): + def update(self, timeout=None): """Update state by retrieving data.""" - super().update(mean_sec=mean_sec) + super().update(timeout=timeout) # interpolate data - self._times, self._values = self._interpolate_data(mean_sec) + self._times, self._values = self._interpolate_data() # --- private methods --- @@ -71,9 +73,10 @@ def _get_pvnames(self): pvnames = [] return devnames, pvnames - def _interpolate_data(self, mean_sec): + def _interpolate_data(self): # calc mean_sec if not passed nr_pvs = len(self._pvdata) + mean_sec = self.processing_type_param1 if mean_sec is None: mean_sec = sum( map( diff --git a/siriuspy/siriuspy/clientarch/pvarch.py b/siriuspy/siriuspy/clientarch/pvarch.py index e12e1efbf..8b9933c5e 100644 
--- a/siriuspy/siriuspy/clientarch/pvarch.py +++ b/siriuspy/siriuspy/clientarch/pvarch.py @@ -18,8 +18,6 @@ class _Base: def __init__(self, connector=None, offline_data=False): self._connector = None self._offline_data = offline_data - self._time_start = None - self._time_stop = None self.connector = connector self.connect() @@ -27,7 +25,7 @@ def __init__(self, connector=None, offline_data=False): def is_archived(self): """Is archived.""" self.connect() - return self.connector.getPVDetails(self.pvname) is not None + return self.connector.get_pv_details(self.pvname) is not None def connect(self): """Connect.""" @@ -77,15 +75,6 @@ def connected(self): return False return self.connector.connected - @property - def query_bin_interval(self): - """Query bin interval.""" - return self.connector._query_bin_interval - - @query_bin_interval.setter - def query_bin_interval(self, new_intvl): - self.connector.query_bin_interval = new_intvl - def switch_to_online_data(self): """.""" if self.connector: @@ -96,64 +85,6 @@ def switch_to_offline_data(self): if self.connector: self.connector.switch_to_offline_data() - @property - def timestamp_start(self): - """Timestamp start.""" - if not self._time_start: - return None - return self._time_start.timestamp() - - @timestamp_start.setter - def timestamp_start(self, new_timestamp): - if not isinstance(new_timestamp, (float, int)): - raise _exceptions.TypeError( - 'expected argument of type float or int, got ' - + str(type(new_timestamp)) - ) - self._time_start = _Time(timestamp=new_timestamp) - - @property - def time_start(self): - """Time start.""" - return self._time_start - - @time_start.setter - def time_start(self, new_time): - if not isinstance(new_time, _Time): - raise _exceptions.TypeError( - 'expected argument of type Time, got ' + str(type(new_time)) - ) - self._time_start = new_time - - @property - def timestamp_stop(self): - """Timestamp stop.""" - if not self._time_stop: - return None - return self._time_stop.timestamp() - 
- @timestamp_stop.setter - def timestamp_stop(self, new_timestamp): - if not isinstance(new_timestamp, (float, int)): - raise _exceptions.TypeError( - 'expected argument of type float or int, got ' - + str(type(new_timestamp)) - ) - self._time_stop = _Time(timestamp=new_timestamp) - - @property - def time_stop(self): - """Time stop.""" - return self._time_stop - - @time_stop.setter - def time_stop(self, new_time): - if not isinstance(new_time, _Time): - raise _exceptions.TypeError( - 'expected argument of type Time, got ' + str(type(new_time)) - ) - self._time_stop = new_time - def gen_archviewer_url_link( self, pvnames, @@ -246,24 +177,32 @@ def __init__(self, pvname, connector=None): def request_url(self): """.""" self.connect() - url = self.connector.getPVDetails(self.pvname, get_request_url=True) + url = self.connector.get_pv_details(self.pvname, get_request_url=True) return url @property def is_archived(self): """.""" self.connect() - data = self.connector.getPVDetails(self.pvname) + data = self.connector.get_pv_details(self.pvname) if not data: return False return True - def update(self, timeout=None): + def update(self, timeout=None): # noqa: C901 """.""" self.connect() + if timeout is not None: + timeout0 = self.timeout self.timeout = timeout - data = self.connector.getPVDetails(self.pvname) + + try: + data = self.connector.get_pv_details(self.pvname) + finally: + if timeout is not None: + self.timeout = timeout0 + if not data: return False for datum in data: @@ -284,6 +223,7 @@ def update(self, timeout=None): self.is_paused = value.lower() == 'yes' elif field == 'Is this PV currently connected?': self.is_connected = value.lower() == 'yes' + return True def __str__(self): @@ -316,6 +256,8 @@ def __str__(self): class PVData(_Base): """Archive PV Data.""" + ProcessingTypes = _ClientArchiver.ProcessingTypes + def __init__(self, pvname, connector=None, offline_data=False): """Initialize.""" super().__init__(connector, offline_data=offline_data) @@ -324,6 
+266,12 @@ def __init__(self, pvname, connector=None, offline_data=False): self._value = None self._status = None self._severity = None + self._time_start = None + self._time_stop = None + self._query_bin_interval = self.connector.query_bin_interval + self._processing_type = self.ProcessingTypes.None_ + self._processing_type_param1 = None + self._processing_type_param2 = 3.0 # number of sigma @property def pvname(self): @@ -334,7 +282,7 @@ def pvname(self): def request_url(self): """Request url.""" self.connect() - url = self.connector.getData( + url = self.connector.get_data( self.pvname, self.time_start.get_iso8601(), self.time_stop.get_iso8601(), @@ -362,23 +310,145 @@ def severity(self): """Severity data.""" return self._severity - def update(self, mean_sec=None, timeout=None): + @property + def query_bin_interval(self): + """Query bin interval.""" + return self._query_bin_interval + + @query_bin_interval.setter + def query_bin_interval(self, new_intvl): + self._query_bin_interval = new_intvl + + @property + def timestamp_start(self): + """Timestamp start.""" + if not self._time_start: + return None + return self._time_start.timestamp() + + @timestamp_start.setter + def timestamp_start(self, new_timestamp): + if not isinstance(new_timestamp, (float, int)): + raise _exceptions.TypeError( + 'expected argument of type float or int, got ' + + str(type(new_timestamp)) + ) + self._time_start = _Time(timestamp=new_timestamp) + + @property + def time_start(self): + """Time start.""" + return self._time_start + + @time_start.setter + def time_start(self, new_time): + if not isinstance(new_time, _Time): + raise _exceptions.TypeError( + 'expected argument of type Time, got ' + str(type(new_time)) + ) + self._time_start = new_time + + @property + def timestamp_stop(self): + """Timestamp stop.""" + if not self._time_stop: + return None + return self._time_stop.timestamp() + + @timestamp_stop.setter + def timestamp_stop(self, new_timestamp): + if not 
isinstance(new_timestamp, (float, int)): + raise _exceptions.TypeError( + 'expected argument of type float or int, got ' + + str(type(new_timestamp)) + ) + self._time_stop = _Time(timestamp=new_timestamp) + + @property + def time_stop(self): + """Time stop.""" + return self._time_stop + + @time_stop.setter + def time_stop(self, new_time): + if not isinstance(new_time, _Time): + raise _exceptions.TypeError( + 'expected argument of type Time, got ' + str(type(new_time)) + ) + self._time_stop = new_time + + @property + def processing_type(self): + """Processing type.""" + return self._processing_type + + @processing_type.setter + def processing_type(self, new_type): + if not isinstance(new_type, str): + raise _exceptions.TypeError( + 'expected argument of type str, got ' + str(type(new_type)) + ) + elif new_type not in self.ProcessingTypes._fields: + raise _exceptions.ValueError( + f'invalid processing type: {new_type}. Must be one of: ' + '`self.ProcessingTypes` fields.' + ) + self._processing_type = new_type + + @property + def processing_type_param1(self): + """Processing type param1.""" + return self._processing_type_param1 + + @processing_type_param1.setter + def processing_type_param1(self, new_param): + if not isinstance(new_param, (int, float)): + raise _exceptions.TypeError( + 'expected argument of type int or float, got ' + + str(type(new_param)) + ) + self._processing_type_param1 = new_param + + @property + def processing_type_param2(self): + """Processing type param2.""" + return self._processing_type_param2 + + @processing_type_param2.setter + def processing_type_param2(self, new_param): + if not isinstance(new_param, (int, float)): + raise _exceptions.TypeError( + 'expected argument of type int or float, got ' + + str(type(new_param)) + ) + self._processing_type_param2 = new_param + + def update(self, timeout=None): """Update.""" self.connect() + if timeout is not None: + timeout0 = self.timeout self.timeout = timeout + if None in (self.timestamp_start, 
self.timestamp_stop): print('Start and stop timestamps not defined! Aborting.') return - process_type = 'mean' if mean_sec is not None else '' - - data = self.connector.getData( - self._pvname, - self.time_start, - self.time_stop, - process_type=process_type, - interval=mean_sec, - ) + + try: + data = self.connector.get_data( + self._pvname, + self.time_start, + self.time_stop, + query_bin_interval=self.query_bin_interval, + proc_type=self.processing_type, + proc_type_param1=self.processing_type_param1, + proc_type_param2=self.processing_type_param2, + ) + finally: + if timeout is not None: + self.timeout = timeout0 + if not data: return self.set_data(**data) @@ -523,6 +593,8 @@ def from_pickle(fname): class PVDataSet(_Base): """A set of PVData objects.""" + ProcessingTypes = _ClientArchiver.ProcessingTypes + def __init__(self, pvnames, connector=None, offline_data=False): """Initialize.""" super().__init__(connector, offline_data=offline_data) @@ -539,12 +611,160 @@ def pvnames(self, new_pvnames): self._pvnames = new_pvnames self._pvdata = self._init_pvdatas(new_pvnames, self.connector) + @property + def query_bin_interval(self): + """Query bin interval.""" + qry = [self._pvdata[pvn].query_bin_interval for pvn in self._pvnames] + if len(set(qry)) == 1: + return qry[0] + return qry + + @query_bin_interval.setter + def query_bin_interval(self, value): + if isinstance(value, int): + value = len(self._pvnames) * [value] + if len(value) != len(self._pvnames): + raise ValueError('value must have the same length as pvnames') + + for pvn, val in zip(self._pvnames, value): # noqa: B905 + self._pvdata[pvn].query_bin_interval = val + + @property + def time_start(self): + """Start time.""" + tstt = [self._pvdata[pvn].time_start for pvn in self._pvnames] + if len(set(tstt)) == 1: + return tstt[0] + return tstt + + @time_start.setter + def time_start(self, value): + if isinstance(value, _Time): + value = len(self._pvnames) * [value] + if len(value) != len(self._pvnames): + 
raise ValueError('value must have the same length as pvnames') + + for pvn, val in zip(self._pvnames, value): # noqa: B905 + self._pvdata[pvn].time_start = val + + @property + def timestamp_start(self): + """Start timestamp.""" + tstt = [self._pvdata[pvn].timestamp_start for pvn in self._pvnames] + if len(set(tstt)) == 1: + return tstt[0] + return tstt + + @timestamp_start.setter + def timestamp_start(self, value): + if isinstance(value, (int, float)): + value = len(self._pvnames) * [value] + if len(value) != len(self._pvnames): + raise ValueError('value must have the same length as pvnames') + + for pvn, val in zip(self._pvnames, value): # noqa: B905 + self._pvdata[pvn].timestamp_start = val + + @property + def time_stop(self): + """Stop time.""" + tstt = [self._pvdata[pvn].time_stop for pvn in self._pvnames] + if len(set(tstt)) == 1: + return tstt[0] + return tstt + + @time_stop.setter + def time_stop(self, value): + if isinstance(value, _Time): + value = len(self._pvnames) * [value] + if len(value) != len(self._pvnames): + raise ValueError('value must have the same length as pvnames') + + for pvn, val in zip(self._pvnames, value): # noqa: B905 + self._pvdata[pvn].time_stop = val + + @property + def timestamp_stop(self): + """Stop timestamp.""" + tstt = [self._pvdata[pvn].timestamp_stop for pvn in self._pvnames] + if len(set(tstt)) == 1: + return tstt[0] + return tstt + + @timestamp_stop.setter + def timestamp_stop(self, value): + if isinstance(value, (int, float)): + value = len(self._pvnames) * [value] + if len(value) != len(self._pvnames): + raise ValueError('value must have the same length as pvnames') + + for pvn, val in zip(self._pvnames, value): # noqa: B905 + self._pvdata[pvn].timestamp_stop = val + + @property + def processing_type(self): + """Processing type.""" + proc = [self._pvdata[pvn].processing_type for pvn in self._pvnames] + if len(set(proc)) == 1: + return proc[0] + return proc + + @processing_type.setter + def processing_type(self, value): + 
if isinstance(value, str): + value = len(self._pvnames) * [value] + if len(value) != len(self._pvnames): + raise ValueError('value must have the same length as pvnames') + + for pvn, val in zip(self._pvnames, value): # noqa: B905 + self._pvdata[pvn].processing_type = val + + @property + def processing_type_param1(self): + """Processing type param1.""" + param = [ + self._pvdata[pvn].processing_type_param1 for pvn in self._pvnames + ] + if len(set(param)) == 1: + return param[0] + return param + + @processing_type_param1.setter + def processing_type_param1(self, value): + if value is None or isinstance(value, (int, float)): + value = len(self._pvnames) * [value] + if len(value) != len(self._pvnames): + raise ValueError('value must have the same length as pvnames') + + for pvn, val in zip(self._pvnames, value): # noqa: B905 + self._pvdata[pvn].processing_type_param1 = val + + @property + def processing_type_param2(self): + """Processing type param2.""" + param = [ + self._pvdata[pvn].processing_type_param2 for pvn in self._pvnames + ] + if len(set(param)) == 1: + return param[0] + return param + + @processing_type_param2.setter + def processing_type_param2(self, value): + if value is None or isinstance(value, (int, float)): + value = len(self._pvnames) * [value] + if len(value) != len(self._pvnames): + raise ValueError('value must have the same length as pvnames') + + for pvn, val in zip(self._pvnames, value): # noqa: B905 + self._pvdata[pvn].processing_type_param2 = val + @property def is_archived(self): """Is archived.""" self.connect() for pvn in self._pvnames: - if self.connector.getPVDetails(pvn) is None: + if self.connector.get_pv_details(pvn) is None: return False return True @@ -554,7 +774,7 @@ def not_archived(self): self.connect() not_archived = list() for pvn in self._pvnames: - if self.connector.getPVDetails(pvn) is None: + if self.connector.get_pv_details(pvn) is None: not_archived.append(pvn) return not_archived @@ -564,22 +784,53 @@ def archived(self): 
archived = set(self._pvnames) - set(self.not_archived) return list(archived) - def update(self, mean_sec=None, timeout=None): + def update(self, timeout=None): """Update.""" self.connect() + if timeout is not None: + timeout0 = self.timeout self.timeout = timeout - if None in (self.timestamp_start, self.timestamp_stop): - print('Start and stop timestamps not defined! Aborting.') - return - process_type = 'mean' if mean_sec is not None else '' - - data = self.connector.getData( - self._pvnames, - self.time_start, - self.time_stop, - process_type=process_type, - interval=mean_sec, + + all_urls = [] + pvn2idcs = dict() + for pvn in self._pvnames: + pvd = self._pvdata[pvn] + if None in (pvd.timestamp_start, pvd.timestamp_stop): + print( + f'Start and stop times not defined for PV {pvn}' + '! Aborting.' + ) + if timeout is not None: + self.timeout = timeout0 + return + urls = self.connector.get_request_url_for_get_data( + pvn, + pvd.time_start, + pvd.time_stop, + query_bin_interval=pvd.query_bin_interval, + proc_type=pvd.processing_type, + proc_type_param1=pvd.processing_type_param1, + proc_type_param2=pvd.processing_type_param2, + return_pv2indcs_dict=False, + ) + urls = [urls] if isinstance(urls, str) else urls + ini = len(all_urls) + all_urls.extend(urls) + end = len(all_urls) + pvn2idcs[pvn] = _np.arange(ini, end) + + try: + resps = self.connector.make_request(all_urls, return_json=True) + finally: + if timeout is not None: + self.timeout = timeout0 + + if not resps: + return None + + data = self.connector.process_resquest_of_get_data( + self._pvnames, resps, pvn2idcs ) if not data: diff --git a/siriuspy/siriuspy/currinfo/main.py b/siriuspy/siriuspy/currinfo/main.py index c4a9823f4..74dcda0a0 100644 --- a/siriuspy/siriuspy/currinfo/main.py +++ b/siriuspy/siriuspy/currinfo/main.py @@ -56,7 +56,7 @@ def close(self): def _get_value_from_arch(pvname): carch = _ClientArch() datetime = _datetime.now().astimezone().isoformat() - return carch.getData(pvname, datetime, 
datetime) + return carch.get_data(pvname, datetime, datetime) class _ASCurrInfoApp(_CurrInfoApp): diff --git a/siriuspy/siriuspy/machshift/macreport.py b/siriuspy/siriuspy/machshift/macreport.py index 1356ea04d..9689cfcb0 100644 --- a/siriuspy/siriuspy/machshift/macreport.py +++ b/siriuspy/siriuspy/machshift/macreport.py @@ -1053,9 +1053,6 @@ def update(self): pvds.time_start = self._time_start pvds.time_stop = self._time_stop - init_bin_interval = self._connector.query_bin_interval - bin_interval = (self._time_start - self._time_stop).total_seconds() + 1 - self._update_log( 'Collecting archiver data ' f'({self.time_start.get_iso8601()} to' @@ -1065,11 +1062,13 @@ def update(self): # current _t0 = _time.time() - self._connector.query_bin_interval = 60 * 60 * 6 - self._pvdata[self._current_pv].update(MacReport.QUERY_AVG_TIME) - self._update_log(log_msg.format(self._current_pv, _time.time()-_t0)) + pvd = self._pvdata[self._current_pv] + pvd.query_bin_interval = 60 * 60 * 6 + pvd.processing_type = pvd.ProcessingTypes.Mean + pvd.processing_type_param1 = MacReport.QUERY_AVG_TIME + pvd.update() - self._connector.query_bin_interval = bin_interval + self._update_log(log_msg.format(self._current_pv, _time.time()-_t0)) # macshift, interlock and stability indicators for pvn in self._pvnames: @@ -1084,9 +1083,8 @@ def update(self): _t0 = _time.time() pvdataset.update() self._update_log(log_msg.format( - 'SI PS '+group.capitalize(), _time.time()-_t0)) - - self._connector.query_bin_interval = init_bin_interval + 'SI PS '+group.capitalize(), _time.time()-_t0) + ) self._compute_stats() diff --git a/siriuspy/siriuspy/pwrsupply/tests/Untitled.ipynb b/siriuspy/siriuspy/pwrsupply/tests/Untitled.ipynb index c5942f78b..2034c799e 100644 --- a/siriuspy/siriuspy/pwrsupply/tests/Untitled.ipynb +++ b/siriuspy/siriuspy/pwrsupply/tests/Untitled.ipynb @@ -101,10 +101,10 @@ "tstamp1 = ini_time.isoformat() + '-03:00'\n", "tstamp2 = end_time.isoformat() + '-03:00'\n", "\n", - "info11 = 
ca.getData(pvname11, tstamp1, tstamp2)\n", - "info12 = ca.getData(pvname12, tstamp1, tstamp2)\n", - "info13 = ca.getData(pvname13, tstamp1, tstamp2)\n", - "info14 = ca.getData(pvname14, tstamp1, tstamp2)\n", + "info11 = ca.get_data(pvname11, tstamp1, tstamp2)\n", + "info12 = ca.get_data(pvname12, tstamp1, tstamp2)\n", + "info13 = ca.get_data(pvname13, tstamp1, tstamp2)\n", + "info14 = ca.get_data(pvname14, tstamp1, tstamp2)\n", "time11, data11 = info11['timestamp'], info11['value']\n", "time12, data12 = info12['timestamp'], info12['value']\n", "time13, data13 = info13['timestamp'], info13['value']\n", diff --git a/siriuspy/tests/currinfo/test_main.py b/siriuspy/tests/currinfo/test_main.py index 7a8102bc4..6778087f4 100755 --- a/siriuspy/tests/currinfo/test_main.py +++ b/siriuspy/tests/currinfo/test_main.py @@ -39,7 +39,7 @@ def setUp(self): "siriuspy.currinfo.main._ClientArch", autospec=True) self.addCleanup(ca_patcher.stop) self.mock_ca = ca_patcher.start() - self.mock_ca.return_value.getData.return_value = None + self.mock_ca.return_value.get_data.return_value = None pv_patcher = mock.patch( "siriuspy.currinfo.main._PV", autospec=True) self.addCleanup(pv_patcher.stop) From 7a0f007091c70a8429fb02524e11e5abb8673b7f Mon Sep 17 00:00:00 2001 From: Fernando Date: Mon, 30 Mar 2026 16:14:34 -0300 Subject: [PATCH 19/60] BUG: (CLTARC.TIME) Fix Time class constructor to pass the tests. --- siriuspy/siriuspy/clientarch/time.py | 38 +++++++----- siriuspy/tests/clientarch/test_clientarch.py | 61 ++++++++++++++------ 2 files changed, 66 insertions(+), 33 deletions(-) diff --git a/siriuspy/siriuspy/clientarch/time.py b/siriuspy/siriuspy/clientarch/time.py index be1780082..3248e8e15 100644 --- a/siriuspy/siriuspy/clientarch/time.py +++ b/siriuspy/siriuspy/clientarch/time.py @@ -75,11 +75,6 @@ def __new__(cls, *args, **kwargs): # noqa: D417, C901 raise TypeError( 'Conflicting positional and keyword arguments.' 
) - - if not {'timestamp', 'timestamp_string'} - kwargs.keys(): - raise TypeError( - 'Conflicting positional and keyword arguments.' - ) elif len(args) == 8: if 'tzinfo' in kwargs: raise TypeError( @@ -88,24 +83,35 @@ def __new__(cls, *args, **kwargs): # noqa: D417, C901 kwargs['tzinfo'] = args[7] args = args[:7] - tim = None + if not {'timestamp', 'timestamp_string'} - kwargs.keys(): + raise TypeError( + 'Conflicting positional and keyword arguments.' + ) + + tz = kwargs.get('tzinfo') + tzl = _datetime.now().astimezone().tzinfo if 'datetime' in kwargs: - tim = Time.fromtimestamp(kwargs['datetime'].timestamp()) + dtim = kwargs['datetime'] + tz = tz or dtim.tzinfo or tzl + return super().fromtimestamp(dtim.timestamp(), tz=tz) elif 'timestamp' in kwargs: - tim = Time.fromtimestamp(kwargs['timestamp']) + return super().fromtimestamp(kwargs['timestamp'], tz=tz or tzl) elif 'timestamp_string' in kwargs: - ts_fmt = kwargs.get( - 'timestamp_format', Time._DEFAULT_TIMESTAMP_FORMAT - ) ts_str = kwargs['timestamp_string'] try: - tim = Time.strptime(ts_str, ts_fmt) + ts_fmt = kwargs.get( + 'timestamp_format', Time._DEFAULT_TIMESTAMP_FORMAT + ) + return super().strptime(ts_str, ts_fmt).replace( + tzinfo=tz or tzl + ) except ValueError: - return Time.fromisoformat(ts_str) + tim = super().fromisoformat(ts_str) + tz = tz or tim.tzinfo + return super().fromtimestamp(tim.timestamp(), tz=tz) else: - tim = super().__new__(cls, *args, **kwargs) - - return tim.replace(tzinfo=kwargs.get('tzinfo', tim.tzinfo)) + kwargs.setdefault('tzinfo', tzl) + return super().__new__(cls, *args, **kwargs) def get_iso8601(self): """Get iso8601 format.""" diff --git a/siriuspy/tests/clientarch/test_clientarch.py b/siriuspy/tests/clientarch/test_clientarch.py index 71d79f702..90c39bc6e 100644 --- a/siriuspy/tests/clientarch/test_clientarch.py +++ b/siriuspy/tests/clientarch/test_clientarch.py @@ -1,7 +1,9 @@ #!/usr/bin/env python-sirius """Test the archiver client class.""" + import datetime +import 
traceback from unittest import TestCase from siriuspy.clientarch.time import get_time_intervals, Time @@ -16,30 +18,33 @@ def test_constructor(self): tim_dt = datetime.datetime(2025, 1, 8, 10, 13, 14, 4587, tz_local) tim_dt_naive = datetime.datetime(2025, 1, 8, 10, 13, 14, 4587) try: - tim_naive = Time(2025, 1, 8) tim_naive = Time(2025, 1, 8, 10) tim_naive = Time(2025, 1, 8, 10, 13) tim_naive = Time(2025, 1, 8, 10, 13, 14) tim_naive = Time(2025, 1, 8, 10, 13, 14, 4587) + tim_naive = Time(tim_naive) tim_ts1 = Time(tim_naive.timestamp()) - tim_ts2 = Time(tim_naive.strftime( - tim_naive._DEFAULT_TIMESTAMP_FORMAT) + tim_ts2 = Time( + tim_naive.strftime(tim_naive._DEFAULT_TIMESTAMP_FORMAT) ) tim_ts3 = Time(tim_naive.get_iso8601()) tim_ts4 = Time(tim_dt_naive) tim_ts5 = Time(tim_dt) + tim_ts6 = Time(tim_naive) except Exception as err: + traceback.print_exc() self.fail(err) self.assertEqual(tim_ts1, tim_naive) self.assertEqual(tim_ts2, tim_naive) - self.assertNotEqual(tim_ts3, tim_naive) + self.assertEqual(tim_ts3, tim_naive) self.assertEqual(tim_ts4, tim_naive) - self.assertNotEqual(tim_ts5, tim_naive) + self.assertEqual(tim_ts5, tim_naive) + self.assertEqual(tim_ts6, tim_naive) - tz_info = datetime.timezone(datetime.timedelta(seconds=-3600)) + tz_info = datetime.timezone(datetime.timedelta(seconds=-1 * 3600)) try: tim = Time(2025, 1, 8, 10, 13, 14, 4587, tz_info) tim = Time(2025, 1, 8, tzinfo=tz_info) @@ -51,25 +56,47 @@ def test_constructor(self): tim_ts1 = Time(tim_naive.timestamp(), tzinfo=tz_info) tim_ts2 = Time( tim_naive.strftime(tim_naive._DEFAULT_TIMESTAMP_FORMAT), - tzinfo=tz_info + tzinfo=tz_info, ) tim_ts3 = Time(tim_naive.get_iso8601(), tzinfo=tz_info) tim_ts4 = Time(tim_dt) - tim_ts5 = Time(tim_dt, tzinfo=tz_info) + tim_ts5 = Time(tim_dt_naive, tzinfo=tz_info) + tim_ts6 = Time(tim_naive, tzinfo=tz_info) + tim_ts7 = Time(tim) except Exception as err: + traceback.print_exc() self.fail(err) self.assertNotEqual(tim, tim_naive) - self.assertEqual(tim_ts1, tim) + 
self.assertNotEqual(tim.timestamp(), tim_naive.timestamp()) + + self.assertNotEqual(tim_ts1, tim) + self.assertNotEqual(tim_ts1.timestamp(), tim.timestamp()) + self.assertEqual(tim_ts2, tim) - self.assertEqual(tim_ts3, tim) + + self.assertNotEqual(tim_ts3, tim) + self.assertNotEqual(tim_ts3.timestamp(), tim.timestamp()) + self.assertEqual(tim_ts1, tim_ts3) + self.assertNotEqual(tim_ts4, tim) - self.assertEqual(tim_ts5, tim) + self.assertNotEqual(tim_ts4.timestamp(), tim.timestamp()) + self.assertEqual(tim_ts3, tim_ts4) + + self.assertNotEqual(tim_ts5, tim) + self.assertNotEqual(tim_ts5.timestamp(), tim.timestamp()) + self.assertEqual(tim_ts4, tim_ts5) + + self.assertNotEqual(tim_ts6, tim) + self.assertNotEqual(tim_ts6.timestamp(), tim.timestamp()) + self.assertEqual(tim_ts5, tim_ts6) + + self.assertEqual(tim_ts7, tim) with self.assertRaises(ValueError): Time('2025-01-ladieno') with self.assertRaises(TypeError): - Time((tim, )) + Time((tim,)) ts_int = tim.timestamp() ts_str = tim.get_iso8601() with self.assertRaises(TypeError): @@ -90,18 +117,18 @@ def test_constructor(self): def test_get_time_intervals(self): """Test get_time_intervals.""" time_start = Time(2026, 1, 13, 0, 0, 0, 345) - time_stop = time_start + 24*3600 - interval = 3600*10 + time_stop = time_start + 24 * 3600 + interval = 3600 * 10 tst_corr = [ '2026-01-13T00:00:00.000345-03:00', '2026-01-13T10:00:00.000345-03:00', - '2026-01-13T20:00:00.000345-03:00' + '2026-01-13T20:00:00.000345-03:00', ] tsp_corr = [ '2026-01-13T10:00:00.000345-03:00', '2026-01-13T20:00:00.000345-03:00', - '2026-01-14T00:00:00.000345-03:00' + '2026-01-14T00:00:00.000345-03:00', ] tst, tsp = get_time_intervals( time_start, time_stop, interval, return_isoformat=True @@ -117,7 +144,7 @@ def test_get_time_intervals(self): self.assertEqual(tst, tst_corr) self.assertEqual(tsp, tsp_corr) - time_stop = time_start + 4*3600 + time_stop = time_start + 4 * 3600 tst, tsp = get_time_intervals( time_start, time_stop, interval, 
return_isoformat=False ) From 262783c808b544508b7f40ed2c2a2b3514e0bda4 Mon Sep 17 00:00:00 2001 From: Fernando Date: Mon, 30 Mar 2026 16:52:53 -0300 Subject: [PATCH 20/60] ENH: (CLTARCH.CLT) Adds configurable concurrency limit for async queries Introduces a maximum concurrency parameter for managing the number of simultaneous asynchronous queries to the server, improving resource control and preventing overload. Allows dynamic adjustment of concurrency at runtime via a property, and ensures requests are correctly throttled using an asyncio semaphore. Also fixes an issue with tuple assignment in value extraction. --- siriuspy/siriuspy/clientarch/client.py | 45 +++++++++++++++++++++----- 1 file changed, 37 insertions(+), 8 deletions(-) diff --git a/siriuspy/siriuspy/clientarch/client.py b/siriuspy/siriuspy/clientarch/client.py index 77d4a9fb0..d9f651b13 100644 --- a/siriuspy/siriuspy/clientarch/client.py +++ b/siriuspy/siriuspy/clientarch/client.py @@ -18,7 +18,7 @@ import urllib3 as _urllib3 from aiohttp import ( client_exceptions as _aio_exceptions, - ClientSession as _ClientSession + ClientSession as _ClientSession, ) from mathphys.functions import get_namedtuple as _get_namedtuple @@ -36,6 +36,7 @@ class ClientArchiver: """Archiver Data Fetcher class.""" DEF_QUERY_BIN_INTERVAL = 12 * 60 * 60 # 12h + DEF_QUERY_MAX_CONCURRENCY = 100 # maximum number of concurrent queries DEFAULT_TIMEOUT = 5.0 # [s] SERVER_URL = _envars.SRVURL_ARCHIVER ENDPOINT = '/mgmt/bpl' @@ -98,8 +99,9 @@ def __init__(self, server_url=None, timeout=None): self._timeout = timeout self._url = server_url or self.SERVER_URL self._request_url = None - self._thread = self._loop = None + self._thread = self._loop = self._semaphore = None self._query_bin_interval = self.DEF_QUERY_BIN_INTERVAL + self._query_max_concurrency = self.DEF_QUERY_MAX_CONCURRENCY self.connect() _urllib3.disable_warnings(_urllib3.exceptions.InsecureRequestWarning) @@ -113,6 +115,10 @@ def connect(self): return self._loop = 
_asyncio.new_event_loop() + self._semaphore = _asyncio.Semaphore( + self._query_max_concurrency, loop=self._loop + ) # limit concurrent requests + self._thread = _Thread(target=self._run_event_loop, daemon=True) self._thread.start() @@ -176,7 +182,7 @@ def server_url(self, url): @property def query_bin_interval(self): - """Query bin interval.""" + """Queries larger than this interval will be split.""" return self._query_bin_interval @query_bin_interval.setter @@ -188,6 +194,23 @@ def query_bin_interval(self, new_intvl): ) self._query_bin_interval = new_intvl + @property + def query_max_concurrency(self): + """Maximum number of concurrent queries to server.""" + return self._query_max_concurrency + + @query_max_concurrency.setter + def query_max_concurrency(self, new_val): + if not isinstance(new_val, (float, int)): + raise _exceptions.TypeError( + 'expected argument of type float or int, got ' + + str(type(new_val)) + ) + self._query_max_concurrency = int(new_val) + self._semaphore = _asyncio.Semaphore( + self._query_max_concurrency, loop=self._loop + ) + @property def last_requested_url(self): """.""" @@ -640,9 +663,9 @@ def process_resquest_of_get_data(self, pvnames, resps, pvn2idcs): timestamp = value = status = severity = None else: _, _tsidx = _np.unique(_ts, return_index=True) - timestamp = _ts[_tsidx], - status = _st[_tsidx], - severity = _sv[_tsidx], + timestamp = _ts[_tsidx] + status = _st[_tsidx] + severity = _sv[_tsidx] value = [_vs[i] for i in _tsidx] pvn2resp[pvn] = dict( @@ -868,9 +891,15 @@ async def _get_request_response(self, url, session, return_json): """Get request response.""" url = [url] if isinstance(url, str) else url try: + + async def fetch_with_limit(u): + async with self._semaphore: + return await session.get( + u, ssl=False, timeout=self._timeout + ) + response = await _asyncio.gather(*[ - session.get(u, ssl=False, timeout=self._timeout) - for u in url + fetch_with_limit(u) for u in url ]) if any([not r.ok for r in response]): return 
None From 8e050b83eb4187f747ad599a35c1393613412bc8 Mon Sep 17 00:00:00 2001 From: Fernando Date: Tue, 31 Mar 2026 10:53:38 -0300 Subject: [PATCH 21/60] BUG: (CLTARC) Fixes inconsistent argument names and processing type checks fix name of input variable of get_request_url_for_get_data and method to initialize pvdatas in PVDataSet. --- siriuspy/siriuspy/clientarch/client.py | 2 +- siriuspy/siriuspy/clientarch/pvarch.py | 15 ++++----------- 2 files changed, 5 insertions(+), 12 deletions(-) diff --git a/siriuspy/siriuspy/clientarch/client.py b/siriuspy/siriuspy/clientarch/client.py index d9f651b13..38bda76a2 100644 --- a/siriuspy/siriuspy/clientarch/client.py +++ b/siriuspy/siriuspy/clientarch/client.py @@ -460,7 +460,7 @@ def get_data( # noqa: D417 proc_type=proc_type, proc_type_param1=proc_type_param1, proc_type_param2=proc_type_param2, - return_pv2indcs_dict=True, + return_pvn2idcs_dict=True, ) urls = [urls] if isinstance(urls, str) else urls diff --git a/siriuspy/siriuspy/clientarch/pvarch.py b/siriuspy/siriuspy/clientarch/pvarch.py index 8b9933c5e..07f8087d5 100644 --- a/siriuspy/siriuspy/clientarch/pvarch.py +++ b/siriuspy/siriuspy/clientarch/pvarch.py @@ -388,10 +388,10 @@ def processing_type(self, new_type): raise _exceptions.TypeError( 'expected argument of type str, got ' + str(type(new_type)) ) - elif new_type not in self.ProcessingTypes._fields: + elif new_type not in self.ProcessingTypes: raise _exceptions.ValueError( f'invalid processing type: {new_type}. Must be one of: ' - '`self.ProcessingTypes` fields.' + '`self.ProcessingTypes`.' 
) self._processing_type = new_type @@ -812,7 +812,7 @@ def update(self, timeout=None): proc_type=pvd.processing_type, proc_type_param1=pvd.processing_type_param1, proc_type_param2=pvd.processing_type_param2, - return_pv2indcs_dict=False, + return_pvn2idcs_dict=False, ) urls = [urls] if isinstance(urls, str) else urls ini = len(all_urls) @@ -898,14 +898,7 @@ def gen_archviewer_url_link( return url def _init_pvdatas(self, pvnames, connector): - pvdata = dict() - for pvname in pvnames: - pvdata[pvname] = PVData(pvname, connector) - if self.time_start is not None: - pvdata[pvname].time_start = self.time_start - if self.time_stop is not None: - pvdata[pvname].time_stop = self._time_stop - return pvdata + return {pvname: PVData(pvname, connector) for pvname in pvnames} def __getitem__(self, val): """Get item.""" From b8d49a9c0c9b71bbfd294b0961f1e5181078247b Mon Sep 17 00:00:00 2001 From: Fernando Date: Tue, 31 Mar 2026 11:02:26 -0300 Subject: [PATCH 22/60] ENH: (CLTARC.CLT) Adds appliance and process metrics retrieval methods Introduces methods to fetch detailed and process metrics for the archiver appliance, improving observability and diagnostics. Also adds debugging output for URL requests to aid troubleshooting. --- siriuspy/siriuspy/clientarch/client.py | 33 ++++++++++++++++++++++++++ 1 file changed, 33 insertions(+) diff --git a/siriuspy/siriuspy/clientarch/client.py b/siriuspy/siriuspy/clientarch/client.py index 38bda76a2..35e8c5332 100644 --- a/siriuspy/siriuspy/clientarch/client.py +++ b/siriuspy/siriuspy/clientarch/client.py @@ -299,6 +299,37 @@ def delete_pvs(self, pvnames, delete_data=False): ) self.make_request(url, need_login=True) + def get_detailed_appliance_metrics(self): + """Get detailed appliance metrics for archiver appliance. + + Returns: + response (dict|None): Response of the request. 
+ """ + url = self._create_url( + method='getApplianceMetricsForAppliance', + appliance='lnls_control_appliance_1' + ) + resp = self.make_request(url, return_json=True) + return None if not resp else resp + + def get_process_metrics_for_appliance(self): + """Get process metrics for archiver appliance. + + Returns: + response (dict|None): Response of the request. The metrics + that are returned in case of success are: + - system load in % + - engine heap in % + - etl heap in % + - retrieval heap in % + """ + url = self._create_url( + method='getProcessMetricsDataForAppliance', + appliance='lnls_control_appliance_1' + ) + resp = self.make_request(url, return_json=True) + return None if not resp else resp + def get_report(self, report_name='PausedPVs', max_num_pvs=None): """Get Paused PVs Report. @@ -890,10 +921,12 @@ async def _handle_request(self, url, return_json=False, need_login=False): async def _get_request_response(self, url, session, return_json): """Get request response.""" url = [url] if isinstance(url, str) else url + print(f'\nNumber of urls: {len(url)}') try: async def fetch_with_limit(u): async with self._semaphore: + print(u) return await session.get( u, ssl=False, timeout=self._timeout ) From 9d63a1dc3e2cfbb43a620f365dcf0a80eecd3b70 Mon Sep 17 00:00:00 2001 From: Fernando Date: Tue, 31 Mar 2026 15:11:53 -0300 Subject: [PATCH 23/60] ENH: (CLTARC.CLT) Adds threaded request support alongside async option Introduces a `use_async` switch to allow requests via threads or async methods, improving flexibility and compatibility. Implements threaded login, logout, and request handling using the `requests` library and thread pools. Enables concurrent requests for environments where async is not preferred or supported. 
--- siriuspy/siriuspy/clientarch/client.py | 163 ++++++++++++++++++++++--- 1 file changed, 144 insertions(+), 19 deletions(-) diff --git a/siriuspy/siriuspy/clientarch/client.py b/siriuspy/siriuspy/clientarch/client.py index 35e8c5332..332b7def4 100644 --- a/siriuspy/siriuspy/clientarch/client.py +++ b/siriuspy/siriuspy/clientarch/client.py @@ -10,11 +10,14 @@ import logging as _log import math as _math import urllib as _urllib +from concurrent.futures import ThreadPoolExecutor as _ThreadPoolExecutor from datetime import timedelta as _timedelta from threading import Thread as _Thread from urllib.parse import quote as _quote import numpy as _np +import requests as _requests +import requests.exceptions as _requests_exceptions import urllib3 as _urllib3 from aiohttp import ( client_exceptions as _aio_exceptions, @@ -96,12 +99,14 @@ def __init__(self, server_url=None, timeout=None): """Initialize.""" timeout = timeout or ClientArchiver.DEFAULT_TIMEOUT self.session = None + self._threaded_session = None self._timeout = timeout self._url = server_url or self.SERVER_URL self._request_url = None self._thread = self._loop = self._semaphore = None self._query_bin_interval = self.DEF_QUERY_BIN_INTERVAL self._query_max_concurrency = self.DEF_QUERY_MAX_CONCURRENCY + self._use_async = True self.connect() _urllib3.disable_warnings(_urllib3.exceptions.InsecureRequestWarning) @@ -141,12 +146,16 @@ def shutdown(self, timeout=5): @property def connected(self): """Connected.""" - if not self._loop_alive(): + if self._use_async and not self._loop_alive(): return False try: resp = self.make_request(self._url, return_json=False) - return resp.status == 200 - except _urllib.error.URLError: + return ( + resp.status_code == 200 + if hasattr(resp, 'status_code') + else resp.status == 200 + ) + except (_urllib.error.URLError, _requests_exceptions.RequestException): return False @property @@ -211,6 +220,19 @@ def query_max_concurrency(self, new_val): self._query_max_concurrency, 
loop=self._loop ) + @property + def use_async(self): + """Choose between using async or threads for concurrency. + + async --> aiohttp for requests, asyncio for concurrency. + threads --> requests for requests, threading for concurrency. + """ + return self._use_async + + @use_async.setter + def use_async(self, value): + self._use_async = bool(value) + @property def last_requested_url(self): """.""" @@ -221,29 +243,55 @@ def login(self, username, password): headers = {'User-Agent': 'Mozilla/5.0'} payload = {'username': username, 'password': password} url = self._create_url(method='login') - coro = self._create_session( - url, headers=headers, payload=payload, ssl=False - ) - ret = self._run_sync_coro(coro) - if ret is not None: - self.session, authenticated = ret + if self._use_async: + coro = self._create_session( + url, headers=headers, payload=payload, ssl=False + ) + ret = self._run_sync_coro(coro) + if ret is not None: + self.session, authenticated = ret + if authenticated: + print( + 'Reminder: close connection after using this ' + 'session by calling logout method!' + ) + else: + self.logout() + return authenticated + else: + session = _requests.Session() + response = session.post( + url, + headers=headers, + data=payload, + verify=False, + timeout=self._timeout, + ) + authenticated = b'authenticated' in response.content if authenticated: + self._threaded_session = session print( 'Reminder: close connection after using this ' 'session by calling logout method!' 
) else: - self.logout() + session.close() return authenticated return False def logout(self): """Close login session.""" - if self.session: - coro = self._close_session() - resp = self._run_sync_coro(coro) - self.session = None - return resp + if self._use_async: + if self.session: + coro = self._close_session() + resp = self._run_sync_coro(coro) + self.session = None + return resp + else: + if self._threaded_session: + self._threaded_session.close() + self._threaded_session = None + return True return None def get_pvs_info(self, wildcards='*', max_num_pvs=-1): @@ -740,10 +788,15 @@ def make_request(self, url, need_login=False, return_json=False): dict: dictionary with response. """ self._request_url = url - coro = self._handle_request( - url, return_json=return_json, need_login=need_login - ) - return self._run_sync_coro(coro) + if self._use_async: + coro = self._handle_request( + url, return_json=return_json, need_login=need_login + ) + return self._run_sync_coro(coro) + else: + return self._handle_request_threaded( + url, return_json=return_json, need_login=need_login + ) @staticmethod def gen_archviewer_url_link( @@ -974,3 +1027,75 @@ async def _create_session(self, url, headers, payload, ssl): async def _close_session(self): """Close session.""" return await self.session.close() + + # ---------- threaded methods ---------- + + def _handle_request_threaded( + self, url, return_json=False, need_login=False + ): + """Handle request with threads.""" + if self._threaded_session is not None: + response = self._get_request_response_threaded( + url, self._threaded_session, return_json + ) + elif need_login: + raise _exceptions.AuthenticationError('You need to login first.') + else: + with _requests.Session() as sess: + response = self._get_request_response_threaded( + url, sess, return_json + ) + return response + + def _get_request_response_threaded(self, url, session, return_json): + """Get request response with threads.""" + url = [url] if isinstance(url, str) else 
url + print(f'\nNumber of urls: {len(url)}') + + def fetch(u): + print(u) + return session.get(u, verify=False, timeout=self._timeout) + + try: + with _ThreadPoolExecutor( + max_workers=self._query_max_concurrency + ) as executor: + # Submit all tasks and collect futures + futures = [executor.submit(fetch, u) for u in url] + responses = [] + + # Collect results with proper exception handling + for future in futures: + response = future.result(timeout=self._timeout) + responses.append(response) + except _requests_exceptions.Timeout as err: + raise _exceptions.TimeoutError( + 'Timeout reached. Try to increase `timeout`.' + ) from err + except _requests_exceptions.RequestException as err: + _log.exception('Request error: %s', err) + raise _exceptions.PayloadError( + "Request Error. Increasing `timeout` won't help. " + 'Try decreasing query_bin_interval, or decrease the ' + 'time interval for the acquisition.' + ) from err + except Exception as err: + _log.exception('Unexpected error in request: %s', err) + raise + + if any(not r.ok for r in responses): + return None + if return_json: + jsons = [] + for res in responses: + try: + data = res.json() + jsons.append(data) + except ValueError: + _log.error('Error with URL %s', res.url) + jsons.append(None) + responses = jsons + + if len(url) == 1: + return responses[0] + return responses From 86870ae79b490fb863545b795ba421f920959073 Mon Sep 17 00:00:00 2001 From: Fernando Date: Tue, 31 Mar 2026 15:14:22 -0300 Subject: [PATCH 24/60] MNT: (CLTARC.CLT) Fixes argument formatting and docstring indentation Corrects indentation in a docstring for clarity and adds missing trailing commas in argument lists to prevent potential syntax issues. Improves code readability and consistency. 
--- siriuspy/siriuspy/clientarch/client.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/siriuspy/siriuspy/clientarch/client.py b/siriuspy/siriuspy/clientarch/client.py index 332b7def4..d0d0ae834 100644 --- a/siriuspy/siriuspy/clientarch/client.py +++ b/siriuspy/siriuspy/clientarch/client.py @@ -224,8 +224,8 @@ def query_max_concurrency(self, new_val): def use_async(self): """Choose between using async or threads for concurrency. - async --> aiohttp for requests, asyncio for concurrency. - threads --> requests for requests, threading for concurrency. + async --> aiohttp for requests, asyncio for concurrency. + threads --> requests for requests, threading for concurrency. """ return self._use_async @@ -355,7 +355,7 @@ def get_detailed_appliance_metrics(self): """ url = self._create_url( method='getApplianceMetricsForAppliance', - appliance='lnls_control_appliance_1' + appliance='lnls_control_appliance_1', ) resp = self.make_request(url, return_json=True) return None if not resp else resp @@ -373,7 +373,7 @@ def get_process_metrics_for_appliance(self): """ url = self._create_url( method='getProcessMetricsDataForAppliance', - appliance='lnls_control_appliance_1' + appliance='lnls_control_appliance_1', ) resp = self.make_request(url, return_json=True) return None if not resp else resp From 48ffa1268e0bb99936092178ff00def4cf166516 Mon Sep 17 00:00:00 2001 From: Fernando Date: Wed, 1 Apr 2026 11:41:37 -0300 Subject: [PATCH 25/60] ENH: (CLTARC.CLT) Adds flexible query method selection for requests Introduces a unified interface to choose between async, threaded, and serial query methods for network operations, improving flexibility and control over concurrency. Replaces the simple async/threaded flag with a more robust approach, updates session handling, and adds serial (synchronous) support. Improves error handling and logs additional debug information. 
Helps address cases where async may not be suitable, enabling users to switch strategies as needed. --- siriuspy/siriuspy/clientarch/client.py | 244 +++++++++++++++++-------- 1 file changed, 169 insertions(+), 75 deletions(-) diff --git a/siriuspy/siriuspy/clientarch/client.py b/siriuspy/siriuspy/clientarch/client.py index d0d0ae834..19ce08661 100644 --- a/siriuspy/siriuspy/clientarch/client.py +++ b/siriuspy/siriuspy/clientarch/client.py @@ -90,32 +90,36 @@ class ClientArchiver: ProcessingTypes = _get_namedtuple( 'ProcessingTypes', _PROC_TYPES.keys(), _PROC_TYPES.values() ) + QueryMethods = _get_namedtuple( + 'QueryMethods', ('Async', 'Threaded', 'Serial') + ) def __delete__(self): """Turn off thread when deleting.""" + self.logout() self.shutdown() def __init__(self, server_url=None, timeout=None): """Initialize.""" timeout = timeout or ClientArchiver.DEFAULT_TIMEOUT self.session = None - self._threaded_session = None + self._aiohttp_session = None + self._requests_session = None self._timeout = timeout self._url = server_url or self.SERVER_URL self._request_url = None self._thread = self._loop = self._semaphore = None self._query_bin_interval = self.DEF_QUERY_BIN_INTERVAL self._query_max_concurrency = self.DEF_QUERY_MAX_CONCURRENCY - self._use_async = True + self._query_method = self.QueryMethods.Async self.connect() _urllib3.disable_warnings(_urllib3.exceptions.InsecureRequestWarning) def connect(self): - """Starts bg. event loop in a separate thread. + """Starts bg. event loop in a separate thread when in async mode.""" + if self._query_method != self.QueryMethods.Async: + return - Raises: - RuntimeError: when library is alread connected. - """ if self._loop_alive(): return @@ -129,6 +133,9 @@ def connect(self): def shutdown(self, timeout=5): """Safely stops the bg. 
loop and waits for the thread to exit.""" + if self._query_method != self.QueryMethods.Async: + return + if not self._loop_alive(): return @@ -146,7 +153,10 @@ def shutdown(self, timeout=5): @property def connected(self): """Connected.""" - if self._use_async and not self._loop_alive(): + if ( + self._query_method == self.QueryMethods.Async + and not self._loop_alive() + ): return False try: resp = self.make_request(self._url, return_json=False) @@ -189,6 +199,13 @@ def server_url(self, url): self.logout() self._url = url + @property + def session(self): + """.""" + if self._query_method == self.QueryMethods.Async: + return self._aiohttp_session + return self._requests_session + @property def query_bin_interval(self): """Queries larger than this interval will be split.""" @@ -221,17 +238,41 @@ def query_max_concurrency(self, new_val): ) @property - def use_async(self): - """Choose between using async or threads for concurrency. + def query_method(self): + """Define the request method to be used for queries. - async --> aiohttp for requests, asyncio for concurrency. - threads --> requests for requests, threading for concurrency. 
+ Options are: + 0: async (aiohttp + asyncio) + 1: threaded (requests + ThreadPoolExecutor) + 2: serial (requests, direct, no threads/async) """ - return self._use_async + return self._query_method - @use_async.setter - def use_async(self, value): - self._use_async = bool(value) + @property + def query_method_str(self): + """String representation of the query method.""" + return self.QueryMethods._fields[self._query_method] + + @query_method.setter + def query_method(self, value): + if isinstance(value, int) and value in self.QueryMethods: + pass + elif isinstance(value, str) and value in self.QueryMethods._fields: + value = getattr(self.QueryMethods, value) + else: + raise _exceptions.ValueError( + '`query_method` must be (0, 1, 2) or (Async, Threaded, Serial)' + ) + + if self._query_method != value and self.session is not None: + self.logout() + + if value == self.QueryMethods.Async: + self._query_method = value + self.connect() + else: + self.shutdown() + self._query_method = value @property def last_requested_url(self): @@ -243,13 +284,14 @@ def login(self, username, password): headers = {'User-Agent': 'Mozilla/5.0'} payload = {'username': username, 'password': password} url = self._create_url(method='login') - if self._use_async: + + if self._query_method == self.QueryMethods.Async: coro = self._create_session( url, headers=headers, payload=payload, ssl=False ) ret = self._run_sync_coro(coro) if ret is not None: - self.session, authenticated = ret + self._aiohttp_session, authenticated = ret if authenticated: print( 'Reminder: close connection after using this ' @@ -258,41 +300,41 @@ def login(self, username, password): else: self.logout() return authenticated - else: - session = _requests.Session() - response = session.post( - url, - headers=headers, - data=payload, - verify=False, - timeout=self._timeout, + return False + + if self._requests_session is not None: + self._requests_session.close() + + session = _requests.Session() + response = session.post( + 
url, + headers=headers, + data=payload, + verify=False, + timeout=self._timeout, + ) + authenticated = b'authenticated' in response.content + if authenticated: + self._requests_session = session + print( + 'Reminder: close connection after using this ' + 'session by calling logout method!' ) - authenticated = b'authenticated' in response.content - if authenticated: - self._threaded_session = session - print( - 'Reminder: close connection after using this ' - 'session by calling logout method!' - ) - else: - session.close() - return authenticated - return False + else: + session.close() + return authenticated def logout(self): """Close login session.""" - if self._use_async: - if self.session: - coro = self._close_session() - resp = self._run_sync_coro(coro) - self.session = None - return resp - else: - if self._threaded_session: - self._threaded_session.close() - self._threaded_session = None - return True - return None + resp = True + if self._aiohttp_session: + coro = self._close_session() + resp = self._run_sync_coro(coro) + self._aiohttp_session = None + if self._requests_session: + self._requests_session.close() + self._requests_session = None + return resp def get_pvs_info(self, wildcards='*', max_num_pvs=-1): """Get PVs Info. @@ -788,15 +830,20 @@ def make_request(self, url, need_login=False, return_json=False): dict: dictionary with response. 
""" self._request_url = url - if self._use_async: - coro = self._handle_request( - url, return_json=return_json, need_login=need_login - ) + _log.debug('Number of urls: %d', len(url)) + + kwargs = dict(return_json=return_json, need_login=need_login) + if self._query_method == self.QueryMethods.Async: + coro = self._handle_request_async(url, **kwargs) return self._run_sync_coro(coro) - else: - return self._handle_request_threaded( - url, return_json=return_json, need_login=need_login - ) + elif self._query_method == self.QueryMethods.Threaded: + return self._handle_request_threaded(url, **kwargs) + elif self._query_method == self.QueryMethods.Serial: + return self._handle_request_serial(url, **kwargs) + + raise _exceptions.ValueError( + '`query_method` must be (0, 1, 2) or (Async, Threaded, Serial)' + ) @staticmethod def gen_archviewer_url_link( @@ -912,9 +959,12 @@ def _process_url_link_args(pvnames, pvoptnrpts, pvcolors, pvusediff): def _loop_alive(self): """Check if thread is alive and loop is running.""" + if self._query_method != self.QueryMethods.Async: + return False return ( self._thread is not None and self._thread.is_alive() + and self._loop is not None and self._loop.is_running() ) @@ -956,7 +1006,9 @@ def _run_sync_coro(self, coro): # ---------- async methods ---------- - async def _handle_request(self, url, return_json=False, need_login=False): + async def _handle_request_async( + self, url, return_json=False, need_login=False + ): """Handle request.""" if self.session is not None: response = await self._get_request_response( @@ -974,12 +1026,11 @@ async def _handle_request(self, url, return_json=False, need_login=False): async def _get_request_response(self, url, session, return_json): """Get request response.""" url = [url] if isinstance(url, str) else url - print(f'\nNumber of urls: {len(url)}') try: async def fetch_with_limit(u): async with self._semaphore: - print(u) + _log.debug('Fetching URL: %s', u) return await session.get( u, ssl=False, 
timeout=self._timeout ) @@ -1006,8 +1057,10 @@ async def fetch_with_limit(u): except _aio_exceptions.ClientPayloadError as err: raise _exceptions.PayloadError( "Payload Error. Increasing `timeout` won't help. " - 'Try decreasing query_bin_interval, or decrease the' - 'time interval for the aquisition.' + 'Try:\n - decreasing `query_bin_interval`;\n - decrease the ' + 'time interval for the aquisition;\n - change the ' + '`query_max_concurrency` parameter\n - or using the ' + 'threaded or serial options for `query_method`.' ) from err if len(url) == 1: @@ -1034,9 +1087,9 @@ def _handle_request_threaded( self, url, return_json=False, need_login=False ): """Handle request with threads.""" - if self._threaded_session is not None: + if self._requests_session is not None: response = self._get_request_response_threaded( - url, self._threaded_session, return_json + url, self._requests_session, return_json ) elif need_login: raise _exceptions.AuthenticationError('You need to login first.') @@ -1050,21 +1103,17 @@ def _handle_request_threaded( def _get_request_response_threaded(self, url, session, return_json): """Get request response with threads.""" url = [url] if isinstance(url, str) else url - print(f'\nNumber of urls: {len(url)}') - def fetch(u): - print(u) + _log.debug('Fetching URL: %s', u) return session.get(u, verify=False, timeout=self._timeout) try: with _ThreadPoolExecutor( max_workers=self._query_max_concurrency ) as executor: - # Submit all tasks and collect futures futures = [executor.submit(fetch, u) for u in url] - responses = [] - # Collect results with proper exception handling + responses = [] for future in futures: response = future.result(timeout=self._timeout) responses.append(response) @@ -1072,12 +1121,57 @@ def fetch(u): raise _exceptions.TimeoutError( 'Timeout reached. Try to increase `timeout`.' 
) from err - except _requests_exceptions.RequestException as err: - _log.exception('Request error: %s', err) - raise _exceptions.PayloadError( - "Request Error. Increasing `timeout` won't help. " - 'Try decreasing query_bin_interval, or decrease the ' - 'time interval for the acquisition.' + except Exception as err: + _log.exception('Unexpected error in request: %s', err) + raise + + if any(not r.ok for r in responses): + return None + if return_json: + jsons = [] + for res in responses: + try: + data = res.json() + jsons.append(data) + except ValueError: + _log.error('Error with URL %s', res.url) + jsons.append(None) + responses = jsons + + if len(url) == 1: + return responses[0] + return responses + + # ---------- serial methods ---------- + + def _handle_request_serial(self, url, return_json=False, need_login=False): + """Handle request sequentially (no threads/async).""" + if self._requests_session is not None: + response = self._get_request_response_serial( + url, self._requests_session, return_json + ) + elif need_login: + raise _exceptions.AuthenticationError('You need to login first.') + else: + with _requests.Session() as sess: + response = self._get_request_response_serial( + url, sess, return_json + ) + return response + + def _get_request_response_serial(self, url, session, return_json): + """Get request response sequentially with requests.""" + url = [url] if isinstance(url, str) else url + responses = [] + try: + for u in url: + _log.debug('Fetching URL: %s', u) + responses.append( + session.get(u, verify=False, timeout=self._timeout) + ) + except _requests_exceptions.Timeout as err: + raise _exceptions.TimeoutError( + 'Timeout reached. Try to increase `timeout`.' 
) from err except Exception as err: _log.exception('Unexpected error in request: %s', err) From 10c32cb13b0cdb9c14a90a3afd2cbb0ea9505dba Mon Sep 17 00:00:00 2001 From: Fernando Date: Wed, 1 Apr 2026 16:15:19 -0300 Subject: [PATCH 26/60] MNT: (CLTARCH.CLT) Removes threaded and serial query support Simplifies query handling by eliminating threaded and serial methods, retaining only async-based requests. Reduces code complexity, removes unnecessary dependencies, and unifies session management. Focuses maintenance and feature development on the async workflow. --- siriuspy/siriuspy/clientarch/client.py | 265 +++---------------------- 1 file changed, 25 insertions(+), 240 deletions(-) diff --git a/siriuspy/siriuspy/clientarch/client.py b/siriuspy/siriuspy/clientarch/client.py index 19ce08661..f82494d79 100644 --- a/siriuspy/siriuspy/clientarch/client.py +++ b/siriuspy/siriuspy/clientarch/client.py @@ -10,18 +10,15 @@ import logging as _log import math as _math import urllib as _urllib -from concurrent.futures import ThreadPoolExecutor as _ThreadPoolExecutor from datetime import timedelta as _timedelta from threading import Thread as _Thread from urllib.parse import quote as _quote import numpy as _np -import requests as _requests -import requests.exceptions as _requests_exceptions import urllib3 as _urllib3 from aiohttp import ( client_exceptions as _aio_exceptions, - ClientSession as _ClientSession, + ClientSession as _ClientSession ) from mathphys.functions import get_namedtuple as _get_namedtuple @@ -90,9 +87,6 @@ class ClientArchiver: ProcessingTypes = _get_namedtuple( 'ProcessingTypes', _PROC_TYPES.keys(), _PROC_TYPES.values() ) - QueryMethods = _get_namedtuple( - 'QueryMethods', ('Async', 'Threaded', 'Serial') - ) def __delete__(self): """Turn off thread when deleting.""" @@ -111,15 +105,11 @@ def __init__(self, server_url=None, timeout=None): self._thread = self._loop = self._semaphore = None self._query_bin_interval = self.DEF_QUERY_BIN_INTERVAL 
self._query_max_concurrency = self.DEF_QUERY_MAX_CONCURRENCY - self._query_method = self.QueryMethods.Async self.connect() _urllib3.disable_warnings(_urllib3.exceptions.InsecureRequestWarning) def connect(self): """Starts bg. event loop in a separate thread when in async mode.""" - if self._query_method != self.QueryMethods.Async: - return - if self._loop_alive(): return @@ -133,9 +123,6 @@ def connect(self): def shutdown(self, timeout=5): """Safely stops the bg. loop and waits for the thread to exit.""" - if self._query_method != self.QueryMethods.Async: - return - if not self._loop_alive(): return @@ -153,19 +140,12 @@ def shutdown(self, timeout=5): @property def connected(self): """Connected.""" - if ( - self._query_method == self.QueryMethods.Async - and not self._loop_alive() - ): + if not self._loop_alive(): return False try: - resp = self.make_request(self._url, return_json=False) - return ( - resp.status_code == 200 - if hasattr(resp, 'status_code') - else resp.status == 200 - ) - except (_urllib.error.URLError, _requests_exceptions.RequestException): + resp = self.make_request(self._url + '/mgmt') + return resp.status == 200 + except _urllib.error.URLError: return False @property @@ -199,13 +179,6 @@ def server_url(self, url): self.logout() self._url = url - @property - def session(self): - """.""" - if self._query_method == self.QueryMethods.Async: - return self._aiohttp_session - return self._requests_session - @property def query_bin_interval(self): """Queries larger than this interval will be split.""" @@ -237,43 +210,6 @@ def query_max_concurrency(self, new_val): self._query_max_concurrency, loop=self._loop ) - @property - def query_method(self): - """Define the request method to be used for queries. 
- - Options are: - 0: async (aiohttp + asyncio) - 1: threaded (requests + ThreadPoolExecutor) - 2: serial (requests, direct, no threads/async) - """ - return self._query_method - - @property - def query_method_str(self): - """String representation of the query method.""" - return self.QueryMethods._fields[self._query_method] - - @query_method.setter - def query_method(self, value): - if isinstance(value, int) and value in self.QueryMethods: - pass - elif isinstance(value, str) and value in self.QueryMethods._fields: - value = getattr(self.QueryMethods, value) - else: - raise _exceptions.ValueError( - '`query_method` must be (0, 1, 2) or (Async, Threaded, Serial)' - ) - - if self._query_method != value and self.session is not None: - self.logout() - - if value == self.QueryMethods.Async: - self._query_method = value - self.connect() - else: - self.shutdown() - self._query_method = value - @property def last_requested_url(self): """.""" @@ -285,55 +221,27 @@ def login(self, username, password): payload = {'username': username, 'password': password} url = self._create_url(method='login') - if self._query_method == self.QueryMethods.Async: - coro = self._create_session( - url, headers=headers, payload=payload, ssl=False - ) - ret = self._run_sync_coro(coro) - if ret is not None: - self._aiohttp_session, authenticated = ret - if authenticated: - print( - 'Reminder: close connection after using this ' - 'session by calling logout method!' 
- ) - else: - self.logout() - return authenticated - return False - - if self._requests_session is not None: - self._requests_session.close() - - session = _requests.Session() - response = session.post( - url, - headers=headers, - data=payload, - verify=False, - timeout=self._timeout, + coro = self._create_session( + url, headers=headers, payload=payload, ssl=False ) - authenticated = b'authenticated' in response.content - if authenticated: - self._requests_session = session - print( - 'Reminder: close connection after using this ' - 'session by calling logout method!' - ) - else: - session.close() - return authenticated + ret = self._run_sync_coro(coro) + if ret is not None: + self.session, authenticated = ret + if authenticated: + print( + 'Reminder: close connection after using this ' + 'session by calling logout method!' + ) + else: + self.logout() + return authenticated + return False def logout(self): """Close login session.""" - resp = True - if self._aiohttp_session: - coro = self._close_session() - resp = self._run_sync_coro(coro) - self._aiohttp_session = None - if self._requests_session: - self._requests_session.close() - self._requests_session = None + coro = self._close_session() + resp = self._run_sync_coro(coro) + self.session = None return resp def get_pvs_info(self, wildcards='*', max_num_pvs=-1): @@ -832,18 +740,10 @@ def make_request(self, url, need_login=False, return_json=False): self._request_url = url _log.debug('Number of urls: %d', len(url)) - kwargs = dict(return_json=return_json, need_login=need_login) - if self._query_method == self.QueryMethods.Async: - coro = self._handle_request_async(url, **kwargs) - return self._run_sync_coro(coro) - elif self._query_method == self.QueryMethods.Threaded: - return self._handle_request_threaded(url, **kwargs) - elif self._query_method == self.QueryMethods.Serial: - return self._handle_request_serial(url, **kwargs) - - raise _exceptions.ValueError( - '`query_method` must be (0, 1, 2) or (Async, 
Threaded, Serial)' + coro = self._handle_request_async( + url, return_json=return_json, need_login=need_login ) + return self._run_sync_coro(coro) @staticmethod def gen_archviewer_url_link( @@ -959,8 +859,6 @@ def _process_url_link_args(pvnames, pvoptnrpts, pvcolors, pvusediff): def _loop_alive(self): """Check if thread is alive and loop is running.""" - if self._query_method != self.QueryMethods.Async: - return False return ( self._thread is not None and self._thread.is_alive() @@ -1080,116 +978,3 @@ async def _create_session(self, url, headers, payload, ssl): async def _close_session(self): """Close session.""" return await self.session.close() - - # ---------- threaded methods ---------- - - def _handle_request_threaded( - self, url, return_json=False, need_login=False - ): - """Handle request with threads.""" - if self._requests_session is not None: - response = self._get_request_response_threaded( - url, self._requests_session, return_json - ) - elif need_login: - raise _exceptions.AuthenticationError('You need to login first.') - else: - with _requests.Session() as sess: - response = self._get_request_response_threaded( - url, sess, return_json - ) - return response - - def _get_request_response_threaded(self, url, session, return_json): - """Get request response with threads.""" - url = [url] if isinstance(url, str) else url - def fetch(u): - _log.debug('Fetching URL: %s', u) - return session.get(u, verify=False, timeout=self._timeout) - - try: - with _ThreadPoolExecutor( - max_workers=self._query_max_concurrency - ) as executor: - futures = [executor.submit(fetch, u) for u in url] - - responses = [] - for future in futures: - response = future.result(timeout=self._timeout) - responses.append(response) - except _requests_exceptions.Timeout as err: - raise _exceptions.TimeoutError( - 'Timeout reached. Try to increase `timeout`.' 
- ) from err - except Exception as err: - _log.exception('Unexpected error in request: %s', err) - raise - - if any(not r.ok for r in responses): - return None - if return_json: - jsons = [] - for res in responses: - try: - data = res.json() - jsons.append(data) - except ValueError: - _log.error('Error with URL %s', res.url) - jsons.append(None) - responses = jsons - - if len(url) == 1: - return responses[0] - return responses - - # ---------- serial methods ---------- - - def _handle_request_serial(self, url, return_json=False, need_login=False): - """Handle request sequentially (no threads/async).""" - if self._requests_session is not None: - response = self._get_request_response_serial( - url, self._requests_session, return_json - ) - elif need_login: - raise _exceptions.AuthenticationError('You need to login first.') - else: - with _requests.Session() as sess: - response = self._get_request_response_serial( - url, sess, return_json - ) - return response - - def _get_request_response_serial(self, url, session, return_json): - """Get request response sequentially with requests.""" - url = [url] if isinstance(url, str) else url - responses = [] - try: - for u in url: - _log.debug('Fetching URL: %s', u) - responses.append( - session.get(u, verify=False, timeout=self._timeout) - ) - except _requests_exceptions.Timeout as err: - raise _exceptions.TimeoutError( - 'Timeout reached. Try to increase `timeout`.' 
- ) from err - except Exception as err: - _log.exception('Unexpected error in request: %s', err) - raise - - if any(not r.ok for r in responses): - return None - if return_json: - jsons = [] - for res in responses: - try: - data = res.json() - jsons.append(data) - except ValueError: - _log.error('Error with URL %s', res.url) - jsons.append(None) - responses = jsons - - if len(url) == 1: - return responses[0] - return responses From bc471e7f5c9dad9fac0a2297f6a0270b1cd79925 Mon Sep 17 00:00:00 2001 From: Fernando Date: Wed, 1 Apr 2026 16:22:06 -0300 Subject: [PATCH 27/60] BUG: (CLTARCH.CLT) Implement correct way of getting responses with aiohttp. Only return data in json format. Fall back to text when error occurs. Removes the option to return raw responses, ensuring all requests consistently parse and return JSON or text. Simplifies method signatures and internal logic, improving maintainability and reducing ambiguity around response formats. --- siriuspy/siriuspy/clientarch/client.py | 79 +++++++++++--------------- 1 file changed, 32 insertions(+), 47 deletions(-) diff --git a/siriuspy/siriuspy/clientarch/client.py b/siriuspy/siriuspy/clientarch/client.py index f82494d79..bbea9889a 100644 --- a/siriuspy/siriuspy/clientarch/client.py +++ b/siriuspy/siriuspy/clientarch/client.py @@ -262,7 +262,7 @@ def get_pvs_info(self, wildcards='*', max_num_pvs=-1): url = self._create_url( method='getPVStatus', pv=wildcards, limit=max_num_pvs ) - resp = self.make_request(url, return_json=True) + resp = self.make_request(url) return None if not resp else resp def get_all_pvs(self, wildcards='*', max_num_pvs=-1): @@ -282,7 +282,7 @@ def get_all_pvs(self, wildcards='*', max_num_pvs=-1): url = self._create_url( method='getAllPVs', pv=wildcards, limit=max_num_pvs ) - resp = self.make_request(url, return_json=True) + resp = self.make_request(url) return None if not resp else resp def delete_pvs(self, pvnames, delete_data=False): @@ -307,7 +307,7 @@ def 
get_detailed_appliance_metrics(self): method='getApplianceMetricsForAppliance', appliance='lnls_control_appliance_1', ) - resp = self.make_request(url, return_json=True) + resp = self.make_request(url) return None if not resp else resp def get_process_metrics_for_appliance(self): @@ -325,7 +325,7 @@ def get_process_metrics_for_appliance(self): method='getProcessMetricsDataForAppliance', appliance='lnls_control_appliance_1', ) - resp = self.make_request(url, return_json=True) + resp = self.make_request(url) return None if not resp else resp def get_report(self, report_name='PausedPVs', max_num_pvs=None): @@ -347,7 +347,7 @@ def get_report(self, report_name='PausedPVs', max_num_pvs=None): else: url = self._create_url(method=method) - resp = self.make_request(url, return_json=True) + resp = self.make_request(url) return None if not resp else resp def get_recently_modified_pvs(self, max_num_pvs=None, epoch_time=True): @@ -493,7 +493,7 @@ def get_data( # noqa: D417 ) urls = [urls] if isinstance(urls, str) else urls - resps = self.make_request(urls, return_json=True) + resps = self.make_request(urls) if not resps: return None @@ -713,7 +713,7 @@ def get_pv_details(self, pvname, get_request_url=False): url = self._create_url(method='getPVDetails', pv=pvname) if get_request_url: return url - resp = self.make_request(url, return_json=True) + resp = self.make_request(url) return None if not resp else resp def switch_to_online_data(self): @@ -726,13 +726,12 @@ def switch_to_offline_data(self): self.server_url = _envars.SRVURL_ARCHIVER_OFFLINE_DATA self.session = None - def make_request(self, url, need_login=False, return_json=False): + def make_request(self, url, need_login=False): """Make request. Args: url (str|list|tuple): url or list of urls to request. need_login (bool): whether request requires login. - return_json (bool): whether to return json response. Returns: dict: dictionary with response. 
@@ -740,9 +739,7 @@ def make_request(self, url, need_login=False, return_json=False): self._request_url = url _log.debug('Number of urls: %d', len(url)) - coro = self._handle_request_async( - url, return_json=return_json, need_login=need_login - ) + coro = self._handle_request_async(url, need_login=need_login) return self._run_sync_coro(coro) @staticmethod @@ -904,50 +901,24 @@ def _run_sync_coro(self, coro): # ---------- async methods ---------- - async def _handle_request_async( - self, url, return_json=False, need_login=False - ): + async def _handle_request_async(self, url, need_login=False): """Handle request.""" if self.session is not None: - response = await self._get_request_response( - url, self.session, return_json - ) + response = await self._get_request_response(url, self.session) elif need_login: raise _exceptions.AuthenticationError('You need to login first.') else: async with _ClientSession() as sess: - response = await self._get_request_response( - url, sess, return_json - ) + response = await self._get_request_response(url, sess) return response - async def _get_request_response(self, url, session, return_json): + async def _get_request_response(self, url, session): """Get request response.""" url = [url] if isinstance(url, str) else url try: - - async def fetch_with_limit(u): - async with self._semaphore: - _log.debug('Fetching URL: %s', u) - return await session.get( - u, ssl=False, timeout=self._timeout - ) - response = await _asyncio.gather(*[ - fetch_with_limit(u) for u in url + self._fetch_url(session, u) for u in url ]) - if any([not r.ok for r in response]): - return None - if return_json: - jsons = list() - for res in response: - try: - data = await res.json() - jsons.append(data) - except ValueError: - _log.error('Error with URL %s', res.url) - jsons.append(None) - response = jsons except _asyncio.TimeoutError as err: raise _exceptions.TimeoutError( 'Timeout reached. Try to increase `timeout`.' 
@@ -955,16 +926,30 @@ async def fetch_with_limit(u): except _aio_exceptions.ClientPayloadError as err: raise _exceptions.PayloadError( "Payload Error. Increasing `timeout` won't help. " - 'Try:\n - decreasing `query_bin_interval`;\n - decrease the ' - 'time interval for the aquisition;\n - change the ' - '`query_max_concurrency` parameter\n - or using the ' - 'threaded or serial options for `query_method`.' + 'Try:\n - decreasing `query_bin_interval`;' + '\n - decrease the time interval for the aquisition;' + '\n - or changing the `query_max_concurrency` parameter' ) from err if len(url) == 1: return response[0] return response + async def _fetch_url(self, session, url): + _log.debug('Fetching URL: %s', url) + async with self._semaphore: + async with session.get(url, timeout=self._timeout) as response: + if response.status != 200: + return None + try: + return await response.json() + except _aio_exceptions.ContentTypeError: + # for cases where response returns html (self.connected). + return await response.text() + except ValueError: + _log.error('Error with URL %s', response.url) + return None + async def _create_session(self, url, headers, payload, ssl): """Create session and handle login.""" session = _ClientSession() From e3a713b64f70233361c4b0445031718b28c8978c Mon Sep 17 00:00:00 2001 From: Fernando Date: Wed, 1 Apr 2026 16:24:38 -0300 Subject: [PATCH 28/60] BUG: (CLTARCH.CLT) Removes deprecated `return_json` flag from request call --- siriuspy/siriuspy/clientarch/pvarch.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/siriuspy/siriuspy/clientarch/pvarch.py b/siriuspy/siriuspy/clientarch/pvarch.py index 07f8087d5..501a6331a 100644 --- a/siriuspy/siriuspy/clientarch/pvarch.py +++ b/siriuspy/siriuspy/clientarch/pvarch.py @@ -821,7 +821,7 @@ def update(self, timeout=None): pvn2idcs[pvn] = _np.arange(ini, end) try: - resps = self.connector.make_request(all_urls, return_json=True) + resps = self.connector.make_request(all_urls) finally: if 
timeout is not None: self.timeout = timeout0 From 3ebedb0814521f52ff5b609db7d61aa2c148f427 Mon Sep 17 00:00:00 2001 From: Fernando Date: Wed, 1 Apr 2026 16:29:44 -0300 Subject: [PATCH 29/60] BUG: (CLTARCH.CLT) fix way of checking for connection. --- siriuspy/siriuspy/clientarch/client.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/siriuspy/siriuspy/clientarch/client.py b/siriuspy/siriuspy/clientarch/client.py index bbea9889a..ff1377ebb 100644 --- a/siriuspy/siriuspy/clientarch/client.py +++ b/siriuspy/siriuspy/clientarch/client.py @@ -143,8 +143,7 @@ def connected(self): if not self._loop_alive(): return False try: - resp = self.make_request(self._url + '/mgmt') - return resp.status == 200 + return bool(self.make_request(self._url + '/mgmt')) except _urllib.error.URLError: return False From 8905e616b2e84bf2f8c944aea4fc450057517ac4 Mon Sep 17 00:00:00 2001 From: Fernando Date: Wed, 1 Apr 2026 16:36:54 -0300 Subject: [PATCH 30/60] ENH: (CLTARCH.CLT) Adds support for fetching PV type info. Introduces a method to retrieve process variable type information, enhancing the client's capability to introspect PV details, such as archiving policyName. --- siriuspy/siriuspy/clientarch/client.py | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/siriuspy/siriuspy/clientarch/client.py b/siriuspy/siriuspy/clientarch/client.py index ff1377ebb..d3a48a7b5 100644 --- a/siriuspy/siriuspy/clientarch/client.py +++ b/siriuspy/siriuspy/clientarch/client.py @@ -372,6 +372,20 @@ def get_recently_modified_pvs(self, max_num_pvs=None, epoch_time=True): return None if not resp else resp + def get_pv_type_info(self, pvname: str): + """Get PV Type Info. + + Args: + pvname (str): Name of the PV to get type info. + + Returns: + list: List of dictionary with PVs details. 
+ + """ + url = self._create_url(method='getPVTypeInfo', pv=pvname) + resp = self.make_request(url) + return None if not resp else resp + def pause_pvs(self, pvnames): """Pause PVs.""" if not isinstance(pvnames, (list, tuple)): From ce001d28175b9a1eca1736899533ddf3c339fd79 Mon Sep 17 00:00:00 2001 From: Fernando Date: Wed, 1 Apr 2026 16:42:24 -0300 Subject: [PATCH 31/60] ENH: (CLTARCH) Adds `query_max_concurrency` property to PVDataSet and PVData. Exposes a property for configuring the maximum query concurrency at both the dataset and data levels, improving convenience for users to control global connector settings through either interface. --- siriuspy/siriuspy/clientarch/pvarch.py | 28 ++++++++++++++++++++++++++ 1 file changed, 28 insertions(+) diff --git a/siriuspy/siriuspy/clientarch/pvarch.py b/siriuspy/siriuspy/clientarch/pvarch.py index 501a6331a..af209ee90 100644 --- a/siriuspy/siriuspy/clientarch/pvarch.py +++ b/siriuspy/siriuspy/clientarch/pvarch.py @@ -319,6 +319,20 @@ def query_bin_interval(self): def query_bin_interval(self, new_intvl): self._query_bin_interval = new_intvl + @property + def query_max_concurrency(self): + """Query max concurrency. + + This is a global setting for the connector, so all PVData objects + share it, but we allow it to be set through PVDataSet for convenience. + + """ + return self.connector.query_max_concurrency + + @query_max_concurrency.setter + def query_max_concurrency(self, new_intvl): + self.connector.query_max_concurrency = new_intvl + @property def timestamp_start(self): """Timestamp start.""" @@ -629,6 +643,20 @@ def query_bin_interval(self, value): for pvn, val in zip(self._pvnames, value): # noqa: B905 self._pvdata[pvn].query_bin_interval = val + @property + def query_max_concurrency(self): + """Query max concurrency. + + This is a global setting for the connector, so all PVData objects + share it, but we allow it to be set through PVDataSet for convenience. 
+ + """ + return self.connector.query_max_concurrency + + @query_max_concurrency.setter + def query_max_concurrency(self, new_intvl): + self.connector.query_max_concurrency = new_intvl + @property def time_start(self): """Start time.""" From 3d51d8d31e5837c391850a7187574cbf9691f85b Mon Sep 17 00:00:00 2001 From: Fernando Date: Wed, 1 Apr 2026 17:01:03 -0300 Subject: [PATCH 32/60] ENH: (CLTARCH.CL) Adds timezone support to time interval generation Ensures generated time intervals consistently use the specified timezone, defaulting to the start time's timezone if none is provided. Updates tests to validate correct timezone handling. Improves interoperability and prevents subtle bugs due to timezone mismatches. --- siriuspy/siriuspy/clientarch/time.py | 24 +++++++++++++------- siriuspy/tests/clientarch/test_clientarch.py | 9 ++++++-- 2 files changed, 23 insertions(+), 10 deletions(-) diff --git a/siriuspy/siriuspy/clientarch/time.py b/siriuspy/siriuspy/clientarch/time.py index 3248e8e15..bb20f64a9 100644 --- a/siriuspy/siriuspy/clientarch/time.py +++ b/siriuspy/siriuspy/clientarch/time.py @@ -57,6 +57,9 @@ def __new__(cls, *args, **kwargs): # noqa: D417, C901 """ if not args and not kwargs: raise TypeError('no arguments found to build Time object') + if len(args) == 2: + kwargs['tzinfo'] = args[1] + args = args[:1] if len(args) == 1: arg = args[0] dic_ = { @@ -84,9 +87,7 @@ def __new__(cls, *args, **kwargs): # noqa: D417, C901 args = args[:7] if not {'timestamp', 'timestamp_string'} - kwargs.keys(): - raise TypeError( - 'Conflicting positional and keyword arguments.' 
- ) + raise TypeError('Conflicting positional and keyword arguments.') tz = kwargs.get('tzinfo') tzl = _datetime.now().astimezone().tzinfo @@ -102,8 +103,8 @@ def __new__(cls, *args, **kwargs): # noqa: D417, C901 ts_fmt = kwargs.get( 'timestamp_format', Time._DEFAULT_TIMESTAMP_FORMAT ) - return super().strptime(ts_str, ts_fmt).replace( - tzinfo=tz or tzl + return ( + super().strptime(ts_str, ts_fmt).replace(tzinfo=tz or tzl) ) except ValueError: tim = super().fromisoformat(ts_str) @@ -146,7 +147,11 @@ def conv_to_epoch(time, datetime_format): def get_time_intervals( - time_start: Time, time_stop: Time, interval: int, return_isoformat=False + time_start: Time, + time_stop: Time, + interval: int, + return_isoformat=False, + tzinfo=None, ): """Break `time_start` to `time_stop` in intervals of `interval` seconds. @@ -155,17 +160,20 @@ def get_time_intervals( time_stop (Time): stop time. interval (int): interval duration in seconds. return_isoformat (bool): return in iso8601 format. + tzinfo (tzinfo): timezone info. Defaults to None, which means using + the timezone of `time_start`. Returns: start_time (Time|str | list[Time|str]): start times. stop_time (Time|str | list[Time|str]): stop times. 
""" + tzinfo = tzinfo or time_start.tzinfo t_start = time_start.timestamp() t_stop = time_stop.timestamp() t_start = _np.arange(t_start, t_stop, interval) t_stop = _np.r_[t_start[1:], t_stop] - t_start = [Time(t) for t in t_start] - t_stop = [Time(t) for t in t_stop] + t_start = [Time(t, tzinfo=tzinfo) for t in t_start] + t_stop = [Time(t, tzinfo=tzinfo) for t in t_stop] if return_isoformat: t_start = [t.get_iso8601() for t in t_start] t_stop = [t.get_iso8601() for t in t_stop] diff --git a/siriuspy/tests/clientarch/test_clientarch.py b/siriuspy/tests/clientarch/test_clientarch.py index 90c39bc6e..51230a4c5 100644 --- a/siriuspy/tests/clientarch/test_clientarch.py +++ b/siriuspy/tests/clientarch/test_clientarch.py @@ -116,7 +116,8 @@ def test_constructor(self): def test_get_time_intervals(self): """Test get_time_intervals.""" - time_start = Time(2026, 1, 13, 0, 0, 0, 345) + tz_info = datetime.timezone(datetime.timedelta(seconds=-3 * 3600)) + time_start = Time(2026, 1, 13, 0, 0, 0, 345, tzinfo=tz_info) time_stop = time_start + 24 * 3600 interval = 3600 * 10 @@ -131,7 +132,11 @@ def test_get_time_intervals(self): '2026-01-14T00:00:00.000345-03:00', ] tst, tsp = get_time_intervals( - time_start, time_stop, interval, return_isoformat=True + time_start, + time_stop, + interval, + return_isoformat=True, + tzinfo=tz_info, ) self.assertEqual(tst, tst_corr) self.assertEqual(tsp, tsp_corr) From 9a14fbf052f4f83175b0e1aa51255944c663213d Mon Sep 17 00:00:00 2001 From: Fernando Date: Wed, 1 Apr 2026 17:14:10 -0300 Subject: [PATCH 33/60] BUG: (CLTARCH) fix get_iso8601 to return correct timezone. Forces use of the object's timezone when converting to ISO 8601, preventing unintended local timezone conversions and improving consistency in datetime serialization. 
--- siriuspy/siriuspy/clientarch/time.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/siriuspy/siriuspy/clientarch/time.py b/siriuspy/siriuspy/clientarch/time.py index bb20f64a9..6f201c643 100644 --- a/siriuspy/siriuspy/clientarch/time.py +++ b/siriuspy/siriuspy/clientarch/time.py @@ -116,7 +116,7 @@ def __new__(cls, *args, **kwargs): # noqa: D417, C901 def get_iso8601(self): """Get iso8601 format.""" - return self.astimezone().isoformat() + return self.astimezone(self.tzinfo).isoformat() def __add__(self, other): """Addition.""" From be3f193dccce22441f783330ee4a70386a674181 Mon Sep 17 00:00:00 2001 From: Fernando Date: Wed, 1 Apr 2026 18:18:40 -0300 Subject: [PATCH 34/60] ENH: (CLTARCH) Adds string representations for data and dataset classes Introduces human-readable __str__ methods to data and dataset classes, enabling quick inspection of their key properties and statistics. Improves debugging and logging by providing formatted summaries of state and content. --- siriuspy/siriuspy/clientarch/pvarch.py | 100 ++++++++++++++++++++++++- 1 file changed, 98 insertions(+), 2 deletions(-) diff --git a/siriuspy/siriuspy/clientarch/pvarch.py b/siriuspy/siriuspy/clientarch/pvarch.py index af209ee90..14b772883 100644 --- a/siriuspy/siriuspy/clientarch/pvarch.py +++ b/siriuspy/siriuspy/clientarch/pvarch.py @@ -273,6 +273,48 @@ def __init__(self, pvname, connector=None, offline_data=False): self._processing_type_param1 = None self._processing_type_param2 = 3.0 # number of sigma + def __str__(self): + """.""" + stg = '' + tss = self.time_start + tss = tss.get_iso8601() if tss else 'Not Defined.' + tsp = self.time_stop + tsp = tsp.get_iso8601() if tsp else 'Not Defined.' 
+ stg += '{:<30s}: {:}\n'.format('pvname', self.pvname) + stg += '{:<30s}: {:}\n'.format('time_start', tss) + stg += '{:<30s}: {:}\n'.format('time_stop', tsp) + stg += '{:<30s}: {:d}\n'.format( + 'query_bin_interval', self.query_bin_interval + ) + stg += '{:<30s}: {:}\n'.format( + 'query_max_concurrency', self.query_max_concurrency + ) + prty = self.processing_type + pr1 = self.processing_type_param1 + stg += '{:<30s}: {:}\n'.format( + 'processing_type', prty if prty else "''" + ) + if prty == self.ProcessingTypes.SelectByChange: + pr1 = 'None' if pr1 is None else f'{pr1:.2g}' + stg += '{:<30s}: {:}\n'.format('processing_type_param1', pr1) + elif prty != self.ProcessingTypes.None_: + pr1 = 'None' if pr1 is None else f'{pr1:d}' + stg += '{:<30s}: {:}\n'.format('processing_type_param1', pr1) + if prty in ( + self.ProcessingTypes.Outliers, + self.ProcessingTypes.IgnoreOutliers, + ): + stg += '{:<30s}: {:.2g}\n'.format( + 'processing_type_param2', self.processing_type_param2 + ) + + stg += '{:<30s}'.format('Data Length: ') + if self.timestamp is not None: + stg += '{:d}\n'.format(len(self.timestamp)) + else: + stg += 'Not loaded yet.\n' + return stg + @property def pvname(self): """PVName.""" @@ -615,6 +657,61 @@ def __init__(self, pvnames, connector=None, offline_data=False): self._pvnames = pvnames self._pvdata = self._init_pvdatas(pvnames, self.connector) + def __str__(self): + """.""" + stg = '' + tmpl = '{:<30s} {:^30s} {:^30s} {:^15s} {:^15s} ' + tmpl += '{:^12s} {:^10s} {:^10s} {:^10s}\n' + stg += tmpl.format( + 'PV Name', + 'Time Start', + 'Time Stop', + 'Bin Interval', + 'Max Concurrency', + 'Proc. 
Type', + 'Param1', + 'Param2', + 'Data Length', + ) + for pvn, pvd in self._pvdata.items(): + prty = pvd.processing_type + prty = prty if prty else "''" + + pr1 = pvd.processing_type_param1 + pr1s = 'None' if pr1 is None else f'{pr1:d}' + if prty != self.ProcessingTypes.SelectByChange: + pr1s = 'None' if pr1 is None else f'{pr1:.1g}' + elif prty != self.ProcessingTypes.None_: + pr1s = 'N/A' + + pr2 = 'N/A' + if prty in ( + self.ProcessingTypes.Outliers, + self.ProcessingTypes.IgnoreOutliers, + ): + pr2 = f'{pvd.processing_type_param2:.1g}' + + dlen = 'Not Loaded' + if pvd.timestamp is not None: + stg += f'{len(pvd.timestamp):d}' + + tss = pvd.time_start + tss = tss.get_iso8601() if tss else 'Not Def.' + tsp = pvd.time_stop + tsp = tsp.get_iso8601() if tsp else 'Not Def.' + stg += tmpl.format( + pvn, + tss, + tsp, + f'{pvd.query_bin_interval:d}', + f'{pvd.query_max_concurrency:d}', + prty, + pr1s, + pr2, + dlen, + ) + return stg + @property def pvnames(self): """PV names.""" @@ -826,8 +923,7 @@ def update(self, timeout=None): pvd = self._pvdata[pvn] if None in (pvd.timestamp_start, pvd.timestamp_stop): print( - f'Start and stop times not defined for PV {pvn}' - '! Aborting.' + f'Start and stop times not defined for PV {pvn}! Aborting.' ) if timeout is not None: self.timeout = timeout0 From 7951282c2da6c8e1ff78c8708674294dc49e000a Mon Sep 17 00:00:00 2001 From: Fernando Date: Thu, 2 Apr 2026 11:14:08 -0300 Subject: [PATCH 35/60] ENH: (CLTARCH) Adds timeout property to allow connector timeout configuration Enables setting and retrieving the global connection timeout via both PVData and PVDataSet, improving flexibility and convenience for adjusting connector timeout behavior from client code. 
--- siriuspy/siriuspy/clientarch/pvarch.py | 28 ++++++++++++++++++++++++++ 1 file changed, 28 insertions(+) diff --git a/siriuspy/siriuspy/clientarch/pvarch.py b/siriuspy/siriuspy/clientarch/pvarch.py index 14b772883..e9b7a30f7 100644 --- a/siriuspy/siriuspy/clientarch/pvarch.py +++ b/siriuspy/siriuspy/clientarch/pvarch.py @@ -375,6 +375,20 @@ def query_max_concurrency(self): def query_max_concurrency(self, new_intvl): self.connector.query_max_concurrency = new_intvl + @property + def timeout(self): + """Connection timeout. + + This is a global setting for the connector, so all PVData objects + share it, but we allow it to be set through PVDataSet for convenience. + + """ + return self.connector.timeout + + @timeout.setter + def timeout(self, value): + self.connector.timeout = float(value) + @property def timestamp_start(self): """Timestamp start.""" @@ -754,6 +768,20 @@ def query_max_concurrency(self): def query_max_concurrency(self, new_intvl): self.connector.query_max_concurrency = new_intvl + @property + def timeout(self): + """Connection timeout. + + This is a global setting for the connector, so all PVData objects + share it, but we allow it to be set through PVDataSet for convenience. + + """ + return self.connector.timeout + + @timeout.setter + def timeout(self, value): + self.connector.timeout = float(value) + @property def time_start(self): """Start time.""" From a7c63430dd84a10b98c042f55f4634987afeb924 Mon Sep 17 00:00:00 2001 From: Fernando Date: Thu, 2 Apr 2026 14:05:20 -0300 Subject: [PATCH 36/60] ENH: (CLTARCH) Improves PV data string representation formatting Clarifies and reorganizes the string output for PV data objects by grouping connector and data properties, adding units to parameter labels, and enhancing readability with indentation. Makes the output more informative for users inspecting PV data properties. 
--- siriuspy/siriuspy/clientarch/pvarch.py | 47 ++++++++++++++++---------- 1 file changed, 30 insertions(+), 17 deletions(-) diff --git a/siriuspy/siriuspy/clientarch/pvarch.py b/siriuspy/siriuspy/clientarch/pvarch.py index e9b7a30f7..f465e64f0 100644 --- a/siriuspy/siriuspy/clientarch/pvarch.py +++ b/siriuspy/siriuspy/clientarch/pvarch.py @@ -276,39 +276,48 @@ def __init__(self, pvname, connector=None, offline_data=False): def __str__(self): """.""" stg = '' + stg += 'Connector Properties:\n' + stg += ' {:<30s}: {:d}\n'.format( + 'query_max_concurrency: ', self.query_max_concurrency + ) + stg += ' {:<30s}: {:.1f}\n'.format('timeout [s]', self.timeout) + stg += '\nPV Data Properties:\n' + tss = self.time_start tss = tss.get_iso8601() if tss else 'Not Defined.' tsp = self.time_stop tsp = tsp.get_iso8601() if tsp else 'Not Defined.' - stg += '{:<30s}: {:}\n'.format('pvname', self.pvname) - stg += '{:<30s}: {:}\n'.format('time_start', tss) - stg += '{:<30s}: {:}\n'.format('time_stop', tsp) - stg += '{:<30s}: {:d}\n'.format( - 'query_bin_interval', self.query_bin_interval - ) - stg += '{:<30s}: {:}\n'.format( - 'query_max_concurrency', self.query_max_concurrency + stg += ' {:<30s}: {:}\n'.format('pvname', self.pvname) + stg += ' {:<30s}: {:}\n'.format('time_start', tss) + stg += ' {:<30s}: {:}\n'.format('time_stop', tsp) + stg += ' {:<30s}: {:d}\n'.format( + 'query_bin_interval [s]', self.query_bin_interval ) prty = self.processing_type pr1 = self.processing_type_param1 - stg += '{:<30s}: {:}\n'.format( + stg += ' {:<30s}: {:}\n'.format( 'processing_type', prty if prty else "''" ) if prty == self.ProcessingTypes.SelectByChange: pr1 = 'None' if pr1 is None else f'{pr1:.2g}' - stg += '{:<30s}: {:}\n'.format('processing_type_param1', pr1) + stg += ' {:<30s}: {:}\n'.format( + 'processing_type_param1 [val. 
units]', pr1 + ) elif prty != self.ProcessingTypes.None_: pr1 = 'None' if pr1 is None else f'{pr1:d}' - stg += '{:<30s}: {:}\n'.format('processing_type_param1', pr1) + stg += ' {:<30s}: {:}\n'.format( + 'processing_type_param1 [s]', pr1 + ) if prty in ( self.ProcessingTypes.Outliers, self.ProcessingTypes.IgnoreOutliers, ): - stg += '{:<30s}: {:.2g}\n'.format( - 'processing_type_param2', self.processing_type_param2 + stg += ' {:<30s}: {:.2g}\n'.format( + 'processing_type_param2 [std/mean]', + self.processing_type_param2, ) - stg += '{:<30s}'.format('Data Length: ') + stg += ' {:<30s}'.format('Data Length: ') if self.timestamp is not None: stg += '{:d}\n'.format(len(self.timestamp)) else: @@ -674,14 +683,19 @@ def __init__(self, pvnames, connector=None, offline_data=False): def __str__(self): """.""" stg = '' - tmpl = '{:<30s} {:^30s} {:^30s} {:^15s} {:^15s} ' + stg += 'Connector Properties:\n' + stg += ' {:<30s}: {:d}\n'.format( + 'query_max_concurrency', self.query_max_concurrency + ) + stg += ' {:<30s}: {:.1f}\n'.format('timeout [s]', self.timeout) + stg += '\nPV Data Properties:\n' + tmpl = ' {:<30s} {:^30s} {:^30s} {:^15s} ' tmpl += '{:^12s} {:^10s} {:^10s} {:^10s}\n' stg += tmpl.format( 'PV Name', 'Time Start', 'Time Stop', 'Bin Interval', - 'Max Concurrency', 'Proc. Type', 'Param1', 'Param2', @@ -718,7 +732,6 @@ def __str__(self): tss, tsp, f'{pvd.query_bin_interval:d}', - f'{pvd.query_max_concurrency:d}', prty, pr1s, pr2, From 96f51532ae05d5f6c692114a1098ca20a3a2dbef Mon Sep 17 00:00:00 2001 From: Fernando Date: Thu, 2 Apr 2026 14:06:59 -0300 Subject: [PATCH 37/60] BUG: (CLTARCH.CLT) Fixes response handling for single URL requests Ensures correct identification of single URL inputs, improving clarity and reliability when distinguishing between single and multiple URL requests. Prevents potential errors in response unpacking logic. This was causing an error for get_data when a single url was being requested. 
--- siriuspy/siriuspy/clientarch/client.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/siriuspy/siriuspy/clientarch/client.py b/siriuspy/siriuspy/clientarch/client.py index d3a48a7b5..638dda0a4 100644 --- a/siriuspy/siriuspy/clientarch/client.py +++ b/siriuspy/siriuspy/clientarch/client.py @@ -927,7 +927,8 @@ async def _handle_request_async(self, url, need_login=False): async def _get_request_response(self, url, session): """Get request response.""" - url = [url] if isinstance(url, str) else url + single = isinstance(url, str) + url = [url] if single else url try: response = await _asyncio.gather(*[ self._fetch_url(session, u) for u in url @@ -944,7 +945,7 @@ async def _get_request_response(self, url, session): '\n - or changing the `query_max_concurrency` parameter' ) from err - if len(url) == 1: + if single: return response[0] return response From 741326e550825a988ead741b3ae66c0ea0d67142 Mon Sep 17 00:00:00 2001 From: Fernando Date: Thu, 2 Apr 2026 14:09:02 -0300 Subject: [PATCH 38/60] MNT: (CLTARCH.CLT) Defers debug logging until after HTTP request initiate Moves the debug log statement to occur after acquiring the lock. 
--- siriuspy/siriuspy/clientarch/client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/siriuspy/siriuspy/clientarch/client.py b/siriuspy/siriuspy/clientarch/client.py index 638dda0a4..1e936561b 100644 --- a/siriuspy/siriuspy/clientarch/client.py +++ b/siriuspy/siriuspy/clientarch/client.py @@ -950,8 +950,8 @@ async def _get_request_response(self, url, session): return response async def _fetch_url(self, session, url): - _log.debug('Fetching URL: %s', url) async with self._semaphore: + _log.debug('Fetching URL: %s', url) async with session.get(url, timeout=self._timeout) as response: if response.status != 200: return None From 899a4c60ba4685a03bc9c298ff7ef2a475bb55a5 Mon Sep 17 00:00:00 2001 From: Fernando Date: Thu, 2 Apr 2026 14:10:39 -0300 Subject: [PATCH 39/60] MNT: (CLTARCH.CLT) Removes timeout when awaiting async future result. Eliminates the explicit timeout when retrieving the result of an async future. Timeout is controlled at the individual request level. --- siriuspy/siriuspy/clientarch/client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/siriuspy/siriuspy/clientarch/client.py b/siriuspy/siriuspy/clientarch/client.py index 1e936561b..1ac4c492c 100644 --- a/siriuspy/siriuspy/clientarch/client.py +++ b/siriuspy/siriuspy/clientarch/client.py @@ -910,7 +910,7 @@ def _run_sync_coro(self, coro): if not self._thread.is_alive(): raise RuntimeError('Library is shut down') future = _asyncio.run_coroutine_threadsafe(coro, self._loop) - return future.result(timeout=self._timeout) + return future.result() # ---------- async methods ---------- From 1da30b7e8ef67a11e6779a2a108753b4323de8b1 Mon Sep 17 00:00:00 2001 From: Fernando Date: Thu, 2 Apr 2026 14:34:40 -0300 Subject: [PATCH 40/60] API: (CLTARCH) Renames timeout to query_timeout for clarity Updates all references, method signatures, and documentation to consistently use 'query_timeout' instead of 'timeout' for query-related timeouts. 
Improves code clarity by distinguishing per-query timeouts from other potential timeout types and avoids ambiguity. --- siriuspy/siriuspy/clientarch/client.py | 42 ++++---- siriuspy/siriuspy/clientarch/devices.py | 4 +- siriuspy/siriuspy/clientarch/pvarch.py | 97 ++++++++----------- .../siriuspy/machshift/gensumm_macreport.py | 4 +- .../siriuspy/machshift/savedata_macreport.py | 2 +- 5 files changed, 67 insertions(+), 82 deletions(-) diff --git a/siriuspy/siriuspy/clientarch/client.py b/siriuspy/siriuspy/clientarch/client.py index 1ac4c492c..6432b8df0 100644 --- a/siriuspy/siriuspy/clientarch/client.py +++ b/siriuspy/siriuspy/clientarch/client.py @@ -18,7 +18,7 @@ import urllib3 as _urllib3 from aiohttp import ( client_exceptions as _aio_exceptions, - ClientSession as _ClientSession + ClientSession as _ClientSession, ) from mathphys.functions import get_namedtuple as _get_namedtuple @@ -37,7 +37,7 @@ class ClientArchiver: DEF_QUERY_BIN_INTERVAL = 12 * 60 * 60 # 12h DEF_QUERY_MAX_CONCURRENCY = 100 # maximum number of concurrent queries - DEFAULT_TIMEOUT = 5.0 # [s] + DEFAULT_QUERY_TIMEOUT = 5.0 # [s] SERVER_URL = _envars.SRVURL_ARCHIVER ENDPOINT = '/mgmt/bpl' @@ -93,13 +93,13 @@ def __delete__(self): self.logout() self.shutdown() - def __init__(self, server_url=None, timeout=None): + def __init__(self, server_url=None, query_timeout=None): """Initialize.""" - timeout = timeout or ClientArchiver.DEFAULT_TIMEOUT + query_timeout = query_timeout or ClientArchiver.DEFAULT_QUERY_TIMEOUT self.session = None self._aiohttp_session = None self._requests_session = None - self._timeout = timeout + self._query_timeout = query_timeout self._url = server_url or self.SERVER_URL self._request_url = None self._thread = self._loop = self._semaphore = None @@ -148,14 +148,14 @@ def connected(self): return False @property - def timeout(self): - """Connection timeout.""" - return self._timeout + def query_timeout(self): + """Request timeout for each query.""" + return self._query_timeout 
- @timeout.setter - def timeout(self, value): - """Set connection timeout.""" - self._timeout = float(value) + @query_timeout.setter + def query_timeout(self, value): + """Set request timeout for each query.""" + self._query_timeout = float(value) @property def server_url(self): @@ -939,7 +939,7 @@ async def _get_request_response(self, url, session): ) from err except _aio_exceptions.ClientPayloadError as err: raise _exceptions.PayloadError( - "Payload Error. Increasing `timeout` won't help. " + "Payload Error. Increasing `query_timeout` won't help. " 'Try:\n - decreasing `query_bin_interval`;' '\n - decrease the time interval for the aquisition;' '\n - or changing the `query_max_concurrency` parameter' @@ -952,23 +952,27 @@ async def _get_request_response(self, url, session): async def _fetch_url(self, session, url): async with self._semaphore: _log.debug('Fetching URL: %s', url) - async with session.get(url, timeout=self._timeout) as response: - if response.status != 200: + async with session.get(url, timeout=self._query_timeout) as resp: + if resp.status != 200: return None try: - return await response.json() + return await resp.json() except _aio_exceptions.ContentTypeError: # for cases where response returns html (self.connected). 
- return await response.text() + return await resp.text() except ValueError: - _log.error('Error with URL %s', response.url) + _log.error('Error with URL %s', resp.url) return None async def _create_session(self, url, headers, payload, ssl): """Create session and handle login.""" session = _ClientSession() async with session.post( - url, headers=headers, data=payload, ssl=ssl, timeout=self._timeout + url, + headers=headers, + data=payload, + ssl=ssl, + timeout=self._query_timeout, ) as response: content = await response.content.read() authenticated = b'authenticated' in content diff --git a/siriuspy/siriuspy/clientarch/devices.py b/siriuspy/siriuspy/clientarch/devices.py index bc911a10c..49766c863 100644 --- a/siriuspy/siriuspy/clientarch/devices.py +++ b/siriuspy/siriuspy/clientarch/devices.py @@ -59,9 +59,9 @@ def values(self): """Return retrieved orbit interpolated values.""" return self._values - def update(self, timeout=None): + def update(self, query_timeout=None): """Update state by retrieving data.""" - super().update(timeout=timeout) + super().update(query_timeout=query_timeout) # interpolate data self._times, self._values = self._interpolate_data() diff --git a/siriuspy/siriuspy/clientarch/pvarch.py b/siriuspy/siriuspy/clientarch/pvarch.py index f465e64f0..b3693e4c6 100644 --- a/siriuspy/siriuspy/clientarch/pvarch.py +++ b/siriuspy/siriuspy/clientarch/pvarch.py @@ -5,7 +5,7 @@ import numpy as _np from mathphys.functions import ( load_pickle as _load_pickle, - save_pickle as _save_pickle + save_pickle as _save_pickle, ) from .. import envars as _envars @@ -59,14 +59,19 @@ def is_offline_data(self): return self._offline_data @property - def timeout(self): - """Connection timeout.""" - return self.connector.timeout + def query_timeout(self): + """Request timeout for each query. 
- @timeout.setter - def timeout(self, value): - """Set connection timeout.""" - self.connector.timeout = float(value) + This is a global setting for the connector, so all PVData objects + share it, but we allow it to be set through PVDataSet for convenience. + + """ + return self.connector.query_timeout + + @query_timeout.setter + def query_timeout(self, value): + """Set request timeout for each query.""" + self.connector.query_timeout = float(value) @property def connected(self): @@ -189,19 +194,19 @@ def is_archived(self): return False return True - def update(self, timeout=None): # noqa: C901 + def update(self, query_timeout=None): # noqa: C901 """.""" self.connect() - if timeout is not None: - timeout0 = self.timeout - self.timeout = timeout + if query_timeout is not None: + query_timeout0 = self.query_timeout + self.query_timeout = query_timeout try: data = self.connector.get_pv_details(self.pvname) finally: - if timeout is not None: - self.timeout = timeout0 + if query_timeout is not None: + self.query_timeout = query_timeout0 if not data: return False @@ -280,7 +285,9 @@ def __str__(self): stg += ' {:<30s}: {:d}\n'.format( 'query_max_concurrency: ', self.query_max_concurrency ) - stg += ' {:<30s}: {:.1f}\n'.format('timeout [s]', self.timeout) + stg += ' {:<30s}: {:.1f}\n'.format( + 'query_timeout [s]', self.query_timeout + ) stg += '\nPV Data Properties:\n' tss = self.time_start @@ -384,20 +391,6 @@ def query_max_concurrency(self): def query_max_concurrency(self, new_intvl): self.connector.query_max_concurrency = new_intvl - @property - def timeout(self): - """Connection timeout. - - This is a global setting for the connector, so all PVData objects - share it, but we allow it to be set through PVDataSet for convenience. 
- - """ - return self.connector.timeout - - @timeout.setter - def timeout(self, value): - self.connector.timeout = float(value) - @property def timestamp_start(self): """Timestamp start.""" @@ -502,13 +495,13 @@ def processing_type_param2(self, new_param): ) self._processing_type_param2 = new_param - def update(self, timeout=None): + def update(self, query_timeout=None): """Update.""" self.connect() - if timeout is not None: - timeout0 = self.timeout - self.timeout = timeout + if query_timeout is not None: + query_timeout0 = self.query_timeout + self.query_timeout = query_timeout if None in (self.timestamp_start, self.timestamp_stop): print('Start and stop timestamps not defined! Aborting.') @@ -525,8 +518,8 @@ def update(self, timeout=None): proc_type_param2=self.processing_type_param2, ) finally: - if timeout is not None: - self.timeout = timeout0 + if query_timeout is not None: + self.query_timeout = query_timeout0 if not data: return @@ -687,7 +680,9 @@ def __str__(self): stg += ' {:<30s}: {:d}\n'.format( 'query_max_concurrency', self.query_max_concurrency ) - stg += ' {:<30s}: {:.1f}\n'.format('timeout [s]', self.timeout) + stg += ' {:<30s}: {:.1f}\n'.format( + 'query_timeout [s]', self.query_timeout + ) stg += '\nPV Data Properties:\n' tmpl = ' {:<30s} {:^30s} {:^30s} {:^15s} ' tmpl += '{:^12s} {:^10s} {:^10s} {:^10s}\n' @@ -781,20 +776,6 @@ def query_max_concurrency(self): def query_max_concurrency(self, new_intvl): self.connector.query_max_concurrency = new_intvl - @property - def timeout(self): - """Connection timeout. - - This is a global setting for the connector, so all PVData objects - share it, but we allow it to be set through PVDataSet for convenience. 
- - """ - return self.connector.timeout - - @timeout.setter - def timeout(self, value): - self.connector.timeout = float(value) - @property def time_start(self): """Start time.""" @@ -950,13 +931,13 @@ def archived(self): archived = set(self._pvnames) - set(self.not_archived) return list(archived) - def update(self, timeout=None): + def update(self, query_timeout=None): """Update.""" self.connect() - if timeout is not None: - timeout0 = self.timeout - self.timeout = timeout + if query_timeout is not None: + query_timeout0 = self.query_timeout + self.query_timeout = query_timeout all_urls = [] pvn2idcs = dict() @@ -966,8 +947,8 @@ def update(self, timeout=None): print( f'Start and stop times not defined for PV {pvn}! Aborting.' ) - if timeout is not None: - self.timeout = timeout0 + if query_timeout is not None: + self.query_timeout = query_timeout0 return urls = self.connector.get_request_url_for_get_data( pvn, @@ -988,8 +969,8 @@ def update(self, timeout=None): try: resps = self.connector.make_request(all_urls) finally: - if timeout is not None: - self.timeout = timeout0 + if query_timeout is not None: + self.query_timeout = query_timeout0 if not resps: return None diff --git a/siriuspy/siriuspy/machshift/gensumm_macreport.py b/siriuspy/siriuspy/machshift/gensumm_macreport.py index 7925aa7c9..89b0a20da 100644 --- a/siriuspy/siriuspy/machshift/gensumm_macreport.py +++ b/siriuspy/siriuspy/machshift/gensumm_macreport.py @@ -53,7 +53,7 @@ for intvl in intervals: macreports[intvl[0]] = MacReport() # macreports[intvl[0]].connector.server_url = 'https://archiver-temp.cnpem.br' # necessary for 2024 and early - macreports[intvl[0]].connector.timeout = 30 + macreports[intvl[0]].connector.query_timeout = 30 macreports[intvl[0]].time_start = intvl[0] macreports[intvl[0]].time_stop = intvl[1] macreports[intvl[0]].update() @@ -136,7 +136,7 @@ # programmed vs. 
delivered hours macr = MacReport() -macr.connector.timeout = 300 +macr.connector.query_timeout = 300 macr.time_start = Time(2024, 1, 1, 0, 0) macr.time_stop = Time(2024, 12, 31, 23, 59, 59) macr.update() diff --git a/siriuspy/siriuspy/machshift/savedata_macreport.py b/siriuspy/siriuspy/machshift/savedata_macreport.py index 2a8848e51..3f55cba92 100644 --- a/siriuspy/siriuspy/machshift/savedata_macreport.py +++ b/siriuspy/siriuspy/machshift/savedata_macreport.py @@ -10,7 +10,7 @@ # get data from interval macr = MacReport() -macr.connector.timeout = 300 +macr.connector.query_timeout = 300 macr.time_start = time_start macr.time_stop = time_stop macr.update() From 71ed5a62f1fb03dd0f40bf7ecb533ea0ae4985e3 Mon Sep 17 00:00:00 2001 From: Fernando Date: Thu, 2 Apr 2026 15:05:54 -0300 Subject: [PATCH 41/60] MNT: (CLTARCH.CLT) Refactors semaphore initialization for async requests Moves semaphore creation and cleanup into the asynchronous request handler to ensure concurrency control is scoped to each request, simplifying event loop and concurrency management. This avoids potential issues with event loop association and ensures correct concurrency behavior. 
--- siriuspy/siriuspy/clientarch/client.py | 9 ++------- 1 file changed, 2 insertions(+), 7 deletions(-) diff --git a/siriuspy/siriuspy/clientarch/client.py b/siriuspy/siriuspy/clientarch/client.py index 6432b8df0..78165e427 100644 --- a/siriuspy/siriuspy/clientarch/client.py +++ b/siriuspy/siriuspy/clientarch/client.py @@ -114,10 +114,6 @@ def connect(self): return self._loop = _asyncio.new_event_loop() - self._semaphore = _asyncio.Semaphore( - self._query_max_concurrency, loop=self._loop - ) # limit concurrent requests - self._thread = _Thread(target=self._run_event_loop, daemon=True) self._thread.start() @@ -205,9 +201,6 @@ def query_max_concurrency(self, new_val): + str(type(new_val)) ) self._query_max_concurrency = int(new_val) - self._semaphore = _asyncio.Semaphore( - self._query_max_concurrency, loop=self._loop - ) @property def last_requested_url(self): @@ -916,6 +909,7 @@ def _run_sync_coro(self, coro): async def _handle_request_async(self, url, need_login=False): """Handle request.""" + self._semaphore = _asyncio.Semaphore(self._query_max_concurrency) if self.session is not None: response = await self._get_request_response(url, self.session) elif need_login: @@ -923,6 +917,7 @@ async def _handle_request_async(self, url, need_login=False): else: async with _ClientSession() as sess: response = await self._get_request_response(url, sess) + self._semaphore = None return response async def _get_request_response(self, url, session): From dfbdd4db883ca59f5492ab36096d0773533b012f Mon Sep 17 00:00:00 2001 From: Fernando Date: Thu, 2 Apr 2026 16:19:04 -0300 Subject: [PATCH 42/60] DOC: (CLTARCH.CLT) Clarifies timeout error message for query operations --- siriuspy/siriuspy/clientarch/client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/siriuspy/siriuspy/clientarch/client.py b/siriuspy/siriuspy/clientarch/client.py index 78165e427..17ab38698 100644 --- a/siriuspy/siriuspy/clientarch/client.py +++ b/siriuspy/siriuspy/clientarch/client.py @@ 
-930,7 +930,7 @@ async def _get_request_response(self, url, session): ]) except _asyncio.TimeoutError as err: raise _exceptions.TimeoutError( - 'Timeout reached. Try to increase `timeout`.' + 'Timeout reached. Try to increase `query_timeout`.' ) from err except _aio_exceptions.ClientPayloadError as err: raise _exceptions.PayloadError( From b511a2bfce2420ab99d9a9745f7852cdec57371a Mon Sep 17 00:00:00 2001 From: Fernando Date: Thu, 2 Apr 2026 16:21:31 -0300 Subject: [PATCH 43/60] ENH: (CLTARC) Enforces integer type for query_bin_interval parameters Ensures query bin interval values are consistently cast to integer when set or used, improving type safety and preventing potential errors from passing float values where integers are expected. Updates documentation to reflect the expected integer type. --- siriuspy/siriuspy/clientarch/client.py | 6 +++--- siriuspy/siriuspy/clientarch/pvarch.py | 6 +++--- siriuspy/siriuspy/clientarch/time.py | 2 +- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/siriuspy/siriuspy/clientarch/client.py b/siriuspy/siriuspy/clientarch/client.py index 17ab38698..248e9573e 100644 --- a/siriuspy/siriuspy/clientarch/client.py +++ b/siriuspy/siriuspy/clientarch/client.py @@ -186,7 +186,7 @@ def query_bin_interval(self, new_intvl): 'expected argument of type float or int, got ' + str(type(new_intvl)) ) - self._query_bin_interval = new_intvl + self._query_bin_interval = int(new_intvl) @property def query_max_concurrency(self): @@ -422,7 +422,7 @@ def get_data( # noqa: D417 If it is a list or tuple, all PVs will be queried for each of the time intervals. In this case, it must have the same length as `timestamp_start`. - query_bin_interval (float): overwrites `self.query_bin_interval`. + query_bin_interval (int): overwrites `self.query_bin_interval`. Defaults to `self.query_bin_interval`. Maximum interval for queries. 
If `timestamp_stop - timestamp_start > query_bin_interval`, @@ -528,7 +528,7 @@ def get_request_url_for_get_data( # noqa: C901, D417 If it is a list or tuple, all PVs will be queried for each of the time intervals. In this case, it must have the same length as `timestamp_start`. - query_bin_interval (float): overwrites `self.query_bin_interval`. + query_bin_interval (int): overwrites `self.query_bin_interval`. Defaults to `self.query_bin_interval`. Maximum interval for queries. If `timestamp_stop - timestamp_start > query_bin_interval`, diff --git a/siriuspy/siriuspy/clientarch/pvarch.py b/siriuspy/siriuspy/clientarch/pvarch.py index b3693e4c6..b6398a1fa 100644 --- a/siriuspy/siriuspy/clientarch/pvarch.py +++ b/siriuspy/siriuspy/clientarch/pvarch.py @@ -375,7 +375,7 @@ def query_bin_interval(self): @query_bin_interval.setter def query_bin_interval(self, new_intvl): - self._query_bin_interval = new_intvl + self._query_bin_interval = int(new_intvl) @property def query_max_concurrency(self): @@ -754,8 +754,8 @@ def query_bin_interval(self): @query_bin_interval.setter def query_bin_interval(self, value): - if isinstance(value, int): - value = len(self._pvnames) * [value] + if isinstance(value, (int, float)): + value = len(self._pvnames) * [int(value)] if len(value) != len(self._pvnames): raise ValueError('value must have the same length as pvnames') diff --git a/siriuspy/siriuspy/clientarch/time.py b/siriuspy/siriuspy/clientarch/time.py index 6f201c643..4dab11d4b 100644 --- a/siriuspy/siriuspy/clientarch/time.py +++ b/siriuspy/siriuspy/clientarch/time.py @@ -170,7 +170,7 @@ def get_time_intervals( tzinfo = tzinfo or time_start.tzinfo t_start = time_start.timestamp() t_stop = time_stop.timestamp() - t_start = _np.arange(t_start, t_stop, interval) + t_start = _np.arange(t_start, t_stop, int(interval)) t_stop = _np.r_[t_start[1:], t_stop] t_start = [Time(t, tzinfo=tzinfo) for t in t_start] t_stop = [Time(t, tzinfo=tzinfo) for t in t_stop] From 
5b01ccb3f1a6094ec200213d6fd035fa9731a58e Mon Sep 17 00:00:00 2001 From: Fernando Date: Thu, 2 Apr 2026 16:26:09 -0300 Subject: [PATCH 44/60] BUG: (CLTARCH.MACHREP) Adapt MachReport updates for new interface of ClientArchiver. Sets the query bin interval for all remaining PVs and datasets to the total time window, reducing the number of queries and improving update performance. --- siriuspy/siriuspy/machshift/macreport.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/siriuspy/siriuspy/machshift/macreport.py b/siriuspy/siriuspy/machshift/macreport.py index 9689cfcb0..2b611f0ec 100644 --- a/siriuspy/siriuspy/machshift/macreport.py +++ b/siriuspy/siriuspy/machshift/macreport.py @@ -1070,17 +1070,24 @@ def update(self): self._update_log(log_msg.format(self._current_pv, _time.time()-_t0)) + # Set query_bin_interval for the rest of PVs to the total time to + # avoid multiple queries and speed up the process. + tot = self._time_stop - self._time_start + tot = tot.total_seconds() + 1 + # macshift, interlock and stability indicators for pvn in self._pvnames: if pvn == self._current_pv: continue _t0 = _time.time() + self._pvdata[pvn].query_bin_interval = tot self._pvdata[pvn].update() self._update_log(log_msg.format(pvn, _time.time()-_t0)) # ps for group, pvdataset in self._pvdataset.items(): _t0 = _time.time() + pvdataset.query_bin_interval = tot pvdataset.update() self._update_log(log_msg.format( 'SI PS '+group.capitalize(), _time.time()-_t0) From 165c9b8f3c58b5ce6a04404d3524e2fbee0cdb34 Mon Sep 17 00:00:00 2001 From: Fernando Date: Thu, 2 Apr 2026 16:42:22 -0300 Subject: [PATCH 45/60] BUG: (CLTARCH.TIME) Ensures correct subclass return for fromtimestamp in python <=3.9. Improves subclassing compatibility by ensuring that construction methods always return an instance of the correct subclass, even on Python versions prior to 3.9 where tz-aware methods may return a base datetime object. Enhances compatibility and consistency across Python versions. 
--- siriuspy/siriuspy/clientarch/time.py | 34 +++++++++++++++++++++++----- 1 file changed, 28 insertions(+), 6 deletions(-) diff --git a/siriuspy/siriuspy/clientarch/time.py b/siriuspy/siriuspy/clientarch/time.py index 4dab11d4b..d8a94c516 100644 --- a/siriuspy/siriuspy/clientarch/time.py +++ b/siriuspy/siriuspy/clientarch/time.py @@ -94,25 +94,47 @@ def __new__(cls, *args, **kwargs): # noqa: D417, C901 if 'datetime' in kwargs: dtim = kwargs['datetime'] tz = tz or dtim.tzinfo or tzl - return super().fromtimestamp(dtim.timestamp(), tz=tz) + obj = super().fromtimestamp(dtim.timestamp(), tz=tz) elif 'timestamp' in kwargs: - return super().fromtimestamp(kwargs['timestamp'], tz=tz or tzl) + obj = super().fromtimestamp(kwargs['timestamp'], tz=tz or tzl) elif 'timestamp_string' in kwargs: ts_str = kwargs['timestamp_string'] try: ts_fmt = kwargs.get( 'timestamp_format', Time._DEFAULT_TIMESTAMP_FORMAT ) - return ( + obj = ( super().strptime(ts_str, ts_fmt).replace(tzinfo=tz or tzl) ) except ValueError: - tim = super().fromisoformat(ts_str) + import sys as _sys + if _sys.version_info <= (3, 8): + from dateutil import parser as _dateutil_parser + tim = _dateutil_parser.parse(ts_str) + else: + tim = super().fromisoformat(ts_str) tz = tz or tim.tzinfo - return super().fromtimestamp(tim.timestamp(), tz=tz) + obj = super().fromtimestamp(tim.timestamp(), tz=tz) else: kwargs.setdefault('tzinfo', tzl) - return super().__new__(cls, *args, **kwargs) + obj = super().__new__(cls, *args, **kwargs) + + # NOTE: This if is necessary for python versions prior to 3.9. + # in this cases, calling super().fromtimestamp with tzinfo returns + # an object of datetime class. 
+ if not isinstance(obj, cls): + return super().__new__( + cls, + obj.year, + obj.month, + obj.day, + obj.hour, + obj.minute, + obj.second, + obj.microsecond, + tzinfo=obj.tzinfo, + ) + return obj def get_iso8601(self): """Get iso8601 format.""" From 0f401ded03dbb606efc524b35ac90fb89fee1723 Mon Sep 17 00:00:00 2001 From: Fernando Date: Thu, 2 Apr 2026 17:12:27 -0300 Subject: [PATCH 46/60] BUG: (CLTARCH.CLT) Prevents logout on missing session Adds a guard clause to avoid attempting logout when there is no active session. --- siriuspy/siriuspy/clientarch/client.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/siriuspy/siriuspy/clientarch/client.py b/siriuspy/siriuspy/clientarch/client.py index 248e9573e..2d378281e 100644 --- a/siriuspy/siriuspy/clientarch/client.py +++ b/siriuspy/siriuspy/clientarch/client.py @@ -231,6 +231,8 @@ def login(self, username, password): def logout(self): """Close login session.""" + if self.session is None: + return coro = self._close_session() resp = self._run_sync_coro(coro) self.session = None From 2d50dc1089925dcd945e17ec0a3d056d69c55c8b Mon Sep 17 00:00:00 2001 From: Fernando Date: Thu, 2 Apr 2026 17:14:30 -0300 Subject: [PATCH 47/60] BUG: (CLTARCH.CLT) Prevents SSL errors for IP-based URLs in client requests Wraps client sessions with a connector configured with ssl=False to allow requests to URLs with IP addresses without triggering SSL certificate errors. This change improves reliability when accessing resources over plain IP connections. 
--- siriuspy/siriuspy/clientarch/client.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/siriuspy/siriuspy/clientarch/client.py b/siriuspy/siriuspy/clientarch/client.py index 2d378281e..d2b56563f 100644 --- a/siriuspy/siriuspy/clientarch/client.py +++ b/siriuspy/siriuspy/clientarch/client.py @@ -18,7 +18,8 @@ import urllib3 as _urllib3 from aiohttp import ( client_exceptions as _aio_exceptions, - ClientSession as _ClientSession, + ClientSession as _ClSession, + TCPConnector as _TCPConn ) from mathphys.functions import get_namedtuple as _get_namedtuple @@ -917,7 +918,9 @@ async def _handle_request_async(self, url, need_login=False): elif need_login: raise _exceptions.AuthenticationError('You need to login first.') else: - async with _ClientSession() as sess: + # NOTE: we need to define a connector with ssl=False so that url + # with IP address can be requested without SSL errors. + async with _ClSession(connector=_TCPConn(ssl=False)) as sess: response = await self._get_request_response(url, sess) self._semaphore = None return response @@ -963,7 +966,9 @@ async def _fetch_url(self, session, url): async def _create_session(self, url, headers, payload, ssl): """Create session and handle login.""" - session = _ClientSession() + # NOTE: we need to define a connector with ssl=False so that url + # with IP address can be requested without SSL errors. + session = _ClSession(connector=_TCPConn(ssl=False)) async with session.post( url, headers=headers, From c2a411cddde211819aa155edb7bb5cc57a60db2e Mon Sep 17 00:00:00 2001 From: Fernando Date: Mon, 6 Apr 2026 09:23:59 -0300 Subject: [PATCH 48/60] ENH: (CLTARCH.CLT) Prompts for password if not provided in login Improves user experience by securely prompting for a password via the console when one is not supplied to the login method, avoiding the need to include sensitive information directly in code or scripts. 
--- siriuspy/siriuspy/clientarch/client.py | 21 +++++++++++++++++++-- 1 file changed, 19 insertions(+), 2 deletions(-) diff --git a/siriuspy/siriuspy/clientarch/client.py b/siriuspy/siriuspy/clientarch/client.py index d2b56563f..3c03569c2 100644 --- a/siriuspy/siriuspy/clientarch/client.py +++ b/siriuspy/siriuspy/clientarch/client.py @@ -7,6 +7,7 @@ """ import asyncio as _asyncio +import getpass as _getpass import logging as _log import math as _math import urllib as _urllib @@ -208,9 +209,25 @@ def last_requested_url(self): """.""" return self._request_url - def login(self, username, password): - """Open login session.""" + def login(self, username, password=None): + """Login to the Archiver server. + + Args: + username (str): Username to login. + password (str): Password to login. If not provided, it will be + (secretly) prompted in the console. + + Returns: + bool: True if login was successful, False otherwise. + """ + if self.session is not None: + return True headers = {'User-Agent': 'Mozilla/5.0'} + + if password is None: + password = _getpass.getpass( + prompt=f'Password for user {username}: ', stream=None + ) payload = {'username': username, 'password': password} url = self._create_url(method='login') From ecbad8eb5357b5a67aa59e723eed5383684b9d52 Mon Sep 17 00:00:00 2001 From: Fernando Date: Mon, 6 Apr 2026 09:43:25 -0300 Subject: [PATCH 49/60] BUG: (CLTARCH.CLT) Fix login in control room, where server is defined with IP address. Configures session to accept unsafe cookies when accessing servers via IP address, preventing SSL errors in environments like control rooms where domain names may not be used. 
--- siriuspy/siriuspy/clientarch/client.py | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/siriuspy/siriuspy/clientarch/client.py b/siriuspy/siriuspy/clientarch/client.py index 3c03569c2..8d2ad8ab1 100644 --- a/siriuspy/siriuspy/clientarch/client.py +++ b/siriuspy/siriuspy/clientarch/client.py @@ -20,6 +20,7 @@ from aiohttp import ( client_exceptions as _aio_exceptions, ClientSession as _ClSession, + CookieJar as _CookieJar, TCPConnector as _TCPConn ) from mathphys.functions import get_namedtuple as _get_namedtuple @@ -983,9 +984,14 @@ async def _fetch_url(self, session, url): async def _create_session(self, url, headers, payload, ssl): """Create session and handle login.""" - # NOTE: we need to define a connector with ssl=False so that url - # with IP address can be requested without SSL errors. - session = _ClSession(connector=_TCPConn(ssl=False)) + # NOTE: we need to define a connector with ssl=False and explicitly + # tell aiohttp to accept unsafe cookies so that url with IP address + # can be requested without SSL errors. This is needed in the control + # room, where the server is accessed through its IP address and not a + # domain name. + session = _ClSession( + connector=_TCPConn(ssl=False), cookie_jar=_CookieJar(unsafe=True) + ) async with session.post( url, headers=headers, From 357d93d3e6092f332792aacdf8a0ebed9052956f Mon Sep 17 00:00:00 2001 From: Fernando Date: Mon, 6 Apr 2026 10:15:17 -0300 Subject: [PATCH 50/60] BUG: (CLTARCH.CLT) Fix login in control room. Previous commit was not working. It is required to define the field `"Host"` in the header so that the server accepts the request. Otherwise an 404 error is launched. Thanks to Google AI for the help with this bug. Refactors session creation to set required headers internally, removes unused imports, and ensures previous sessions are properly logged out before new logins. 
Enhances compatibility with IP-based server access and prevents authentication issues by explicitly setting necessary headers. --- siriuspy/siriuspy/clientarch/client.py | 37 +++++++++++--------------- 1 file changed, 16 insertions(+), 21 deletions(-) diff --git a/siriuspy/siriuspy/clientarch/client.py b/siriuspy/siriuspy/clientarch/client.py index 8d2ad8ab1..13403da2a 100644 --- a/siriuspy/siriuspy/clientarch/client.py +++ b/siriuspy/siriuspy/clientarch/client.py @@ -20,7 +20,6 @@ from aiohttp import ( client_exceptions as _aio_exceptions, ClientSession as _ClSession, - CookieJar as _CookieJar, TCPConnector as _TCPConn ) from mathphys.functions import get_namedtuple as _get_namedtuple @@ -222,8 +221,7 @@ def login(self, username, password=None): bool: True if login was successful, False otherwise. """ if self.session is not None: - return True - headers = {'User-Agent': 'Mozilla/5.0'} + self.logout() if password is None: password = _getpass.getpass( @@ -232,9 +230,7 @@ def login(self, username, password=None): payload = {'username': username, 'password': password} url = self._create_url(method='login') - coro = self._create_session( - url, headers=headers, payload=payload, ssl=False - ) + coro = self._create_session(url, payload=payload) ret = self._run_sync_coro(coro) if ret is not None: self.session, authenticated = ret @@ -982,24 +978,23 @@ async def _fetch_url(self, session, url): _log.error('Error with URL %s', resp.url) return None - async def _create_session(self, url, headers, payload, ssl): + async def _create_session(self, url, payload): """Create session and handle login.""" - # NOTE: we need to define a connector with ssl=False and explicitly - # tell aiohttp to accept unsafe cookies so that url with IP address - # can be requested without SSL errors. This is needed in the control - # room, where the server is accessed through its IP address and not a - # domain name. 
- session = _ClSession( - connector=_TCPConn(ssl=False), cookie_jar=_CookieJar(unsafe=True) - ) + # NOTE: we need to define a connector with ssl=False so that url with + # IP address can be requested without SSL errors. This is needed in + # the control room, where the server is accessed through its IP + # address and not a domain name. + headers = { + 'User-Agent': 'Mozilla/5.0', + 'Host': 'cnpem.br', # NOTE: this is required (404 otherwise). + 'content-type': 'application/x-www-form-urlencoded', + } + + session = _ClSession(connector=_TCPConn(ssl=False)) async with session.post( - url, - headers=headers, - data=payload, - ssl=ssl, - timeout=self._query_timeout, + url, headers=headers, data=payload, timeout=self._query_timeout ) as response: - content = await response.content.read() + content = await response.read() authenticated = b'authenticated' in content return session, authenticated From e3d88df5de6a90b0ccc77e141e04d8882fc30c2b Mon Sep 17 00:00:00 2001 From: Fernando Date: Mon, 6 Apr 2026 11:38:24 -0300 Subject: [PATCH 51/60] ENH: (CLTARCH.CLT) Recover functionality of old parallel flag by allowing user to set query_bin_interval to 0 or None. --- siriuspy/siriuspy/clientarch/client.py | 33 ++++++++++++++++-------- siriuspy/siriuspy/clientarch/pvarch.py | 21 ++++++++++++--- siriuspy/siriuspy/clientarch/time.py | 11 +++++--- siriuspy/siriuspy/machshift/macreport.py | 13 +++++----- 4 files changed, 54 insertions(+), 24 deletions(-) diff --git a/siriuspy/siriuspy/clientarch/client.py b/siriuspy/siriuspy/clientarch/client.py index 13403da2a..0b19b329b 100644 --- a/siriuspy/siriuspy/clientarch/client.py +++ b/siriuspy/siriuspy/clientarch/client.py @@ -178,17 +178,22 @@ def server_url(self, url): @property def query_bin_interval(self): - """Queries larger than this interval will be split.""" + """Queries larger than this interval will be split. + + If set to 0 or None, no splitting will be done. 
+ """ return self._query_bin_interval @query_bin_interval.setter def query_bin_interval(self, new_intvl): + if new_intvl is None: + new_intvl = 0 if not isinstance(new_intvl, (float, int)): raise _exceptions.TypeError( 'expected argument of type float or int, got ' + str(type(new_intvl)) ) - self._query_bin_interval = int(new_intvl) + self._query_bin_interval = max(int(new_intvl), 0) @property def query_max_concurrency(self): @@ -443,7 +448,8 @@ def get_data( # noqa: D417 Defaults to `self.query_bin_interval`. Maximum interval for queries. If `timestamp_stop - timestamp_start > query_bin_interval`, - it will be split into parallel queries. + it will be split into parallel queries. If query_bin_interval<=0, + no splitting will be done. proc_type (str): data processing type to use for query. Defaults to ''. For details on each operator, please, refer to the section Processing of data of the following page: @@ -549,7 +555,8 @@ def get_request_url_for_get_data( # noqa: C901, D417 Defaults to `self.query_bin_interval`. Maximum interval for queries. If `timestamp_stop - timestamp_start > query_bin_interval`, - it will be split into parallel queries. + it will be split into parallel queries. If query_bin_interval<=0, + no splitting will be done. proc_type (str): data processing type to use for query. Defaults to ''. For details on each operator, please, refer to the section Processing of data of the following page: @@ -621,7 +628,9 @@ def get_request_url_for_get_data( # noqa: C901, D417 '`timestamp_start` and `timestamp_stop` must have same length.' ) - bin_interval = query_bin_interval or self.query_bin_interval + inter = self.query_bin_interval + if query_bin_interval is not None: + inter = query_bin_interval tstamps_start = [] tstamps_stop = [] @@ -636,7 +645,7 @@ def get_request_url_for_get_data( # noqa: C901, D417 'Or an iterable of these objects.' 
) from err tstarts, tstops = _get_time_intervals( - tst, tsp, bin_interval, return_isoformat=True + tst, tsp, inter, return_isoformat=True ) if isinstance(tstarts, (list, tuple)): tstamps_start.extend(tstarts) @@ -949,14 +958,16 @@ async def _get_request_response(self, url, session): ]) except _asyncio.TimeoutError as err: raise _exceptions.TimeoutError( - 'Timeout reached. Try to increase `query_timeout`.' + 'Timeout reached. Try to:\n - increase `query_timeout`;' + '\n - decrease `query_bin_interval`;' + '\n - decrease the time interval for the aquisition;' ) from err except _aio_exceptions.ClientPayloadError as err: raise _exceptions.PayloadError( - "Payload Error. Increasing `query_timeout` won't help. " - 'Try:\n - decreasing `query_bin_interval`;' - '\n - decrease the time interval for the aquisition;' - '\n - or changing the `query_max_concurrency` parameter' + 'Payload Error. This is probably due to some bug in the ' + 'code or some unexpected response from the server.\n' + 'Please, report this to the developers with the traceback ' + 'and the query url.' ) from err if single: diff --git a/siriuspy/siriuspy/clientarch/pvarch.py b/siriuspy/siriuspy/clientarch/pvarch.py index b6398a1fa..ec3b57964 100644 --- a/siriuspy/siriuspy/clientarch/pvarch.py +++ b/siriuspy/siriuspy/clientarch/pvarch.py @@ -370,12 +370,22 @@ def severity(self): @property def query_bin_interval(self): - """Query bin interval.""" + """Queries larger than this interval will be split. + + If set to 0 or None, no splitting will be done. 
+ """ return self._query_bin_interval @query_bin_interval.setter def query_bin_interval(self, new_intvl): - self._query_bin_interval = int(new_intvl) + if new_intvl is None: + new_intvl = 0 + if not isinstance(new_intvl, (float, int)): + raise _exceptions.TypeError( + 'expected argument of type float or int, got ' + + str(type(new_intvl)) + ) + self._query_bin_interval = max(int(new_intvl), 0) @property def query_max_concurrency(self): @@ -746,7 +756,10 @@ def pvnames(self, new_pvnames): @property def query_bin_interval(self): - """Query bin interval.""" + """Queries larger than this interval will be split. + + If set to 0 or None, no splitting will be done. + """ qry = [self._pvdata[pvn].query_bin_interval for pvn in self._pvnames] if len(set(qry)) == 1: return qry[0] @@ -754,6 +767,8 @@ def query_bin_interval(self): @query_bin_interval.setter def query_bin_interval(self, value): + if value is None: + value = 0 if isinstance(value, (int, float)): value = len(self._pvnames) * [int(value)] if len(value) != len(self._pvnames): diff --git a/siriuspy/siriuspy/clientarch/time.py b/siriuspy/siriuspy/clientarch/time.py index d8a94c516..6be97303f 100644 --- a/siriuspy/siriuspy/clientarch/time.py +++ b/siriuspy/siriuspy/clientarch/time.py @@ -180,7 +180,8 @@ def get_time_intervals( Args: time_start (Time): start time. time_stop (Time): stop time. - interval (int): interval duration in seconds. + interval (int|float|None): interval duration in seconds. + If <= 0 or None, no splitting will be done. return_isoformat (bool): return in iso8601 format. tzinfo (tzinfo): timezone info. Defaults to None, which means using the timezone of `time_start`. 
@@ -192,8 +193,12 @@ def get_time_intervals( tzinfo = tzinfo or time_start.tzinfo t_start = time_start.timestamp() t_stop = time_stop.timestamp() - t_start = _np.arange(t_start, t_stop, int(interval)) - t_stop = _np.r_[t_start[1:], t_stop] + if interval is None or interval <= 0: + t_start = [t_start] + t_stop = [t_stop] + else: + t_start = _np.arange(t_start, t_stop, int(interval)) + t_stop = _np.r_[t_start[1:], t_stop] t_start = [Time(t, tzinfo=tzinfo) for t in t_start] t_stop = [Time(t, tzinfo=tzinfo) for t in t_stop] if return_isoformat: diff --git a/siriuspy/siriuspy/machshift/macreport.py b/siriuspy/siriuspy/machshift/macreport.py index 2b611f0ec..b212e74fa 100644 --- a/siriuspy/siriuspy/machshift/macreport.py +++ b/siriuspy/siriuspy/machshift/macreport.py @@ -1070,24 +1070,23 @@ def update(self): self._update_log(log_msg.format(self._current_pv, _time.time()-_t0)) - # Set query_bin_interval for the rest of PVs to the total time to - # avoid multiple queries and speed up the process. - tot = self._time_stop - self._time_start - tot = tot.total_seconds() + 1 - # macshift, interlock and stability indicators for pvn in self._pvnames: if pvn == self._current_pv: continue _t0 = _time.time() - self._pvdata[pvn].query_bin_interval = tot + # Set query_bin_interval for the rest of PVs to 0 to + # avoid multiple queries and speed up the process. + self._pvdata[pvn].query_bin_interval = 0 self._pvdata[pvn].update() self._update_log(log_msg.format(pvn, _time.time()-_t0)) # ps for group, pvdataset in self._pvdataset.items(): _t0 = _time.time() - pvdataset.query_bin_interval = tot + # Set query_bin_interval for the rest of PVs to 0 to + # avoid multiple queries and speed up the process. 
+ pvdataset.query_bin_interval = 0 pvdataset.update() self._update_log(log_msg.format( 'SI PS '+group.capitalize(), _time.time()-_t0) From 35181b19218f32f5e79205240e6af30031aaf074 Mon Sep 17 00:00:00 2001 From: Fernando Date: Mon, 6 Apr 2026 11:50:07 -0300 Subject: [PATCH 52/60] DOC: (CLTARCH) Improves documentation of processing types property. --- siriuspy/siriuspy/clientarch/pvarch.py | 100 ++++++++++++++++++++++++- 1 file changed, 98 insertions(+), 2 deletions(-) diff --git a/siriuspy/siriuspy/clientarch/pvarch.py b/siriuspy/siriuspy/clientarch/pvarch.py index ec3b57964..8546af896 100644 --- a/siriuspy/siriuspy/clientarch/pvarch.py +++ b/siriuspy/siriuspy/clientarch/pvarch.py @@ -461,7 +461,55 @@ def time_stop(self, new_time): @property def processing_type(self): - """Processing type.""" + """Data processing type to use for query. + + For details on each operator, please, refer to the section + Processing of data of the following page: + https://epicsarchiver.readthedocs.io/en/latest/user/userguide.html + + The options implemented here are: + + The options below do not take any aditional parameter: + '' --> No processing, raw data is returned. + 'ncount' --> total number of updates in the whole interval. + + All types of processing below, require an aditional parameter, + controlled by the input `processing_type_param1`. Then the + refered statistics will be performed within this interval: + 'mean' + 'median' + 'std' + 'variance' + 'popvariance' --> population variance. + 'kurtosis' + 'skewness' + 'mini' --> same as min, which is also accepted by the archiver. + 'maxi' --> same as max, which is also accepted by the archiver. + 'jitter' --> std / mean for each bin. + 'count' --> number of updates in each bin. + 'firstSample' + 'lastSample' + 'firstFill' --> see url for difference to `'firstSample'`. + 'lastFill' --> see url for difference to `'lastSample'`. + 'linear' --> not sure, look at the archiver docs. + 'loess' --> not sure, look at the archiver docs. 
+ + The processing below also use an aditional parameter, but its + meaning is different from the statistics above: + 'optimized' --> the parameter means the total number of points + to be returned, instead of the time interval. + 'optimLastSample' --> close to 'opimized'. See docs for diff. + 'nth' --> return every nth sample. + 'deadBand' --> similar to ADEL. Only return when values change + by a certain amount. + + For both statistics below a second parameter is needed to configure + acquisition, controlled by `processing_type_param2`. This + parameter controls the number of standard deviations to consider + in the filtering bellow. The default of this parameter is 3.0: + 'ignoreflyers' --> whether to ignore outliers + 'flyers' --> only return outliers + """ return self._processing_type @processing_type.setter @@ -865,7 +913,55 @@ def timestamp_stop(self, value): @property def processing_type(self): - """Processing type.""" + """Data processing type to use for query. + + For details on each operator, please, refer to the section + Processing of data of the following page: + https://epicsarchiver.readthedocs.io/en/latest/user/userguide.html + + The options implemented here are: + + The options below do not take any aditional parameter: + '' --> No processing, raw data is returned. + 'ncount' --> total number of updates in the whole interval. + + All types of processing below, require an aditional parameter, + controlled by the input `proc_type_param1`. Then the + refered statistics will be performed within this interval: + 'mean' + 'median' + 'std' + 'variance' + 'popvariance' --> population variance. + 'kurtosis' + 'skewness' + 'mini' --> same as min, which is also accepted by the archiver. + 'maxi' --> same as max, which is also accepted by the archiver. + 'jitter' --> std / mean for each bin. + 'count' --> number of updates in each bin. + 'firstSample' + 'lastSample' + 'firstFill' --> see url for difference to `'firstSample'`. 
+ 'lastFill' --> see url for difference to `'lastSample'`. + 'linear' --> not sure, look at the archiver docs. + 'loess' --> not sure, look at the archiver docs. + + The processing below also use an aditional parameter, but its + meaning is different from the statistics above: + 'optimized' --> the parameter means the total number of points + to be returned, instead of the time interval. + 'optimLastSample' --> close to 'opimized'. See docs for diff. + 'nth' --> return every nth sample. + 'deadBand' --> similar to ADEL. Only return when values change + by a certain amount. + + For both statistics below a second parameter is needed to configure + acquisition, controlled by `proc_type_param2`. This + parameter controls the number of standard deviations to consider + in the filtering bellow. The default of this parameter is 3.0: + 'ignoreflyers' --> whether to ignore outliers + 'flyers' --> only return outliers + """ proc = [self._pvdata[pvn].processing_type for pvn in self._pvnames] if len(set(proc)) == 1: return proc[0] From 5132f649b803601d39ff0b7f1f2d384a31bcd62a Mon Sep 17 00:00:00 2001 From: Fernando Date: Mon, 6 Apr 2026 13:50:28 -0300 Subject: [PATCH 53/60] DOC: (CLTARCH) Improve documentation and reorganize code order for more intuitive reading. --- siriuspy/siriuspy/clientarch/client.py | 294 ++++++++++++++++++------- siriuspy/siriuspy/clientarch/pvarch.py | 29 ++- 2 files changed, 229 insertions(+), 94 deletions(-) diff --git a/siriuspy/siriuspy/clientarch/client.py b/siriuspy/siriuspy/clientarch/client.py index 0b19b329b..a8b6eb888 100644 --- a/siriuspy/siriuspy/clientarch/client.py +++ b/siriuspy/siriuspy/clientarch/client.py @@ -110,34 +110,9 @@ def __init__(self, server_url=None, query_timeout=None): self.connect() _urllib3.disable_warnings(_urllib3.exceptions.InsecureRequestWarning) - def connect(self): - """Starts bg. 
event loop in a separate thread when in async mode.""" - if self._loop_alive(): - return - - self._loop = _asyncio.new_event_loop() - self._thread = _Thread(target=self._run_event_loop, daemon=True) - self._thread.start() - - def shutdown(self, timeout=5): - """Safely stops the bg. loop and waits for the thread to exit.""" - if not self._loop_alive(): - return - - # 1. Cancel all pending tasks in the loop (to avoid ResourceWarnings) - self._loop.call_soon_threadsafe(self._cancel_all_tasks) - - # 2. Schedule the loop to stop processing - self._loop.call_soon_threadsafe(self._loop.stop) - - # 3. Wait for the thread to actually finish - self._thread.join(timeout=timeout) - if self._thread.is_alive(): - print('Warning: Background thread did not stop in time.') - @property def connected(self): - """Connected.""" + """Return whether the archiver is connected.""" if not self._loop_alive(): return False try: @@ -146,14 +121,9 @@ def connected(self): return False @property - def query_timeout(self): - """Request timeout for each query.""" - return self._query_timeout - - @query_timeout.setter - def query_timeout(self, value): - """Set request timeout for each query.""" - self._query_timeout = float(value) + def last_requested_url(self): + """Return the last requested URL or URLs.""" + return self._request_url @property def server_url(self): @@ -176,6 +146,18 @@ def server_url(self, url): self.logout() self._url = url + # ------------ query related properties -------------- + + @property + def query_timeout(self): + """Request timeout for each query.""" + return self._query_timeout + + @query_timeout.setter + def query_timeout(self, value): + """Set request timeout for each query.""" + self._query_timeout = float(value) + @property def query_bin_interval(self): """Queries larger than this interval will be split. 
@@ -209,10 +191,32 @@ def query_max_concurrency(self, new_val): ) self._query_max_concurrency = int(new_val) - @property - def last_requested_url(self): - """.""" - return self._request_url + # ------------- methods to control client behavior -------------- + + def connect(self): + """Starts bg. event loop in a separate thread when in async mode.""" + if self._loop_alive(): + return + + self._loop = _asyncio.new_event_loop() + self._thread = _Thread(target=self._run_event_loop, daemon=True) + self._thread.start() + + def shutdown(self, timeout=5): + """Safely stops the bg. loop and waits for the thread to exit.""" + if not self._loop_alive(): + return + + # 1. Cancel all pending tasks in the loop (to avoid ResourceWarnings) + self._loop.call_soon_threadsafe(self._cancel_all_tasks) + + # 2. Schedule the loop to stop processing + self._loop.call_soon_threadsafe(self._loop.stop) + + # 3. Wait for the thread to actually finish + self._thread.join(timeout=timeout) + if self._thread.is_alive(): + print('Warning: Background thread did not stop in time.') def login(self, username, password=None): """Login to the Archiver server. @@ -258,9 +262,31 @@ def logout(self): self.session = None return resp + def switch_to_online_data(self): + """Switch to online data. + + Sets server URL to online data URL and logs out if needed. + """ + self.server_url = _envars.SRVURL_ARCHIVER + self.logout() + + def switch_to_offline_data(self): + """Switch to offline data. + + Sets server URL to offline data URL and logs out if needed. + """ + self.server_url = _envars.SRVURL_ARCHIVER_OFFLINE_DATA + self.logout() + + # ------------- methods to get PVs informations -------------- + def get_pvs_info(self, wildcards='*', max_num_pvs=-1): """Get PVs Info. + Call method `getPVStatus` of the Archiver Appliance, which returns a + list of PVs matching the wildcards, with some details about each PV, + such as its type, connection status, etc. + Args: wildcards (str|list|tuple): Wildcards to match. 
max_num_pvs (int): Maximum number of PVs to return. @@ -282,6 +308,9 @@ def get_pvs_info(self, wildcards='*', max_num_pvs=-1): def get_all_pvs(self, wildcards='*', max_num_pvs=-1): """Get All PVs matching wildcards. + Call method `getAllPVs` of the Archiver Appliance, which returns a + list of PVs matching the wildcards. + Args: wildcards (str|list|tuple): Wildcards to match. max_num_pvs (int): Maximum number of PVs to return. @@ -299,21 +328,51 @@ def get_all_pvs(self, wildcards='*', max_num_pvs=-1): resp = self.make_request(url) return None if not resp else resp - def delete_pvs(self, pvnames, delete_data=False): - """Delete PVs.""" - if not isinstance(pvnames, (list, tuple)): - pvnames = (pvnames,) + def get_pv_details(self, pvname, get_request_url=False): + """Get PV Details. - delete_data = 'true' if delete_data else 'false' - for pvname in pvnames: - url = self._create_url( - method='deletePV', pv=pvname, deleteData=delete_data - ) - self.make_request(url, need_login=True) + Call method `getPVDetails` of the Archiver Appliance, which returns + PVs details regarding its archiving status. + + Args: + pvname (str): Name of the PV to get details. + get_request_url (bool): Whether to only return request url. + + Returns: + list (None | list): List of dictionary with PVs details. + """ + url = self._create_url(method='getPVDetails', pv=pvname) + if get_request_url: + return url + resp = self.make_request(url) + return None if not resp else resp + + def get_pv_type_info(self, pvname: str): + """Get PV Type Info. + + Call method `getPVTypeInfo` of the Archiver Appliance, which returns + Archiving information for a PV, such as its archiving policy. + + Args: + pvname (str): Name of the PV to get type info. + + Returns: + list: List of dictionary with PVs details. 
+ + """ + url = self._create_url(method='getPVTypeInfo', pv=pvname) + resp = self.make_request(url) + return None if not resp else resp + + # ------------- methods to get appliance metrics -------------- def get_detailed_appliance_metrics(self): """Get detailed appliance metrics for archiver appliance. + Call method `getApplianceMetricsForAppliance` of the Archiver + Appliance, which returns a list of metrics for the archiver + appliance. + Returns: response (dict|None): Response of the request. """ @@ -327,6 +386,10 @@ def get_detailed_appliance_metrics(self): def get_process_metrics_for_appliance(self): """Get process metrics for archiver appliance. + Call method `getProcessMetricsDataForAppliance` of the Archiver + Appliance, which returns a list of metrics for the processing consumed + by the archiver appliance. + Returns: response (dict|None): Response of the request. The metrics that are returned in case of success are: @@ -345,6 +408,26 @@ def get_process_metrics_for_appliance(self): def get_report(self, report_name='PausedPVs', max_num_pvs=None): """Get Paused PVs Report. + Call report methods of the Archiver Appliance. Possible reports are: + - DisconnectedPVs --> `getCurrentlyDisconnectedPVs` + - PausedPVs --> `getPausedPVsReport` + - EventRate --> `getEventRateReport` + - StorageRate --> `getStorageRateReport` + - RecentlyAddedPVs --> `getRecentlyAddedPVs` + - RecentlyModifiedPVs --> `getRecentlyModifiedPVs` + - LostConnections --> `getLostConnectionsReport` + - LastKnownTimestamps --> `getSilentPVsReport` + - DroppedEventsWrongTimestamp --> `getPVsByDroppedEventsTimestamp` + - DroppedEventsBufferOverflow --> `getPVsByDroppedEventsBuffer` + - DroppedEventsTypeChange --> `getPVsByDroppedEventsTypeChange` + For details on the content of each report, please, refer to the + Archiver Appliance documentation. + + The results of each report will be unprocessed in a json dict. 
+ In case you want a processed report, please, refer to the specific + methods for each report, such as `get_recently_modified_pvs` for the + `RecentlyModifiedPVs` report. + Args: report_name (str): Report name. Use self.ReportTypes to get all available reports. @@ -369,6 +452,13 @@ def get_recently_modified_pvs(self, max_num_pvs=None, epoch_time=True): Currently version of the epics archiver appliance returns pvname list from oldest to newest modified timestamps. + + Args: + max_num_pvs (int): Maximum number of PVs to return. + epoch_time (bool): Convert timestamps to epoch. + + Returns: + list: List of dictionary with PVs details. """ resp = self.get_report( self, @@ -379,50 +469,104 @@ def get_recently_modified_pvs(self, max_num_pvs=None, epoch_time=True): # convert to epoch, if the case if resp and epoch_time: for item in resp: - modtime = item['modificationTime'][ - :-7 - ] # remove ISO8601 offset + modtime = item['modificationTime'][:-7] # rm. ISO8601 offset epoch_time = _Time.conv_to_epoch(modtime, '%b/%d/%Y %H:%M:%S') item['modificationTime'] = epoch_time return None if not resp else resp - def get_pv_type_info(self, pvname: str): - """Get PV Type Info. + # ------------- Management of PVs methods -------------- + + def delete_pvs(self, pvnames, delete_data=False): + """Delete PVs. + + Call method `deletePV` of the Archiver Appliance, which deletes PVs. + + This method requires that self.login() is called first. Args: - pvname (str): Name of the PV to get type info. + pvnames (str|list|tuple): PVs to delete. + delete_data (bool): Delete data associated with the PVs. Returns: - list: List of dictionary with PVs details. - + response (list): Response of the request for each PV. 
""" - url = self._create_url(method='getPVTypeInfo', pv=pvname) - resp = self.make_request(url) - return None if not resp else resp + if not isinstance(pvnames, (list, tuple)): + pvnames = (pvnames,) + + delete_data = 'true' if delete_data else 'false' + ret = [] + for pvname in pvnames: + url = self._create_url( + method='deletePV', pv=pvname, deleteData=delete_data + ) + ret.append(self.make_request(url, need_login=True)) + return ret def pause_pvs(self, pvnames): - """Pause PVs.""" + """Pause PVs. + + Call method `pauseArchivingPV` of the Archiver Appliance, which pauses + archiving for a PV. + + This method requires that self.login() is called first. + + Args: + pvnames (list|tuple): List of PVs to pause. + + Returns: + response (list): Response of the request for each PV. + """ if not isinstance(pvnames, (list, tuple)): pvnames = (pvnames,) + ret = [] for pvname in pvnames: url = self._create_url(method='pauseArchivingPV', pv=pvname) - self.make_request(url, need_login=True) + ret.append(self.make_request(url, need_login=True)) + return ret def rename_pv(self, oldname, newname): - """Rename PVs.""" + """Rename PVs. + + Call method `renamePV` of the Archiver Appliance, which renames a PV. + + This method requires that self.login() is called first. + + Args: + oldname (str): Old PV name. + newname (str): New PV name. + + Returns: + response (dict|None): Response of the request. + """ url = self._create_url(method='renamePV', pv=oldname, newname=newname) return self.make_request(url, need_login=True) def resume_pvs(self, pvnames): - """Resume PVs.""" + """Resume PVs. + + Call method `resumeArchivingPV` of the Archiver Appliance, which + resumes archiving for a PV. + + This method requires that self.login() is called first. + + Args: + pvnames (list|tuple): List of PVs to resume. + + Returns: + response (list): Response of the request for each PV. 
+ """ if not isinstance(pvnames, (list, tuple)): pvnames = (pvnames,) + ret = [] for pvname in pvnames: url = self._create_url(method='resumeArchivingPV', pv=pvname) - self.make_request(url, need_login=True) + ret.append(self.make_request(url, need_login=True)) + return ret + + # ------------- methods related to get_data -------------- - def get_data( # noqa: D417 + def get_data( self, pvnames, timestamp_start, @@ -528,7 +672,7 @@ def get_data( # noqa: D417 return self.process_resquest_of_get_data(pvnames, resps, pvn2idcs) - def get_request_url_for_get_data( # noqa: C901, D417 + def get_request_url_for_get_data( # noqa: C901 self, pvnames, timestamp_start, @@ -740,23 +884,7 @@ def process_resquest_of_get_data(self, pvnames, resps, pvn2idcs): return pvn2resp[pvnames[0]] return pvn2resp - def get_pv_details(self, pvname, get_request_url=False): - """Get PV Details.""" - url = self._create_url(method='getPVDetails', pv=pvname) - if get_request_url: - return url - resp = self.make_request(url) - return None if not resp else resp - - def switch_to_online_data(self): - """.""" - self.server_url = _envars.SRVURL_ARCHIVER - self.session = None - - def switch_to_offline_data(self): - """.""" - self.server_url = _envars.SRVURL_ARCHIVER_OFFLINE_DATA - self.session = None + # ------------- General purpose methods -------------- def make_request(self, url, need_login=False): """Make request. 
diff --git a/siriuspy/siriuspy/clientarch/pvarch.py b/siriuspy/siriuspy/clientarch/pvarch.py index 8546af896..e64614fe2 100644 --- a/siriuspy/siriuspy/clientarch/pvarch.py +++ b/siriuspy/siriuspy/clientarch/pvarch.py @@ -185,15 +185,6 @@ def request_url(self): url = self.connector.get_pv_details(self.pvname, get_request_url=True) return url - @property - def is_archived(self): - """.""" - self.connect() - data = self.connector.get_pv_details(self.pvname) - if not data: - return False - return True - def update(self, query_timeout=None): # noqa: C901 """.""" self.connect() @@ -331,6 +322,8 @@ def __str__(self): stg += 'Not loaded yet.\n' return stg + # -------- PV data properties -------- + @property def pvname(self): """PVName.""" @@ -368,6 +361,8 @@ def severity(self): """Severity data.""" return self._severity + # ------- PV data acquisition and processing properties -------- + @property def query_bin_interval(self): """Queries larger than this interval will be split. @@ -527,7 +522,12 @@ def processing_type(self, new_type): @property def processing_type_param1(self): - """Processing type param1.""" + """Processing type param1. + + For most processing types, this is a time interval in seconds, but for + some types, it has a different meaning. Please, refer to the + documentation of `processing_type` for details. + """ return self._processing_type_param1 @processing_type_param1.setter @@ -541,7 +541,12 @@ def processing_type_param1(self, new_param): @property def processing_type_param2(self): - """Processing type param2.""" + """Processing type param2. + + See docs for `processing_type`. For most processing types, this is not + used, but for some types, it controls the number of standard + deviations to consider in outlier filtering, with a default value of 3. 
+ """ return self._processing_type_param2 @processing_type_param2.setter @@ -792,6 +797,8 @@ def __str__(self): ) return stg + # -------- Properties to control data acquisition and processing -------- + @property def pvnames(self): """PV names.""" From 71fb2097ef82bf242fc11f687029132870f5e93c Mon Sep 17 00:00:00 2001 From: Fernando Date: Mon, 6 Apr 2026 13:51:40 -0300 Subject: [PATCH 54/60] ENH: (CLTARCH.PVARCH) Improve detection of online/offline state. --- siriuspy/siriuspy/clientarch/pvarch.py | 22 +++++++++++++++------- 1 file changed, 15 insertions(+), 7 deletions(-) diff --git a/siriuspy/siriuspy/clientarch/pvarch.py b/siriuspy/siriuspy/clientarch/pvarch.py index e64614fe2..49638216c 100644 --- a/siriuspy/siriuspy/clientarch/pvarch.py +++ b/siriuspy/siriuspy/clientarch/pvarch.py @@ -5,7 +5,7 @@ import numpy as _np from mathphys.functions import ( load_pickle as _load_pickle, - save_pickle as _save_pickle, + save_pickle as _save_pickle ) from .. import envars as _envars @@ -17,9 +17,8 @@ class _Base: def __init__(self, connector=None, offline_data=False): self._connector = None - self._offline_data = offline_data self.connector = connector - self.connect() + self.connect(offline_data=offline_data) @property def is_archived(self): @@ -27,12 +26,12 @@ def is_archived(self): self.connect() return self.connector.get_pv_details(self.pvname) is not None - def connect(self): + def connect(self, offline_data=False): """Connect.""" if self.connector is None: url_off = _envars.SRVURL_ARCHIVER_OFFLINE_DATA url_on = _envars.SRVURL_ARCHIVER - url = url_off if self._offline_data else url_on + url = url_off if offline_data else url_on self._connector = _ClientArchiver(server_url=url) @property @@ -55,8 +54,17 @@ def connector(self, conn): @property def is_offline_data(self): - """.""" - return self._offline_data + """Whether server url points to online or offline data. + + Return None in case the url is not recognized as either online or + offline. 
+ """ + if self._connector.server_url == _envars.SRVURL_ARCHIVER_OFFLINE_DATA: + return True + elif self._connector.server_url == _envars.SRVURL_ARCHIVER: + return False + else: + return None @property def query_timeout(self): From 7149fb6a384e5ea09f05a47ee91909a8caf334c3 Mon Sep 17 00:00:00 2001 From: Fernando Date: Mon, 6 Apr 2026 13:52:09 -0300 Subject: [PATCH 55/60] ENH: (CLTARCH.PVARCH) Adapt serialization of PVData and PVDataSet. --- siriuspy/siriuspy/clientarch/pvarch.py | 45 +++++++++++++++++--------- 1 file changed, 29 insertions(+), 16 deletions(-) diff --git a/siriuspy/siriuspy/clientarch/pvarch.py b/siriuspy/siriuspy/clientarch/pvarch.py index 49638216c..2e10981c9 100644 --- a/siriuspy/siriuspy/clientarch/pvarch.py +++ b/siriuspy/siriuspy/clientarch/pvarch.py @@ -339,15 +339,17 @@ def pvname(self): @property def request_url(self): - """Request url.""" + """Get request url.""" self.connect() - url = self.connector.get_data( - self.pvname, - self.time_start.get_iso8601(), - self.time_stop.get_iso8601(), - get_request_url=True, + return self.connector.get_request_url_for_get_data( + self._pvname, + self.time_start, + self.time_stop, + query_bin_interval=self.query_bin_interval, + proc_type=self.processing_type, + proc_type_param1=self.processing_type_param1, + proc_type_param2=self.processing_type_param2, ) - return url @property def timestamp(self): @@ -668,6 +670,12 @@ def to_dict(self): pvname (str): the name of the PV. timestamp_start (time.time): start of acquisition time. timestamp_stop (time.time): end of acquisition time. + query_bin_interval (int): bin interval for queries. + query_max_concurrency (int): max concurrency for queries. + query_timeout (float): timeout for queries. + processing_type (str): type of processing for queries. + processing_type_param1 (float or int): param 1 for processing. + processing_type_param2 (float or int): param 2 for processing. 
data (dict): dictionary with archiver data with fields: value (numpy.ndarray): values of the PV. timestamp (numpy.ndarray): timestamps of the PV. @@ -680,6 +688,12 @@ def to_dict(self): pvname=self.pvname, timestamp_start=self.timestamp_start, timestamp_stop=self.timestamp_stop, + query_bin_interval=self.query_bin_interval, + query_max_concurrency=self.query_max_concurrency, + query_timeout=self.query_timeout, + processing_type=self.processing_type, + processing_type_param1=self.processing_type_param1, + processing_type_param2=self.processing_type_param2, data=dict( timestamp=self.timestamp, value=self.value, @@ -703,6 +717,12 @@ def from_dict(infos): pvdata = PVData(infos['pvname'], connector=infos['server_url']) pvdata.timestamp_start = infos['timestamp_start'] pvdata.timestamp_stop = infos['timestamp_stop'] + pvdata.query_bin_interval = infos['query_bin_interval'] + pvdata.query_max_concurrency = infos['query_max_concurrency'] + pvdata.query_timeout = infos['query_timeout'] + pvdata.processing_type = infos['processing_type'] + pvdata.processing_type_param1 = infos['processing_type_param1'] + pvdata.processing_type_param2 = infos['processing_type_param2'] pvdata.set_data(**infos['data']) return pvdata @@ -1203,12 +1223,7 @@ def to_dict(self): all PVs. Compatible input for PVData.from_dict. 
""" - data = dict( - server_url=self.connector.server_url, - pvnames=self.pvnames, - timestamp_start=self.timestamp_start, - timestamp_stop=self.timestamp_stop, - ) + data = dict(server_url=self.connector.server_url, pvnames=self.pvnames) data['pvdata_info'] = [self[pvn].to_dict() for pvn in self._pvnames] return data @@ -1225,10 +1240,8 @@ def from_dict(info): """ pvdataset = PVDataSet(info['pvnames'], info['server_url']) - pvdataset.timestamp_start = info['timestamp_start'] - pvdataset.timestamp_stop = info['timestamp_stop'] for i, pvdata in enumerate(pvdataset): - pvdata.set_data(**info['pvdata_info'][i]['data']) + pvdata.from_dict(**info['pvdata_info'][i]) return pvdataset def to_pickle(self, fname, overwrite=False): From 3f40457b7573f61940de11b3adb11a64d430ec68 Mon Sep 17 00:00:00 2001 From: Fernando Date: Mon, 6 Apr 2026 14:44:31 -0300 Subject: [PATCH 56/60] ENH: (CLTARCH.TIME) Make Time.now() work as expected, returning timezone aware object. --- siriuspy/siriuspy/clientarch/time.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/siriuspy/siriuspy/clientarch/time.py b/siriuspy/siriuspy/clientarch/time.py index 6be97303f..874d92297 100644 --- a/siriuspy/siriuspy/clientarch/time.py +++ b/siriuspy/siriuspy/clientarch/time.py @@ -140,6 +140,16 @@ def get_iso8601(self): """Get iso8601 format.""" return self.astimezone(self.tzinfo).isoformat() + @staticmethod + def now(tz=None): + """Get current time.""" + return super().now(tz).astimezone(tz) + + @staticmethod + def utcnow(): + """Get current UTC time.""" + raise NotImplementedError('utcnow is not implemented for Time class.') + def __add__(self, other): """Addition.""" if isinstance(other, _datetime): From 4d37303dba3d319a270eb350f3169be9cc3dcf2c Mon Sep 17 00:00:00 2001 From: Fernando Date: Mon, 6 Apr 2026 14:45:44 -0300 Subject: [PATCH 57/60] API: (CLTARCH) Remove timestamp_start and timestamp_stop. 
And initialize _time_start and _time_stop with Time.now() --- siriuspy/siriuspy/clientarch/devices.py | 2 +- siriuspy/siriuspy/clientarch/pvarch.py | 145 ++++++----------------- siriuspy/siriuspy/machshift/macreport.py | 42 ++----- 3 files changed, 42 insertions(+), 147 deletions(-) diff --git a/siriuspy/siriuspy/clientarch/devices.py b/siriuspy/siriuspy/clientarch/devices.py index 49766c863..38cdddaf2 100644 --- a/siriuspy/siriuspy/clientarch/devices.py +++ b/siriuspy/siriuspy/clientarch/devices.py @@ -87,7 +87,7 @@ def _interpolate_data(self): mean_sec /= nr_pvs # times vector - t0_, t1_ = self.timestamp_start, self.timestamp_stop + t0_, t1_ = self.time_start.timestamp(), self.time_stop.timestamp() times = _np.arange(t0_, t1_, mean_sec) # builds orbit matrix using interpolation diff --git a/siriuspy/siriuspy/clientarch/pvarch.py b/siriuspy/siriuspy/clientarch/pvarch.py index 2e10981c9..4eda4bd44 100644 --- a/siriuspy/siriuspy/clientarch/pvarch.py +++ b/siriuspy/siriuspy/clientarch/pvarch.py @@ -270,8 +270,8 @@ def __init__(self, pvname, connector=None, offline_data=False): self._value = None self._status = None self._severity = None - self._time_start = None - self._time_stop = None + self._time_start = _Time.now() + self._time_stop = self._time_start self._query_bin_interval = self.connector.query_bin_interval self._processing_type = self.ProcessingTypes.None_ self._processing_type_param1 = None @@ -289,10 +289,8 @@ def __str__(self): ) stg += '\nPV Data Properties:\n' - tss = self.time_start - tss = tss.get_iso8601() if tss else 'Not Defined.' - tsp = self.time_stop - tsp = tsp.get_iso8601() if tsp else 'Not Defined.' 
+ tss = self.time_start.get_iso8601() + tsp = self.time_stop.get_iso8601() stg += ' {:<30s}: {:}\n'.format('pvname', self.pvname) stg += ' {:<30s}: {:}\n'.format('time_start', tss) stg += ' {:<30s}: {:}\n'.format('time_stop', tsp) @@ -406,63 +404,31 @@ def query_max_concurrency(self): def query_max_concurrency(self, new_intvl): self.connector.query_max_concurrency = new_intvl - @property - def timestamp_start(self): - """Timestamp start.""" - if not self._time_start: - return None - return self._time_start.timestamp() - - @timestamp_start.setter - def timestamp_start(self, new_timestamp): - if not isinstance(new_timestamp, (float, int)): - raise _exceptions.TypeError( - 'expected argument of type float or int, got ' - + str(type(new_timestamp)) - ) - self._time_start = _Time(timestamp=new_timestamp) - @property def time_start(self): - """Time start.""" + """Time start. + + Return siriuspy.clientarch.time.Time object. + """ return self._time_start @time_start.setter def time_start(self, new_time): - if not isinstance(new_time, _Time): - raise _exceptions.TypeError( - 'expected argument of type Time, got ' + str(type(new_time)) - ) - self._time_start = new_time - - @property - def timestamp_stop(self): - """Timestamp stop.""" - if not self._time_stop: - return None - return self._time_stop.timestamp() - - @timestamp_stop.setter - def timestamp_stop(self, new_timestamp): - if not isinstance(new_timestamp, (float, int)): - raise _exceptions.TypeError( - 'expected argument of type float or int, got ' - + str(type(new_timestamp)) - ) - self._time_stop = _Time(timestamp=new_timestamp) + """Accept any value that can be converted to a Time object.""" + self._time_start = _Time(new_time) @property def time_stop(self): - """Time stop.""" + """Time stop. + + Return siriuspy.clientarch.time.Time object. 
+ """ return self._time_stop @time_stop.setter def time_stop(self, new_time): - if not isinstance(new_time, _Time): - raise _exceptions.TypeError( - 'expected argument of type Time, got ' + str(type(new_time)) - ) - self._time_stop = new_time + """Accept any value that can be converted to a Time object.""" + self._time_stop = _Time(new_time) @property def processing_type(self): @@ -576,10 +542,6 @@ def update(self, query_timeout=None): query_timeout0 = self.query_timeout self.query_timeout = query_timeout - if None in (self.timestamp_start, self.timestamp_stop): - print('Start and stop timestamps not defined! Aborting.') - return - try: data = self.connector.get_data( self._pvname, @@ -686,8 +648,8 @@ def to_dict(self): return dict( server_url=self.connector.server_url, pvname=self.pvname, - timestamp_start=self.timestamp_start, - timestamp_stop=self.timestamp_stop, + timestamp_start=self.time_start.timestamp(), + timestamp_stop=self.time_stop.timestamp(), query_bin_interval=self.query_bin_interval, query_max_concurrency=self.query_max_concurrency, query_timeout=self.query_timeout, @@ -715,8 +677,8 @@ def from_dict(infos): """ pvdata = PVData(infos['pvname'], connector=infos['server_url']) - pvdata.timestamp_start = infos['timestamp_start'] - pvdata.timestamp_stop = infos['timestamp_stop'] + pvdata.time_start = infos['timestamp_start'] + pvdata.time_stop = infos['timestamp_stop'] pvdata.query_bin_interval = infos['query_bin_interval'] pvdata.query_max_concurrency = infos['query_max_concurrency'] pvdata.query_timeout = infos['query_timeout'] @@ -809,14 +771,10 @@ def __str__(self): if pvd.timestamp is not None: stg += f'{len(pvd.timestamp):d}' - tss = pvd.time_start - tss = tss.get_iso8601() if tss else 'Not Def.' - tsp = pvd.time_stop - tsp = tsp.get_iso8601() if tsp else 'Not Def.' 
stg += tmpl.format( pvn, - tss, - tsp, + pvd.time_start.get_iso8601(), + pvd.time_stop.get_iso8601(), f'{pvd.query_bin_interval:d}', prty, pr1s, @@ -884,32 +842,18 @@ def time_start(self): @time_start.setter def time_start(self, value): - if isinstance(value, _Time): - value = len(self._pvnames) * [value] + """Accept any value that can be converted to a Time object.""" + try: + value = _Time(value) + value = [value] * len(self._pvnames) + except Exception: # noqa: S110 + pass if len(value) != len(self._pvnames): raise ValueError('value must have the same length as pvnames') for pvn, val in zip(self._pvnames, value): # noqa: B905 self._pvdata[pvn].time_start = val - @property - def timestamp_start(self): - """Start timestamp.""" - tstt = [self._pvdata[pvn].timestamp_start for pvn in self._pvnames] - if len(set(tstt)) == 1: - return tstt[0] - return tstt - - @timestamp_start.setter - def timestamp_start(self, value): - if isinstance(value, (int, float)): - value = len(self._pvnames) * [value] - if len(value) != len(self._pvnames): - raise ValueError('value must have the same length as pvnames') - - for pvn, val in zip(self._pvnames, value): # noqa: B905 - self._pvdata[pvn].timestamp_start = val - @property def time_stop(self): """Stop time.""" @@ -920,32 +864,18 @@ def time_stop(self): @time_stop.setter def time_stop(self, value): - if isinstance(value, _Time): - value = len(self._pvnames) * [value] + """Accept any value that can be converted to a Time object.""" + try: + value = _Time(value) + value = [value] * len(self._pvnames) + except Exception: # noqa: S110 + pass if len(value) != len(self._pvnames): raise ValueError('value must have the same length as pvnames') for pvn, val in zip(self._pvnames, value): # noqa: B905 self._pvdata[pvn].time_stop = val - @property - def timestamp_stop(self): - """Stop timestamp.""" - tstt = [self._pvdata[pvn].timestamp_stop for pvn in self._pvnames] - if len(set(tstt)) == 1: - return tstt[0] - return tstt - - 
@timestamp_stop.setter - def timestamp_stop(self, value): - if isinstance(value, (int, float)): - value = len(self._pvnames) * [value] - if len(value) != len(self._pvnames): - raise ValueError('value must have the same length as pvnames') - - for pvn, val in zip(self._pvnames, value): # noqa: B905 - self._pvdata[pvn].timestamp_stop = val - @property def processing_type(self): """Data processing type to use for query. @@ -1089,13 +1019,6 @@ def update(self, query_timeout=None): pvn2idcs = dict() for pvn in self._pvnames: pvd = self._pvdata[pvn] - if None in (pvd.timestamp_start, pvd.timestamp_stop): - print( - f'Start and stop times not defined for PV {pvn}! Aborting.' - ) - if query_timeout is not None: - self.query_timeout = query_timeout0 - return urls = self.connector.get_request_url_for_get_data( pvn, pvd.time_start, diff --git a/siriuspy/siriuspy/machshift/macreport.py b/siriuspy/siriuspy/machshift/macreport.py index b212e74fa..90fb01772 100644 --- a/siriuspy/siriuspy/machshift/macreport.py +++ b/siriuspy/siriuspy/machshift/macreport.py @@ -282,8 +282,8 @@ def __init__(self, connector=None, logger=None): self._init_connectors() # query data - self._time_start = None - self._time_stop = None + self._time_start = _Time.now() + self._time_stop = self._time_start # user shift stats self._usershift_progmd_time = None @@ -439,19 +439,6 @@ def logger(self, new_logger): datefmt='%F %T', level=_log.INFO, stream=_sys.stdout) - @property - def timestamp_start(self): - """Query interval start timestamp.""" - if not self._time_start: - return None - return self._time_start.timestamp() - - @timestamp_start.setter - def timestamp_start(self, new_timestamp): - if not isinstance(new_timestamp, (float, int)): - raise TypeError('expected argument of type float or int') - self._time_start = _Time(timestamp=new_timestamp) - @property def time_start(self): """Time start.""" @@ -459,22 +446,8 @@ def time_start(self): @time_start.setter def time_start(self, new_time): - if not 
isinstance(new_time, _Time): - raise TypeError('expected argument of type Time') - self._time_start = new_time - - @property - def timestamp_stop(self): - """Query interval stop timestamp.""" - if not self._time_stop: - return None - return self._time_stop.timestamp() - - @timestamp_stop.setter - def timestamp_stop(self, new_timestamp): - if not isinstance(new_timestamp, (float, int)): - raise TypeError('expected argument of type float or int') - self._time_stop = _Time(timestamp=new_timestamp) + """Accept any value that can be converted to a Time object.""" + self._time_start = _Time(new_time) @property def time_stop(self): @@ -483,9 +456,8 @@ def time_stop(self): @time_stop.setter def time_stop(self, new_time): - if not isinstance(new_time, _Time): - raise TypeError('expected argument of type Time') - self._time_stop = new_time + """Accept any value that can be converted to a Time object.""" + self._time_stop = _Time(new_time) # user shift stats @@ -1095,7 +1067,7 @@ def update(self): self._compute_stats() def plot_raw_data(self): - """Plot raw data for period timestamp_start to timestamp_stop.""" + """Plot raw data for period time_start to time_stop.""" if not self._raw_data: print('No data to display. Call update() to get data.') return From d6cab449fe8bf930b38583853b3622c498287395 Mon Sep 17 00:00:00 2001 From: Fernando Date: Mon, 6 Apr 2026 15:59:21 -0300 Subject: [PATCH 58/60] BUG: (CLTARCH.TIME) There is no super() in staticmethods. 
--- siriuspy/siriuspy/clientarch/time.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/siriuspy/siriuspy/clientarch/time.py b/siriuspy/siriuspy/clientarch/time.py index 874d92297..8e2c19c08 100644 --- a/siriuspy/siriuspy/clientarch/time.py +++ b/siriuspy/siriuspy/clientarch/time.py @@ -143,7 +143,7 @@ def get_iso8601(self): @staticmethod def now(tz=None): """Get current time.""" - return super().now(tz).astimezone(tz) + return _datetime.now(tz).astimezone(tz) @staticmethod def utcnow(): From 4d7d98193423a630f0f7d32fcb394affca29a044 Mon Sep 17 00:00:00 2001 From: Fernando Date: Mon, 6 Apr 2026 16:45:48 -0300 Subject: [PATCH 59/60] ENH: (CLTARCH.CLT) Add protection against negative timeouts in setter. Also, remove deprecated hidden attributes. --- siriuspy/siriuspy/clientarch/client.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/siriuspy/siriuspy/clientarch/client.py b/siriuspy/siriuspy/clientarch/client.py index a8b6eb888..921b55089 100644 --- a/siriuspy/siriuspy/clientarch/client.py +++ b/siriuspy/siriuspy/clientarch/client.py @@ -99,12 +99,10 @@ def __init__(self, server_url=None, query_timeout=None): """Initialize.""" query_timeout = query_timeout or ClientArchiver.DEFAULT_QUERY_TIMEOUT self.session = None - self._aiohttp_session = None - self._requests_session = None - self._query_timeout = query_timeout self._url = server_url or self.SERVER_URL self._request_url = None self._thread = self._loop = self._semaphore = None + self._query_timeout = query_timeout self._query_bin_interval = self.DEF_QUERY_BIN_INTERVAL self._query_max_concurrency = self.DEF_QUERY_MAX_CONCURRENCY self.connect() @@ -156,7 +154,7 @@ def query_timeout(self): @query_timeout.setter def query_timeout(self, value): """Set request timeout for each query.""" - self._query_timeout = float(value) + self._query_timeout = max(float(value), 0) @property def query_bin_interval(self): From 0bebf9f97ceecbdc74704cf94f8ae7aa5f678d74 Mon Sep 17 00:00:00 2001 
From: Fernando Date: Tue, 7 Apr 2026 08:30:10 -0300 Subject: [PATCH 60/60] API: (CLTARCH) Change `query_bin_interval` to `query_split_interval`. --- siriuspy/siriuspy/clientarch/client.py | 44 ++++++++++++------------ siriuspy/siriuspy/clientarch/devices.py | 2 +- siriuspy/siriuspy/clientarch/pvarch.py | 38 ++++++++++---------- siriuspy/siriuspy/machshift/macreport.py | 10 +++--- 4 files changed, 47 insertions(+), 47 deletions(-) diff --git a/siriuspy/siriuspy/clientarch/client.py b/siriuspy/siriuspy/clientarch/client.py index 921b55089..bf0163b4a 100644 --- a/siriuspy/siriuspy/clientarch/client.py +++ b/siriuspy/siriuspy/clientarch/client.py @@ -37,7 +37,7 @@ class ClientArchiver: """Archiver Data Fetcher class.""" - DEF_QUERY_BIN_INTERVAL = 12 * 60 * 60 # 12h + DEF_QUERY_SPLIT_INTERVAL = 12 * 60 * 60 # 12h DEF_QUERY_MAX_CONCURRENCY = 100 # maximum number of concurrent queries DEFAULT_QUERY_TIMEOUT = 5.0 # [s] SERVER_URL = _envars.SRVURL_ARCHIVER @@ -103,7 +103,7 @@ def __init__(self, server_url=None, query_timeout=None): self._request_url = None self._thread = self._loop = self._semaphore = None self._query_timeout = query_timeout - self._query_bin_interval = self.DEF_QUERY_BIN_INTERVAL + self._query_split_interval = self.DEF_QUERY_SPLIT_INTERVAL self._query_max_concurrency = self.DEF_QUERY_MAX_CONCURRENCY self.connect() _urllib3.disable_warnings(_urllib3.exceptions.InsecureRequestWarning) @@ -157,15 +157,15 @@ def query_timeout(self, value): self._query_timeout = max(float(value), 0) @property - def query_bin_interval(self): + def query_split_interval(self): """Queries larger than this interval will be split. If set to 0 or None, no splitting will be done. 
""" - return self._query_bin_interval + return self._query_split_interval - @query_bin_interval.setter - def query_bin_interval(self, new_intvl): + @query_split_interval.setter + def query_split_interval(self, new_intvl): if new_intvl is None: new_intvl = 0 if not isinstance(new_intvl, (float, int)): @@ -173,7 +173,7 @@ def query_bin_interval(self, new_intvl): 'expected argument of type float or int, got ' + str(type(new_intvl)) ) - self._query_bin_interval = max(int(new_intvl), 0) + self._query_split_interval = max(int(new_intvl), 0) @property def query_max_concurrency(self): @@ -569,7 +569,7 @@ def get_data( pvnames, timestamp_start, timestamp_stop, - query_bin_interval=None, + query_split_interval=None, proc_type='', proc_type_param1=None, proc_type_param2=3.0, @@ -586,11 +586,11 @@ def get_data( If it is a list or tuple, all PVs will be queried for each of the time intervals. In this case, it must have the same length as `timestamp_start`. - query_bin_interval (int): overwrites `self.query_bin_interval`. - Defaults to `self.query_bin_interval`. Maximum interval for + query_split_interval (int): overwrites `self.query_split_interval`. + Defaults to `self.query_split_interval`. Maximum interval for queries. If - `timestamp_stop - timestamp_start > query_bin_interval`, - it will be split into parallel queries. If query_bin_interval<=0, + `timestamp_stop - timestamp_start > query_split_interval`, + it will be split into parallel queries. If query_split_interval<=0, no splitting will be done. proc_type (str): data processing type to use for query. Defaults to ''. 
For details on each operator, please, refer to the section @@ -656,7 +656,7 @@ def get_data( pvnames, timestamp_start, timestamp_stop, - query_bin_interval=query_bin_interval, + query_split_interval=query_split_interval, proc_type=proc_type, proc_type_param1=proc_type_param1, proc_type_param2=proc_type_param2, @@ -675,7 +675,7 @@ def get_request_url_for_get_data( # noqa: C901 pvnames, timestamp_start, timestamp_stop, - query_bin_interval=None, + query_split_interval=None, proc_type=None, proc_type_param1=None, proc_type_param2=None, @@ -693,11 +693,11 @@ def get_request_url_for_get_data( # noqa: C901 If it is a list or tuple, all PVs will be queried for each of the time intervals. In this case, it must have the same length as `timestamp_start`. - query_bin_interval (int): overwrites `self.query_bin_interval`. - Defaults to `self.query_bin_interval`. Maximum interval for + query_split_interval (int): overwrites `self.query_split_interval`. + Defaults to `self.query_split_interval`. Maximum interval for queries. If - `timestamp_stop - timestamp_start > query_bin_interval`, - it will be split into parallel queries. If query_bin_interval<=0, + `timestamp_stop - timestamp_start > query_split_interval`, + it will be split into parallel queries. If query_split_interval<=0, no splitting will be done. proc_type (str): data processing type to use for query. Defaults to ''. For details on each operator, please, refer to the section @@ -770,9 +770,9 @@ def get_request_url_for_get_data( # noqa: C901 '`timestamp_start` and `timestamp_stop` must have same length.' ) - inter = self.query_bin_interval - if query_bin_interval is not None: - inter = query_bin_interval + inter = self.query_split_interval + if query_split_interval is not None: + inter = query_split_interval tstamps_start = [] tstamps_stop = [] @@ -1085,7 +1085,7 @@ async def _get_request_response(self, url, session): except _asyncio.TimeoutError as err: raise _exceptions.TimeoutError( 'Timeout reached. 
Try to:\n - increase `query_timeout`;' - '\n - decrease `query_bin_interval`;' + '\n - decrease `query_split_interval`;' '\n - decrease the time interval for the aquisition;' ) from err except _aio_exceptions.ClientPayloadError as err: diff --git a/siriuspy/siriuspy/clientarch/devices.py b/siriuspy/siriuspy/clientarch/devices.py index 38cdddaf2..ea73b4cf0 100644 --- a/siriuspy/siriuspy/clientarch/devices.py +++ b/siriuspy/siriuspy/clientarch/devices.py @@ -40,7 +40,7 @@ def __init__(self, devname, propty='', connector=None): self._times = None self._values = None super().__init__(pvnames, connector=connector) - self.query_bin_interval = 3600 + self.query_split_interval = 3600 self.processing_type = self.ProcessingTypes.Mean self.processing_type_param1 = 1 diff --git a/siriuspy/siriuspy/clientarch/pvarch.py b/siriuspy/siriuspy/clientarch/pvarch.py index 4eda4bd44..35aada5b8 100644 --- a/siriuspy/siriuspy/clientarch/pvarch.py +++ b/siriuspy/siriuspy/clientarch/pvarch.py @@ -272,7 +272,7 @@ def __init__(self, pvname, connector=None, offline_data=False): self._severity = None self._time_start = _Time.now() self._time_stop = self._time_start - self._query_bin_interval = self.connector.query_bin_interval + self._query_split_interval = self.connector.query_split_interval self._processing_type = self.ProcessingTypes.None_ self._processing_type_param1 = None self._processing_type_param2 = 3.0 # number of sigma @@ -295,7 +295,7 @@ def __str__(self): stg += ' {:<30s}: {:}\n'.format('time_start', tss) stg += ' {:<30s}: {:}\n'.format('time_stop', tsp) stg += ' {:<30s}: {:d}\n'.format( - 'query_bin_interval [s]', self.query_bin_interval + 'query_split_interval [s]', self.query_split_interval ) prty = self.processing_type pr1 = self.processing_type_param1 @@ -343,7 +343,7 @@ def request_url(self): self._pvname, self.time_start, self.time_stop, - query_bin_interval=self.query_bin_interval, + query_split_interval=self.query_split_interval, proc_type=self.processing_type, 
proc_type_param1=self.processing_type_param1, proc_type_param2=self.processing_type_param2, @@ -372,15 +372,15 @@ def severity(self): # ------- PV data acquisition and processing properties -------- @property - def query_bin_interval(self): + def query_split_interval(self): """Queries larger than this interval will be split. If set to 0 or None, no splitting will be done. """ - return self._query_bin_interval + return self._query_split_interval - @query_bin_interval.setter - def query_bin_interval(self, new_intvl): + @query_split_interval.setter + def query_split_interval(self, new_intvl): if new_intvl is None: new_intvl = 0 if not isinstance(new_intvl, (float, int)): @@ -388,7 +388,7 @@ def query_bin_interval(self, new_intvl): 'expected argument of type float or int, got ' + str(type(new_intvl)) ) - self._query_bin_interval = max(int(new_intvl), 0) + self._query_split_interval = max(int(new_intvl), 0) @property def query_max_concurrency(self): @@ -547,7 +547,7 @@ def update(self, query_timeout=None): self._pvname, self.time_start, self.time_stop, - query_bin_interval=self.query_bin_interval, + query_split_interval=self.query_split_interval, proc_type=self.processing_type, proc_type_param1=self.processing_type_param1, proc_type_param2=self.processing_type_param2, @@ -632,7 +632,7 @@ def to_dict(self): pvname (str): the name of the PV. timestamp_start (time.time): start of acquisition time. timestamp_stop (time.time): end of acquisition time. - query_bin_interval (int): bin interval for queries. + query_split_interval (int): interval to split queries. query_max_concurrency (int): max concurrency for queries. query_timeout (float): timeout for queries. processing_type (str): type of processing for queries. 
@@ -650,7 +650,7 @@ def to_dict(self): pvname=self.pvname, timestamp_start=self.time_start.timestamp(), timestamp_stop=self.time_stop.timestamp(), - query_bin_interval=self.query_bin_interval, + query_split_interval=self.query_split_interval, query_max_concurrency=self.query_max_concurrency, query_timeout=self.query_timeout, processing_type=self.processing_type, @@ -679,7 +679,7 @@ def from_dict(infos): pvdata = PVData(infos['pvname'], connector=infos['server_url']) pvdata.time_start = infos['timestamp_start'] pvdata.time_stop = infos['timestamp_stop'] - pvdata.query_bin_interval = infos['query_bin_interval'] + pvdata.query_split_interval = infos['query_split_interval'] pvdata.query_max_concurrency = infos['query_max_concurrency'] pvdata.query_timeout = infos['query_timeout'] pvdata.processing_type = infos['processing_type'] @@ -775,7 +775,7 @@ def __str__(self): pvn, pvd.time_start.get_iso8601(), pvd.time_stop.get_iso8601(), - f'{pvd.query_bin_interval:d}', + f'{pvd.query_split_interval:d}', prty, pr1s, pr2, @@ -796,18 +796,18 @@ def pvnames(self, new_pvnames): self._pvdata = self._init_pvdatas(new_pvnames, self.connector) @property - def query_bin_interval(self): + def query_split_interval(self): """Queries larger than this interval will be split. If set to 0 or None, no splitting will be done. 
""" - qry = [self._pvdata[pvn].query_bin_interval for pvn in self._pvnames] + qry = [self._pvdata[pvn].query_split_interval for pvn in self._pvnames] if len(set(qry)) == 1: return qry[0] return qry - @query_bin_interval.setter - def query_bin_interval(self, value): + @query_split_interval.setter + def query_split_interval(self, value): if value is None: value = 0 if isinstance(value, (int, float)): @@ -816,7 +816,7 @@ def query_bin_interval(self, value): raise ValueError('value must have the same length as pvnames') for pvn, val in zip(self._pvnames, value): # noqa: B905 - self._pvdata[pvn].query_bin_interval = val + self._pvdata[pvn].query_split_interval = val @property def query_max_concurrency(self): @@ -1023,7 +1023,7 @@ def update(self, query_timeout=None): pvn, pvd.time_start, pvd.time_stop, - query_bin_interval=pvd.query_bin_interval, + query_split_interval=pvd.query_split_interval, proc_type=pvd.processing_type, proc_type_param1=pvd.processing_type_param1, proc_type_param2=pvd.processing_type_param2, diff --git a/siriuspy/siriuspy/machshift/macreport.py b/siriuspy/siriuspy/machshift/macreport.py index 90fb01772..2a264de5e 100644 --- a/siriuspy/siriuspy/machshift/macreport.py +++ b/siriuspy/siriuspy/machshift/macreport.py @@ -1035,7 +1035,7 @@ def update(self): # current _t0 = _time.time() pvd = self._pvdata[self._current_pv] - pvd.query_bin_interval = 60 * 60 * 6 + pvd.query_split_interval = 60 * 60 * 6 pvd.processing_type = pvd.ProcessingTypes.Mean pvd.processing_type_param1 = MacReport.QUERY_AVG_TIME pvd.update() @@ -1047,18 +1047,18 @@ def update(self): if pvn == self._current_pv: continue _t0 = _time.time() - # Set query_bin_interval for the rest of PVs to 0 to + # Set query_split_interval for the rest of PVs to 0 to # avoid multiple queries and speed up the process. 
- self._pvdata[pvn].query_bin_interval = 0 + self._pvdata[pvn].query_split_interval = 0 self._pvdata[pvn].update() self._update_log(log_msg.format(pvn, _time.time()-_t0)) # ps for group, pvdataset in self._pvdataset.items(): _t0 = _time.time() - # Set query_bin_interval for the rest of PVs to 0 to + # Set query_split_interval for the rest of PVs to 0 to # avoid multiple queries and speed up the process. - pvdataset.query_bin_interval = 0 + pvdataset.query_split_interval = 0 pvdataset.update() self._update_log(log_msg.format( 'SI PS '+group.capitalize(), _time.time()-_t0)