From c121db31cb0a3276c6225dfcdd783411aa477f2a Mon Sep 17 00:00:00 2001 From: Harrison-Oatman <46121828+Harrison-Oatman@users.noreply.github.com> Date: Fri, 9 Jan 2026 23:03:05 -0500 Subject: [PATCH 1/2] Processes inherit from base process and register message handling methods --- src/pyclm/core/__init__.py | 6 +- src/pyclm/core/base_process.py | 80 +++++++++++ src/pyclm/core/manager.py | 119 ++++++++--------- src/pyclm/core/microscope.py | 19 ++- .../core/{patterns => }/pattern_process.py | 124 +++++++----------- src/pyclm/core/patterns/__init__.py | 39 +++++- src/pyclm/core/segmentation/__init__.py | 2 +- .../segmentation_process.py | 54 ++++---- tests/test_base_process.py | 51 +++++++ 9 files changed, 318 insertions(+), 176 deletions(-) create mode 100644 src/pyclm/core/base_process.py rename src/pyclm/core/{patterns => }/pattern_process.py (64%) rename src/pyclm/core/{segmentation => }/segmentation_process.py (80%) create mode 100644 tests/test_base_process.py diff --git a/src/pyclm/core/__init__.py b/src/pyclm/core/__init__.py index 0678b0f..059b18f 100644 --- a/src/pyclm/core/__init__.py +++ b/src/pyclm/core/__init__.py @@ -6,6 +6,8 @@ from .experiments import ExperimentSchedule from .manager import Manager, MicroscopeOutbox, SLMBuffer from .microscope import MicroscopeProcess -from .patterns import ROI, CameraProperties, PatternMethod, PatternProcess +from .pattern_process import PatternProcess +from .patterns import ROI, CameraProperties, PatternMethod from .queues import AllQueues -from .segmentation import SegmentationMethod, SegmentationProcess +from .segmentation import SegmentationMethod +from .segmentation_process import SegmentationProcess diff --git a/src/pyclm/core/base_process.py b/src/pyclm/core/base_process.py new file mode 100644 index 0000000..1b7257c --- /dev/null +++ b/src/pyclm/core/base_process.py @@ -0,0 +1,80 @@ +import logging +from collections.abc import Callable +from multiprocessing import Queue +from queue import Empty +from threading import Event +from time import sleep +from typing import Any, NamedTuple + +logger = logging.getLogger(__name__) + + +class QueueHandler(NamedTuple): + queue: Queue + handler: Callable[[Any], None] + + +class BaseProcess: + """ + Base class for processes that poll queues. + Eliminates busy waiting by sleeping when all queues are empty. + """ + + def __init__(self, stop_event: Event | None = None, name: str = "process"): + self.stop_event = stop_event + self.name = name + self.queues: list[QueueHandler] = [] + self.sleep_interval = 0.001 + + def register_queue(self, queue: Queue, handler: Callable[[Any], bool | None]): + """ + Register a queue to be polled. + :param queue: The multiprocessing Queue to poll. + :param handler: A callable that takes the item from the queue. + It can optionally return True to signal the process loop to break (stop). + """ + self.queues.append(QueueHandler(queue, handler)) + + def process(self): + """ + Main process loop. + Polls all registered queues. SLEEPS if no work was done in a cycle. 
+ """ + logger.info(f"Started {self.name}") + + while True: + if self.stop_event and self.stop_event.is_set(): + logger.info(f"Force closing {self.name}") + break + + did_work = False + + for q_handler in self.queues: + queue = q_handler.queue + handler = q_handler.handler + + if not queue.empty(): + try: + item = queue.get_nowait() + + should_stop = handler(item) + if should_stop: + logger.info( + f"{self.name} received stop signal from handler" + ) + return + + did_work = True + except Empty: + # Race condition handling: empty() said False but get_nowait() raised Empty + pass + except Exception as e: + logger.error( + f"Error handling item in {self.name}: {e}", exc_info=True + ) + + # If no queues had items, sleep briefly to avoid 100% CPU + if not did_work: + sleep(self.sleep_interval) + + logger.info(f"Stopped {self.name}") diff --git a/src/pyclm/core/manager.py b/src/pyclm/core/manager.py index ce335cb..e843429 100644 --- a/src/pyclm/core/manager.py +++ b/src/pyclm/core/manager.py @@ -18,6 +18,8 @@ from cv2 import warpAffine from h5py import File +from pyclm.core.pattern_process import RequestPattern + from .datatypes import ( AcquisitionData, CameraPattern, @@ -35,7 +37,6 @@ Experiment, ExperimentSchedule, ImagingConfig, - PositionWithAutoFocus, TimeCourse, ) from .messages import ( @@ -47,7 +48,6 @@ UpdateZPositionMessage, ) from .patterns import AcquiredImageRequest -from .patterns.pattern_process import RequestPattern from .queues import AllQueues logger = logging.getLogger(__name__) @@ -55,51 +55,47 @@ from threading import Event +from .base_process import BaseProcess + -class DataPassingProcess(metaclass=ABCMeta): +class DataPassingProcess(BaseProcess, metaclass=ABCMeta): def __init__(self, aq: AllQueues, stop_event: Event | None = None): + super().__init__(stop_event, name="data passing process") self.all_queues = aq - self.stop_event = stop_event self.message_history = [] + # Subclasses should set these or register queues manually self.from_manager = None self.data_in = None - self.class_name = "data passing process" - - def process(self): - while True: - if self.stop_event and self.stop_event.is_set(): - print(f"{self.class_name} force closing") - break - - if not self.from_manager.empty(): - msg = self.from_manager.get() - - must_break = self.handle_message(msg) - - if must_break: - break + def initialize_queues(self): + # Helper to register standard queues if subclasses set attributes + if self.from_manager: + self.register_queue(self.from_manager, self.handle_message_wrapper) - for data_channel in self.data_in: - # print(self.class_name, data_channel) - if not data_channel.empty(): - data = data_channel.get() + if self.data_in: + for q in self.data_in: + self.register_queue(q, self.handle_data_wrapper) - if isinstance(data, Message): - print(self.class_name, data.message) - must_break = self.handle_message(data) - - if must_break: - print(f"{self.class_name} exiting from message") - return True + def handle_message_wrapper(self, msg): + """Wrapper to handle return value logic expected by BaseProcess""" + if isinstance(msg, Message): + # BaseProcess expects True to stop + return self.handle_message(msg) + return False - assert isinstance(data, GenericData), ( - f"Unexpected data type: {type(data)}, expected subtype of GenericData" - ) + def handle_data_wrapper(self, data): + """Wrapper to handle data or message in data channel""" + if isinstance(data, Message): + print(self.name, data.message) + return self.handle_message(data) - self.handle_data(data) + assert 
isinstance(data, GenericData), ( + f"Unexpected data type: {type(data)}, expected subtype of GenericData" + ) + self.handle_data(data) + return False @abstractmethod def handle_data(self, data): @@ -127,6 +123,7 @@ def __init__( stop_event: Event | None = None, ): super().__init__(aq, stop_event) + self.name = "microscope outbox" if base_path is None: base_path = Path().cwd() @@ -145,7 +142,7 @@ def __init__( self.base_path = base_path self.save_type = save_type - self.class_name = "microscope outbox" + self.initialize_queues() def handle_data(self, data): aq_event = data.event @@ -155,7 +152,7 @@ def handle_data(self, data): if isinstance(data, SegmentationData): return - print(aq_event) + # print(aq_event) if aq_event.segment: self.seg_queue.put(data) @@ -208,36 +205,39 @@ def write_data(self, data: AcquisitionData): if isinstance(data, SegmentationData): dset_name = r"seg" - if self.save_type == "tif": - fullpath = self.base_path / file_relpath / relpath - fullpath.mkdir(parents=True) + try: + if self.save_type == "tif": + fullpath = self.base_path / file_relpath / relpath + fullpath.mkdir(parents=True, exist_ok=True) - tifffile.imwrite(fullpath / "data.tif", data.data) + tifffile.imwrite(fullpath / "data.tif", data.data) - else: - filepath = self.base_path / f"{file_relpath}.hdf5" - with File(filepath, "a") as f: - if aq_event.save_output: - dset = f.create_dataset(relpath + dset_name, data=data.data) - aq_event.write_attrs(dset) - - if isinstance(data, StimulationData): - if aq_event.save_stim: - dset = f.create_dataset(relpath + r"dmd", data=data.dmd_pattern) - dset.attrs["pattern_id"] = str(data.pattern_id) + else: + filepath = self.base_path / f"{file_relpath}.hdf5" + # Ensure directory exists + filepath.parent.mkdir(parents=True, exist_ok=True) + + with File(filepath, "a") as f: + if aq_event.save_output: + dset = f.create_dataset(relpath + dset_name, data=data.data) aq_event.write_attrs(dset) - # def test_write_data(self): - # aq_event = AcquisitionEvent("test", Position(1, 2, 0), scheduled_time=0, exposure_time_ms=1, - # sub_axes=[0, "test"]) - # data = AcquisitionData(aq_event, np.random.rand(100, 100)) - # - # self.write_data(data) + if isinstance(data, StimulationData): + if aq_event.save_stim: + dset = f.create_dataset( + relpath + r"dmd", data=data.dmd_pattern + ) + dset.attrs["pattern_id"] = str(data.pattern_id) + aq_event.write_attrs(dset) + + except Exception as e: + logger.error(f"Failed to write data: {e}", exc_info=True) class SLMBuffer(DataPassingProcess): def __init__(self, aq: AllQueues, stop_event: Event | None = None): super().__init__(aq, stop_event) + self.name = "slm buffer" self.from_manager = aq.manager_to_slm_buffer self.data_in = [aq.pattern_to_slm] @@ -251,15 +251,12 @@ def __init__(self, aq: AllQueues, stop_event: Event | None = None): self.slm_shape = None self.affine_transform = None - self.slm_shape = None - self.affine_transform = None - self.initialized = False self.manager_done = False self.pattern_done = False - self.class_name = "slm buffer" + self.initialize_queues() def initialize( self, diff --git a/src/pyclm/core/microscope.py b/src/pyclm/core/microscope.py index 445b249..0cb7f22 100644 --- a/src/pyclm/core/microscope.py +++ b/src/pyclm/core/microscope.py @@ -14,12 +14,16 @@ logger = logging.getLogger(__name__) -class MicroscopeProcess: +from .base_process import BaseProcess + + +class MicroscopeProcess(BaseProcess): def __init__( self, core: CMMCorePlus, aq: AllQueues, stop_event: Event | None = None ): + super().__init__(stop_event, 
name="microscope") self.core = core - self.stop_event = stop_event + self.inbox = aq.manager_to_microscope # receives messages/events from manager self.manager = aq.microscope_to_manager # send messages to manager self.outbox = aq.acquisition_outbox # send acquisition data to outbox process @@ -37,6 +41,15 @@ def __init__( self.warned_binning = False + # We handle msg manually in process override due to complex logic, + # or we could register it and use member vars for await/timeouts. + # Given the complexity of `process` (handling timeouts etc), we will OVERRIDE BaseProcess.process + # but replicate the "sleep if idle" behavior if we were polling multiple queues. + # But here only one inbox... actually `slm_queue` is read inside `handle_update_pattern_event`. + + # For now, let's keep `process` mostly as is but add the sleep from BaseProcess if empty to be "nice" + # AND respect the BaseProcess structure. + def declare_slm(self): core = self.core dev = core.getSLMDevice() @@ -75,6 +88,8 @@ def process(self, event_await_s=0, slm_await_s=5): f"No events in queue for {time() - event_await_start: .3f}s" ) + # Sleep briefly to be nice + sleep(self.sleep_interval) continue msg = self.inbox.get() diff --git a/src/pyclm/core/patterns/pattern_process.py b/src/pyclm/core/pattern_process.py similarity index 64% rename from src/pyclm/core/patterns/pattern_process.py rename to src/pyclm/core/pattern_process.py index e934223..e83cff7 100644 --- a/src/pyclm/core/patterns/pattern_process.py +++ b/src/pyclm/core/pattern_process.py @@ -2,53 +2,31 @@ from threading import Event from typing import ClassVar -from ..datatypes import AcquisitionData, CameraPattern, SegmentationData -from ..experiments import Experiment -from ..messages import Message -from ..queues import AllQueues -from .bar_patterns import BarPatternBase, BouncingBarPattern, SawToothMethod -from .feedback_control_patterns import ( - BounceModel, - MoveDownModel, - MoveInModel, - MoveOutModel, - RotateCcwModel, -) -from .ktr_patterns import BinaryNucleusClampModel, CenteredImageModel, GlobalCycleModel -from .pattern import ( +from .datatypes import AcquisitionData, CameraPattern, SegmentationData +from .experiments import Experiment +from .messages import Message, StreamCloseMessage +from .queues import AllQueues + +logger = logging.getLogger(__name__) + +from .base_process import BaseProcess +from .patterns import ( AcquiredImageRequest, CameraProperties, DataDock, PatternContext, PatternMethod, PatternMethodReturnsSLM, - PatternReview, + known_models, ) -from .static_patterns import CirclePattern, FullOnPattern - -logger = logging.getLogger(__name__) -class PatternProcess: - known_models: ClassVar = { - "circle": CirclePattern, - "bar": BarPatternBase, - "pattern_review": PatternReview, - "bar_bounce": BouncingBarPattern, - "full_on": FullOnPattern, - "rotate_ccw": RotateCcwModel, - "sawtooth": SawToothMethod, - "move_out": MoveOutModel, - "move_in": MoveInModel, - "move_down": MoveDownModel, - "fb_bounce": BounceModel, - "binary_nucleus_clamp": BinaryNucleusClampModel, - "global_cycle": GlobalCycleModel, - "centered_image": CenteredImageModel, - } +class PatternProcess(BaseProcess): + known_models: ClassVar = known_models def __init__(self, aq: AllQueues, stop_event: Event | None = None): - self.stop_event = stop_event + super().__init__(stop_event, name="pattern") + self.inbox = aq.manager_to_pattern self.manager = aq.pattern_to_manager self.slm = aq.pattern_to_slm @@ -64,6 +42,10 @@ def __init__(self, aq: AllQueues, stop_event: Event | 
None = None): self.models = {} self.docks = {} + self.register_queue(self.inbox, self.handle_message_wrapper) + self.register_queue(self.from_raw, self.handle_from_raw) + self.register_queue(self.from_seg, self.handle_from_seg) + def initialize(self, camera_properties: CameraProperties): self.camera_properties = camera_properties @@ -157,9 +139,6 @@ def handle_message(self, message: Message): self.stream_count += 1 if self.stream_count >= 2: - # Signal SLMBuffer that we are done - from ..messages import StreamCloseMessage - out_msg = StreamCloseMessage() self.slm.put(out_msg) return True @@ -187,53 +166,44 @@ def handle_message(self, message: Message): case _: raise NotImplementedError - def process(self): - while True: - if self.stop_event and self.stop_event.is_set(): - print("force stopping pattern process") - break - - if not self.inbox.empty(): - msg = self.inbox.get() - - if self.handle_message(msg): - return - - if not self.from_raw.empty(): - data = self.from_raw.get() + def handle_message_wrapper(self, message): + if self.handle_message(message): + return True + return False - if isinstance(data, Message): - if self.handle_message(data): - return - else: - assert isinstance(data, AcquisitionData) - name = data.event.experiment_name - t_index = data.event.t_index - - dockname = self.dock_string(name, t_index) + def handle_from_raw(self, data): + if isinstance(data, Message): + if self.handle_message(data): + return True + else: + assert isinstance(data, AcquisitionData) + name = data.event.experiment_name + t_index = data.event.t_index - self.docks[dockname].add_raw(data) + dockname = self.dock_string(name, t_index) - self.check(name, dockname) + self.docks[dockname].add_raw(data) - if not self.from_seg.empty(): - data = self.from_seg.get() + self.check(name, dockname) + return False - if isinstance(data, Message): - if self.handle_message(data): - return - else: - assert isinstance(data, SegmentationData) - name = data.event.experiment_name - t_index = data.event.t_index + def handle_from_seg(self, data): + if isinstance(data, Message): + if self.handle_message(data): + return True + else: + assert isinstance(data, SegmentationData) + name = data.event.experiment_name + t_index = data.event.t_index - dockname = self.dock_string(name, t_index) + dockname = self.dock_string(name, t_index) - print(f"seg found {dockname}") + print(f"seg found {dockname}") - self.docks[dockname].add_seg(data) + self.docks[dockname].add_seg(data) - self.check(name, dockname) + self.check(name, dockname) + return False class RequestPattern(Message): diff --git a/src/pyclm/core/patterns/__init__.py b/src/pyclm/core/patterns/__init__.py index 086b228..63f37f2 100644 --- a/src/pyclm/core/patterns/__init__.py +++ b/src/pyclm/core/patterns/__init__.py @@ -1,2 +1,37 @@ -from .pattern import ROI, AcquiredImageRequest, CameraProperties, PatternMethod -from .pattern_process import PatternProcess +from .bar_patterns import BarPatternBase, BouncingBarPattern, SawToothMethod +from .feedback_control_patterns import ( + BounceModel, + MoveDownModel, + MoveInModel, + MoveOutModel, + RotateCcwModel, +) +from .ktr_patterns import BinaryNucleusClampModel, CenteredImageModel, GlobalCycleModel +from .pattern import ( + ROI, + AcquiredImageRequest, + CameraProperties, + DataDock, + PatternContext, + PatternMethod, + PatternMethodReturnsSLM, + PatternReview, +) +from .static_patterns import CirclePattern, FullOnPattern + +known_models = { + "circle": CirclePattern, + "bar": BarPatternBase, + "pattern_review": PatternReview, + 
"bar_bounce": BouncingBarPattern, + "full_on": FullOnPattern, + "rotate_ccw": RotateCcwModel, + "sawtooth": SawToothMethod, + "move_out": MoveOutModel, + "move_in": MoveInModel, + "move_down": MoveDownModel, + "fb_bounce": BounceModel, + "binary_nucleus_clamp": BinaryNucleusClampModel, + "global_cycle": GlobalCycleModel, + "centered_image": CenteredImageModel, +} diff --git a/src/pyclm/core/segmentation/__init__.py b/src/pyclm/core/segmentation/__init__.py index d1db544..a37536e 100644 --- a/src/pyclm/core/segmentation/__init__.py +++ b/src/pyclm/core/segmentation/__init__.py @@ -1,2 +1,2 @@ +from .cellpose_segmentation import CellposeSegmentationMethod from .segmentation import SegmentationMethod -from .segmentation_process import SegmentationProcess diff --git a/src/pyclm/core/segmentation/segmentation_process.py b/src/pyclm/core/segmentation_process.py similarity index 80% rename from src/pyclm/core/segmentation/segmentation_process.py rename to src/pyclm/core/segmentation_process.py index 85c0f0a..6337d5e 100644 --- a/src/pyclm/core/segmentation/segmentation_process.py +++ b/src/pyclm/core/segmentation_process.py @@ -2,19 +2,21 @@ from threading import Event from typing import ClassVar -from ..datatypes import AcquisitionData, SegmentationData -from ..experiments import Experiment -from ..messages import Message -from ..queues import AllQueues -from ..segmentation import SegmentationMethod -from .cellpose_segmentation import CellposeSegmentationMethod +from .base_process import BaseProcess +from .datatypes import AcquisitionData, SegmentationData +from .experiments import Experiment +from .messages import Message +from .queues import AllQueues +from .segmentation import SegmentationMethod +from .segmentation.cellpose_segmentation import CellposeSegmentationMethod -class SegmentationProcess: +class SegmentationProcess(BaseProcess): known_models: ClassVar = {"cellpose": CellposeSegmentationMethod} def __init__(self, aq: AllQueues, stop_event: Event | None = None): - self.stop_event = stop_event + super().__init__(stop_event, name="segmentation") + self.inbox = aq.manager_to_seg self.manager = aq.seg_to_manager @@ -30,6 +32,9 @@ def __init__(self, aq: AllQueues, stop_event: Event | None = None): self.accommodated_requests = [] self.shared_resources = dict() + self.register_queue(self.inbox, self.handle_message) + self.register_queue(self.from_raw, self.handle_from_raw) + def initialize(self): self.initialized = True @@ -142,7 +147,7 @@ def handle_message(self, message: Message): logging.info( "segmentation process received stream close from outbox. 
Sending to outbox and pattern" ) - from ..messages import StreamCloseMessage + from .messages import StreamCloseMessage close_msg = StreamCloseMessage() self.to_pattern.put(close_msg) @@ -154,25 +159,12 @@ def handle_message(self, message: Message): case _: raise NotImplementedError - def process(self): - while True: - if self.stop_event and self.stop_event.is_set(): - print("force closing segmentation process") - break - - if not self.inbox.empty(): - msg = self.inbox.get() - - if self.handle_message(msg): - return True - - if not self.from_raw.empty(): - data = self.from_raw.get() - - if isinstance(data, Message): - if self.handle_message(data): - print("segmentation process closing") - return True - else: - assert isinstance(data, AcquisitionData) - self.handle_segment_data(data) + def handle_from_raw(self, data): + if isinstance(data, Message): + if self.handle_message(data): + print("segmentation process closing") + return True + else: + assert isinstance(data, AcquisitionData) + self.handle_segment_data(data) + return False diff --git a/tests/test_base_process.py b/tests/test_base_process.py new file mode 100644 index 0000000..b1ad208 --- /dev/null +++ b/tests/test_base_process.py @@ -0,0 +1,51 @@ +import threading +import time +from multiprocessing import Queue +from queue import Empty + +import pytest + +from pyclm.core.base_process import BaseProcess + + +class MockProcess(BaseProcess): + def __init__(self, stop_event=None): + super().__init__(stop_event, name="mock") + self.inbox = Queue() + self.register_queue(self.inbox, self.handle_message) + self.processed_count = 0 + + def handle_message(self, msg): + if msg == "stop": + return True + self.processed_count += 1 + return False + + +def test_process_loop(): + stop_event = threading.Event() + process = MockProcess(stop_event) + + # Run process in a separate thread because it blocks + t = threading.Thread(target=process.process) + t.start() + + try: + # Send messages + process.inbox.put("msg1") + process.inbox.put("msg2") + + # Allow time to process + time.sleep(0.1) + + assert process.processed_count == 2 + + # Test stop signal via message + process.inbox.put("stop") + t.join(timeout=1.0) + assert not t.is_alive() + + finally: + if t.is_alive(): + stop_event.set() + t.join() From 5ea0b78562b7e0b8b192378e7ce88608f9436d8c Mon Sep 17 00:00:00 2001 From: Harrison-Oatman <46121828+Harrison-Oatman@users.noreply.github.com> Date: Fri, 9 Jan 2026 23:06:01 -0500 Subject: [PATCH 2/2] remove excessive comments --- src/pyclm/core/microscope.py | 22 ---------------------- 1 file changed, 22 deletions(-) diff --git a/src/pyclm/core/microscope.py b/src/pyclm/core/microscope.py index 0cb7f22..b38a070 100644 --- a/src/pyclm/core/microscope.py +++ b/src/pyclm/core/microscope.py @@ -41,15 +41,6 @@ def __init__( self.warned_binning = False - # We handle msg manually in process override due to complex logic, - # or we could register it and use member vars for await/timeouts. - # Given the complexity of `process` (handling timeouts etc), we will OVERRIDE BaseProcess.process - # but replicate the "sleep if idle" behavior if we were polling multiple queues. - # But here only one inbox... actually `slm_queue` is read inside `handle_update_pattern_event`. - - # For now, let's keep `process` mostly as is but add the sleep from BaseProcess if empty to be "nice" - # AND respect the BaseProcess structure. 
- def declare_slm(self): core = self.core dev = core.getSLMDevice() @@ -299,8 +290,6 @@ def handle_acquisition_event(self, aq_event: AcquisitionEvent): sleep(1.0) - # print(aq_event.position.get_z(), self.core.getPosition()) - logger.info(f"{self.t(): .3f}| acquiring image: {aq_event.exposure_time_ms}ms") image = self.snap() aq_event.completed_time = time() @@ -308,8 +297,6 @@ def handle_acquisition_event(self, aq_event: AcquisitionEvent): aq_event.pixel_width_um = self.core.getPixelSizeUm() - # info(f"{self.t(): .3f}| unloading") - if aq_event.needs_slm: data_out = StimulationData( aq_event, image, self.current_pattern, self.current_pattern_id @@ -318,7 +305,6 @@ def handle_acquisition_event(self, aq_event: AcquisitionEvent): data_out = AcquisitionData(aq_event, image) self.outbox.put(data_out) - # info(f"{self.t(): .3f}| unloaded") def snap(self): core = self.core @@ -330,11 +316,3 @@ def snap(self): def t(self): return time() - self.start - - # tagged_image = core.getTaggedImage() - # pixels = np.reshape(tagged_image.pix, - # newshape=[tagged_image.tags['Height'], tagged_image.tags['Width']]) - # - # tags = tagged_image.tags - # - # return pixels, tags
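
For reference, a minimal sketch (not part of the patch) of how a subclass is expected to wire its queues through the new BaseProcess.register_queue contract from base_process.py: each handler receives one dequeued item and may return True to ask the loop to exit. The class name, queue attributes, and payloads below are illustrative only; only the BaseProcess API itself comes from this patch.

    import threading
    from multiprocessing import Queue

    from pyclm.core.base_process import BaseProcess


    class PrinterProcess(BaseProcess):
        """Hypothetical subclass used only to illustrate the registration pattern."""

        def __init__(self, stop_event=None):
            super().__init__(stop_event, name="printer")
            self.commands = Queue()   # illustrative queue, not a pyclm queue
            self.data_in = Queue()    # illustrative queue, not a pyclm queue
            # Handlers may return True to signal BaseProcess.process() to return.
            self.register_queue(self.commands, self.handle_command)
            self.register_queue(self.data_in, self.handle_data)

        def handle_command(self, msg):
            return msg == "stop"  # True -> loop exits cleanly

        def handle_data(self, item):
            print(f"got data: {item}")
            return False


    if __name__ == "__main__":
        stop_event = threading.Event()
        proc = PrinterProcess(stop_event)
        t = threading.Thread(target=proc.process)
        t.start()

        proc.data_in.put(42)          # handled without busy-waiting
        proc.commands.put("stop")     # handler returns True, process() returns
        t.join(timeout=2.0)

This mirrors MockProcess in tests/test_base_process.py: the loop polls every registered queue, and only sleeps for sleep_interval when no queue yielded work in a cycle.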