diff --git a/.github/workflows/sync-tag-definitions.yml b/.github/workflows/sync-tag-definitions.yml
new file mode 100644
index 0000000..bfb4373
--- /dev/null
+++ b/.github/workflows/sync-tag-definitions.yml
@@ -0,0 +1,93 @@
+name: Sync Tag Type Definitions
+
+on:
+ # Run weekly on Monday at 00:00 UTC
+ schedule:
+ - cron: '0 0 * * 1'
+
+ # Allow manual trigger
+ workflow_dispatch:
+
+permissions:
+ contents: write
+ pull-requests: write
+
+jobs:
+ sync-tag-definitions:
+ runs-on: ubuntu-latest
+
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@v4
+ with:
+ fetch-depth: 0
+
+ - name: Set up Python
+ uses: actions/setup-python@v5
+ with:
+ python-version: '3.12'
+
+ - name: Fetch tag type definitions from OpenEPaperLink
+ id: fetch
+ run: python3 scripts/fetch_tag_types.py new_tag_types.json
+
+ - name: Generate updated const.py
+ id: generate
+ run: |
+ python3 scripts/generate_tag_types.py new_tag_types.json
+ rm -f new_tag_types.json
+
+ - name: Close existing automated PR
+ if: steps.generate.outputs.changed == 'true'
+ env:
+ GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ run: |
+ # Find and close any existing open PRs on the automated branch
+ existing_prs=$(gh pr list --repo "${{ github.repository }}" --head "automated/sync-tag-definitions" --state open --json number --jq '.[].number')
+ for pr_number in $existing_prs; do
+ echo "Closing existing PR #$pr_number"
+ gh pr close "$pr_number" --repo "${{ github.repository }}" --comment "Superseded by a newer automated update."
+ done
+
+ - name: Create Pull Request
+ if: steps.generate.outputs.changed == 'true'
+ uses: peter-evans/create-pull-request@v6
+ with:
+ token: ${{ secrets.GITHUB_TOKEN }}
+ commit-message: 'Update tag type definitions from OpenEPaperLink'
+ title: 'chore: Update tag type definitions from OpenEPaperLink'
+ body: |
+ This PR automatically updates the fallback tag type definitions to match the latest definitions from the OpenEPaperLink repository.
+
+ ## Changes
+
+ ${{ steps.generate.outputs.summary }}
+
+ ## Source
+
+ Definitions fetched from: https://github.com/OpenEPaperLink/OpenEPaperLink/tree/master/resources/tagtypes
+
+ ## Notes
+
+ - Only required fields are included: `version`, `name`, `width`, `height`
+ - Optional fields (`bpp`, `rotatebuffer`) use defaults from TagType class
+
+ ---
+
+ *This PR was automatically created by the sync-tag-definitions workflow*
+ branch: 'automated/sync-tag-definitions'
+ delete-branch: true
+ add-paths: |
+ custom_components/opendisplay/const.py
+ labels: |
+ automated
+ dependencies
+
+ - name: Summary
+ run: |
+ if [ "${{ steps.generate.outputs.changed }}" == "true" ]; then
+ echo "✅ Changes detected - PR created"
+ echo "${{ steps.generate.outputs.summary }}"
+ else
+ echo "✅ No changes detected - definitions are up to date"
+ fi
diff --git a/custom_components/opendisplay/config_flow.py b/custom_components/opendisplay/config_flow.py
index 29803d9..1c1ef9c 100644
--- a/custom_components/opendisplay/config_flow.py
+++ b/custom_components/opendisplay/config_flow.py
@@ -322,12 +322,22 @@ async def async_step_bluetooth_confirm(
}
else:
# ATC devices: Use tagtypes.json lookup and store individual fields
- tag_types_manager = await get_tag_types_manager(self.hass)
+ # Try to get tag types manager, but don't fail if unavailable
+ tag_types_manager = None
+ try:
+ tag_types_manager = await get_tag_types_manager(self.hass)
+ _LOGGER.debug("Tag types manager loaded successfully")
+ except Exception as tag_err:
+ _LOGGER.warning(
+ "Could not load tag types during config flow, will use fallback values: %s",
+ tag_err
+ )
+
model_name = get_hw_string(hw_type) if hw_type else "Unknown"
_LOGGER.debug("Resolved hw_type %s to model: %s", hw_type, model_name)
- # Refine color_scheme using TagTypes db
- if tag_types_manager.is_in_hw_map(hw_type):
+ # Refine color_scheme using TagTypes db if available
+ if tag_types_manager and tag_types_manager.is_in_hw_map(hw_type):
tag_type = await tag_types_manager.get_tag_info(hw_type)
color_table = tag_type.color_table
@@ -342,10 +352,16 @@ async def async_step_bluetooth_confirm(
else:
# Fallback to protocol detection
color_scheme = capabilities.color_scheme
- _LOGGER.warning(
- "hw_type %s not in TagTypes, using protocol color_scheme: %d",
- hw_type, color_scheme
- )
+ if not tag_types_manager:
+ _LOGGER.info(
+ "Tag types not available, using protocol-detected color_scheme: %d",
+ color_scheme
+ )
+ else:
+ _LOGGER.warning(
+ "hw_type %s not in TagTypes, using protocol color_scheme: %d",
+ hw_type, color_scheme
+ )
# Build device metadata from capabilities
device_metadata = {
diff --git a/custom_components/opendisplay/const.py b/custom_components/opendisplay/const.py
index 024f11d..6fe1160 100644
--- a/custom_components/opendisplay/const.py
+++ b/custom_components/opendisplay/const.py
@@ -4,3 +4,98 @@
SIGNAL_AP_UPDATE = f"{DOMAIN}_ap_update"
OEPL_CONFIG_URL = "https://config.openepaperlink.org/"
ATC_CONFIG_URL = "https://atc1441.github.io/ATC_BLE_OEPL_Image_Upload.html"
+
+# Fallback tag type definitions
+# These definitions are automatically synced from OpenEPaperLink repository
+# See: https://github.com/OpenEPaperLink/OpenEPaperLink/tree/master/resources/tagtypes
+FALLBACK_TAG_DEFINITIONS = {
+ 0: {"version": 4, "name": "M2 1.54\"", "width": 152, "height": 152},
+ 1: {"version": 5, "name": "M2 2.9\"", "width": 296, "height": 128},
+ 2: {"version": 5, "name": "M2 4.2\"", "width": 400, "height": 300},
+ 3: {"version": 6, "name": "M2 2.2\"", "width": 212, "height": 104},
+ 4: {"version": 4, "name": "M2 2.6\"", "width": 296, "height": 152},
+ 5: {"version": 4, "name": "M2 7.4\"", "width": 640, "height": 384},
+ 6: {"version": 4, "name": "Opticon 2.2\"", "width": 250, "height": 128},
+ 7: {"version": 4, "name": "Opticon 2.9\"", "width": 296, "height": 128},
+ 8: {"version": 2, "name": "Opticon 4.2\"", "width": 400, "height": 300},
+ 9: {"version": 2, "name": "Opticon 7.5\"", "width": 640, "height": 384},
+ 17: {"version": 3, "name": "M2 2.9\" (UC8151)", "width": 296, "height": 128},
+ 18: {"version": 3, "name": "M2 4.2\" UC", "width": 400, "height": 300},
+ 33: {"version": 2, "name": "ST‐GM29XXF 2.9\"", "width": 296, "height": 128},
+ 34: {"version": 2, "name": "M2 2.7\"", "width": 264, "height": 176},
+ 38: {"version": 1, "name": "M2 7.5\" BW", "width": 640, "height": 384},
+ 39: {"version": 3, "name": "ST‐GM29MT1 2.9\"", "width": 296, "height": 128},
+ 40: {"version": 2, "name": "M3 1.6\" BWRY", "width": 168, "height": 168},
+ 41: {"version": 1, "name": "M3 2.4\" BWRY", "width": 296, "height": 168},
+ 42: {"version": 1, "name": "M3 3.0\" BWRY", "width": 400, "height": 168},
+ 43: {"version": 1, "name": "M3 2.9\" BWRY", "width": 384, "height": 168},
+ 44: {"version": 1, "name": "M3 4.3\" BWRY", "width": 522, "height": 152},
+ 45: {"version": 2, "name": "M3 12.2\"", "width": 960, "height": 768},
+ 46: {"version": 5, "name": "M3 9.7\"", "width": 960, "height": 672},
+ 47: {"version": 4, "name": "M3 4.3\"", "width": 522, "height": 152},
+ 48: {"version": 2, "name": "M3 1.6\"", "width": 200, "height": 200},
+ 49: {"version": 1, "name": "M3 2.2\"", "width": 296, "height": 160},
+ 50: {"version": 1, "name": "M3 2.6\"", "width": 360, "height": 184},
+ 51: {"version": 3, "name": "M3 2.9\"", "width": 384, "height": 168},
+ 52: {"version": 2, "name": "M3 4.2\"", "width": 400, "height": 300},
+ 53: {"version": 2, "name": "M3 6.0\"", "width": 600, "height": 448},
+ 54: {"version": 5, "name": "M3 7.5\"", "width": 800, "height": 480},
+ 55: {"version": 3, "name": "M3 11.6\"", "width": 960, "height": 640},
+ 60: {"version": 3, "name": "M3 4.2\" BWY", "width": 400, "height": 300},
+ 64: {"version": 1, "name": "M3 2.9\" BW", "width": 384, "height": 168},
+ 65: {"version": 1, "name": "M3 5.85\"", "width": 792, "height": 272},
+ 66: {"version": 1, "name": "M3 5.85\" BW", "width": 792, "height": 272},
+ 67: {"version": 2, "name": "M3 1.3\" Peghook", "width": 144, "height": 200},
+ 68: {"version": 2, "name": "M3 5.81\" BW", "width": 720, "height": 256},
+ 69: {"version": 3, "name": "M3 2.2 Lite\"", "width": 250, "height": 128},
+ 70: {"version": 1, "name": "M3 2.2\" BW", "width": 296, "height": 160},
+ 71: {"version": 4, "name": "M3 2.7\"", "width": 300, "height": 200},
+ 72: {"version": 1, "name": "M3 5.81\" BWR", "width": 720, "height": 256},
+ 73: {"version": 2, "name": "M3 5.81\" V2 BWR", "width": 720, "height": 256},
+ 74: {"version": 1, "name": "M3 1.6\" 200px BWRY", "width": 200, "height": 200},
+ 75: {"version": 1, "name": "M3 2.2\" BWRY", "width": 296, "height": 160},
+ 76: {"version": 1, "name": "M3 7.5\" BWRY", "width": 800, "height": 480},
+ 77: {"version": 3, "name": "M3 11.6\" BWRY", "width": 960, "height": 640},
+ 78: {"version": 2, "name": "M3 2.6\" BW", "width": 360, "height": 184},
+ 80: {"version": 2, "name": "HD150 5.83\" BWR", "width": 648, "height": 480},
+ 84: {"version": 4, "name": "HS BW 2.13\"", "width": 256, "height": 128},
+ 85: {"version": 5, "name": "HS BWR 2.13\"", "width": 256, "height": 128},
+ 86: {"version": 6, "name": "HS BWR 2.66\"", "width": 296, "height": 152},
+ 87: {"version": 3, "name": "TLSR BWR 1.54\"", "width": 200, "height": 200},
+ 88: {"version": 3, "name": "TLSR BW 2.13\"", "width": 256, "height": 128},
+ 89: {"version": 3, "name": "TLSR BWR 2.13\"", "width": 264, "height": 136},
+ 90: {"version": 1, "name": "HS BW 2.13\" LowRes", "width": 212, "height": 104},
+ 96: {"version": 6, "name": "HS BWY 3.5\"", "width": 384, "height": 184},
+ 97: {"version": 4, "name": "HS BWR 3.5\"", "width": 384, "height": 184},
+ 98: {"version": 4, "name": "HS BW 3.5\"", "width": 384, "height": 184},
+ 99: {"version": 6, "name": "TLSR BWR 4.2\"", "width": 400, "height": 300},
+ 102: {"version": 2, "name": "HS BWY 7,5\"", "width": 800, "height": 480},
+ 103: {"version": 3, "name": "HS 2.00\" BWY", "width": 152, "height": 200},
+ 104: {"version": 4, "name": "HS BWY 3.46\"", "width": 480, "height": 176},
+ 105: {"version": 4, "name": "TLSR BW 2.13\"", "width": 250, "height": 136},
+ 106: {"version": 1, "name": "HS BWR 5,83\"", "width": 648, "height": 480},
+ 107: {"version": 3, "name": "HS BWRY 7,5\"", "width": 800, "height": 480},
+ 108: {"version": 3, "name": "HS BWRY 2,00\"", "width": 152, "height": 200},
+ 109: {"version": 3, "name": "HS BWRY 3,5\"", "width": 384, "height": 184},
+ 110: {"version": 3, "name": "HS BWRY 2,9\"", "width": 296, "height": 128},
+ 111: {"version": 2, "name": "HS BWRY 2,60\"", "width": 296, "height": 152},
+ 128: {"version": 1, "name": "Chroma 7.4\"", "width": 640, "height": 384},
+ 129: {"version": 2, "name": "Chroma Aeon 74 7.4\"", "width": 800, "height": 480},
+ 130: {"version": 2, "name": "Chroma29 2.9\"", "width": 296, "height": 128},
+ 131: {"version": 2, "name": "Chroma42 4.2\"", "width": 400, "height": 300},
+ 176: {"version": 5, "name": "Gicisky BLE EPD BW 2.13\"", "width": 250, "height": 128},
+ 177: {"version": 5, "name": "Gicisky BLE EPD BWR 2.13\"", "width": 250, "height": 128},
+ 178: {"version": 2, "name": "Gicisky BLE EPD BW 2.9\"", "width": 296, "height": 128},
+ 179: {"version": 2, "name": "Gicisky BLE EPD BWR 2.9\"", "width": 296, "height": 128},
+ 181: {"version": 2, "name": "Gicisky BLE EPD BWR 4.2\"", "width": 400, "height": 300},
+ 186: {"version": 5, "name": "Gicisky BLE TFT 2.13\"", "width": 250, "height": 136},
+ 189: {"version": 2, "name": "BLE EPD BWR 2.9\" Silabs", "width": 384, "height": 168},
+ 190: {"version": 1, "name": "ATC MiThermometer BLE", "width": 6, "height": 8},
+ 192: {"version": 2, "name": "BWRY example", "width": 360, "height": 180},
+ 226: {"version": 1, "name": "LILYGO TPANEL 4\"", "width": 480, "height": 480},
+ 227: {"version": 1, "name": "GDEM1085Z51 10.85\"", "width": 1360, "height": 480},
+ 228: {"version": 1, "name": "BLE TFT 128x128", "width": 128, "height": 128},
+ 229: {"version": 1, "name": "TFT 240x320", "width": 320, "height": 172},
+ 240: {"version": 2, "name": "SLT‐EM007 Segmented", "width": 0, "height": 0},
+ 250: {"version": 1, "name": "ConfigMode", "width": 0, "height": 0},
+}
diff --git a/custom_components/opendisplay/tag_types.py b/custom_components/opendisplay/tag_types.py
index fa9b012..25f4776 100644
--- a/custom_components/opendisplay/tag_types.py
+++ b/custom_components/opendisplay/tag_types.py
@@ -12,12 +12,12 @@
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import storage
-from .const import DOMAIN
+from .const import DOMAIN, FALLBACK_TAG_DEFINITIONS
_LOGGER = logging.getLogger(__name__)
-GITHUB_API_URL = "https://api.github.com/repos/OpenDisplay/OpenDisplay/contents/resources/tagtypes"
-GITHUB_RAW_URL = "https://raw.githubusercontent.com/OpenDisplay/OpenDisplay/master/resources/tagtypes"
+GITHUB_API_URL = "https://api.github.com/repos/OpenEPaperLink/OpenEPaperLink/contents/resources/tagtypes"
+GITHUB_RAW_URL = "https://raw.githubusercontent.com/OpenEPaperLink/OpenEPaperLink/master/resources/tagtypes"
CACHE_DURATION = timedelta(hours=48) # Cache tag definitions for 48 hours
STORAGE_VERSION = 1
STORAGE_KEY = "opendisplay_tagtypes"
@@ -239,6 +239,13 @@ async def load_stored_data(self) -> None:
if fetch_success:
await self._cleanup_legacy_file()
else:
+ # If fetch failed and we have no types, load fallback definitions
+ if not self._tag_types:
+ _LOGGER.warning(
+ "Failed to fetch tag types from GitHub and no stored data available. "
+ "Loading fallback definitions. Tag types will be refreshed on next integration reload."
+ )
+ self._load_fallback_types()
await self._cleanup_legacy_file()
async def _save_to_store(self) -> None:
@@ -308,30 +315,32 @@ async def ensure_types_loaded(self) -> None:
This is the primary method that should be called before accessing
tag type information to ensure data availability.
- Raises:
- HomeAssistantError: If tag types could not be loaded
+ If tag types cannot be loaded from GitHub or storage, fallback
+ definitions will be used to ensure basic functionality.
"""
async with self._lock:
if not self._tag_types:
await self.load_stored_data()
- # If still no types after loading from storage, this is a critical failure
+ # After load_stored_data, we should always have types (either from storage,
+ # GitHub, or fallback). If not, something is seriously wrong.
if not self._tag_types:
- raise HomeAssistantError(
- translation_domain=DOMAIN,
- translation_key="tagtypes_load_failed",
+ _LOGGER.error(
+ "Critical error: No tag types available after loading. "
+ "This should not happen as fallback types should be loaded."
)
+ # Load fallback as last resort
+ self._load_fallback_types()
# If the cache is expired, attempt refresh
if not self._last_update or datetime.now() - self._last_update > CACHE_DURATION:
_LOGGER.debug("Tag types cache expired, attempting refresh")
fetch_success = await self._fetch_tag_types()
- # If refresh failed and have no valid types, raise an exception
- if not fetch_success and not self._tag_types:
- raise HomeAssistantError(
- translation_domain=DOMAIN,
- translation_key="tagtypes_refresh_failed"
+ # If refresh failed, log a warning but continue with existing types
+ if not fetch_success:
+ _LOGGER.warning(
+ "Failed to refresh tag types from GitHub. Using cached or fallback definitions."
)
async def _fetch_tag_types(self) -> bool:
@@ -348,11 +357,17 @@ async def _fetch_tag_types(self) -> bool:
falls back to built-in basic definitions.
"""
try:
+ _LOGGER.debug("Fetching tag type definitions from GitHub: %s", GITHUB_API_URL)
async with aiohttp.ClientSession() as session:
# First get the directory listing from GitHub API
headers = {"Accept": "application/vnd.github.v3+json"}
async with session.get(GITHUB_API_URL, headers=headers) as response:
if response.status != 200:
+ _LOGGER.error(
+ "GitHub API request failed with status %d for URL: %s",
+ response.status,
+ GITHUB_API_URL
+ )
raise Exception(f"GitHub API returned status {response.status}")
directory_contents = await response.json()
@@ -405,13 +420,24 @@ async def _fetch_tag_types(self) -> bool:
if new_types:
self._tag_types = new_types
self._last_update = datetime.now()
- _LOGGER.info(f"Successfully loaded {len(new_types)} tag definitions")
+ _LOGGER.info(
+ "Successfully loaded %d tag definitions from GitHub",
+ len(new_types)
+ )
await self._save_to_store()
return True
- _LOGGER.error("No valid tag definitions found")
+ _LOGGER.warning(
+ "No valid tag definitions found in GitHub repository at %s",
+ GITHUB_API_URL
+ )
except Exception as e:
- _LOGGER.error(f"Error fetching tag types: {str(e)}")
+ _LOGGER.error(
+ "Error fetching tag types from %s: %s",
+ GITHUB_API_URL,
+ str(e),
+ exc_info=True
+ )
return False
# Do NOT load fallback types - let caller decide how to handle failure
@@ -440,30 +466,17 @@ def _validate_tag_definition(self, data: Dict) -> bool:
def _load_fallback_types(self) -> None:
"""Load basic fallback definitions if fetching fails on first run.
- Populates the manager with a minimal set of built-in tag type
+ Populates the manager with a comprehensive set of built-in tag type
definitions to ensure basic functionality when GitHub is unreachable.
- This provides support for common tag models with basic dimensions,
- though without detailed configuration options.
+ This provides support for all known tag models with proper dimensions,
+ version information, and basic configuration options.
- The fallback types include:
-
- - Common M2 tag sizes (1.54", 2.9", 4.2")
- - AP display types
- - LILYGO TPANEL
- - Segmented tag type
+ The fallback types include all tag definitions from the OpenEPaperLink
+ repository at: https://github.com/OpenEPaperLink/OpenEPaperLink/tree/master/resources/tagtypes
"""
- fallback_definitions = {
- 0: {"name": "M2 1.54\"", "width": 152, "height": 152},
- 1: {"name": "M2 2.9\"", "width": 296, "height": 128},
- 2: {"name": "M2 4.2\"", "width": 400, "height": 300},
- 224: {"name": "AP display", "width": 320, "height": 170},
- 225: {"name": "AP display", "width": 160, "height": 80},
- 226: {"name": "LILYGO TPANEL", "width": 480, "height": 480},
- 240: {"name": "Segmented", "width": 0, "height": 0},
- }
self._tag_types = {
- type_id: TagType(type_id, data) for type_id, data in fallback_definitions.items()
+ type_id: TagType(type_id, data) for type_id, data in FALLBACK_TAG_DEFINITIONS.items()
}
self._last_update = datetime.now()
_LOGGER.warning("Loaded fallback tag definitions")
diff --git a/scripts/fetch_tag_types.py b/scripts/fetch_tag_types.py
new file mode 100644
index 0000000..b891cc8
--- /dev/null
+++ b/scripts/fetch_tag_types.py
@@ -0,0 +1,85 @@
+#!/usr/bin/env python3
+"""Fetch tag type definitions from the OpenEPaperLink repository.
+
+Downloads all tag type JSON files from the OpenEPaperLink GitHub repository
+and saves them as a consolidated JSON file for further processing.
+"""
+
+import json
+import re
+import sys
+import urllib.request
+
+
+GITHUB_TREE_URL = (
+ "https://github.com/OpenEPaperLink/OpenEPaperLink/tree/master/resources/tagtypes"
+)
+GITHUB_RAW_URL = (
+ "https://raw.githubusercontent.com/OpenEPaperLink/OpenEPaperLink"
+ "/master/resources/tagtypes"
+)
+
+
+def fetch_file_list():
+ """Fetch the list of tag type JSON files from the repository."""
+ print("Fetching tag type files from OpenEPaperLink repository...")
+ headers = {"User-Agent": "Mozilla/5.0"}
+ req = urllib.request.Request(GITHUB_TREE_URL, headers=headers)
+
+ with urllib.request.urlopen(req, timeout=30) as response:
+ html = response.read().decode("utf-8")
+ json_files = re.findall(r"([0-9a-fA-F]+\.json)", html)
+ json_files = sorted(set(json_files))
+ print(f"Found {len(json_files)} tag type files")
+ return json_files
+
+
+def fetch_tag_types(json_files):
+ """Fetch and parse all tag type definitions."""
+ tag_types = {}
+ errors = []
+
+ for filename in json_files:
+ url = f"{GITHUB_RAW_URL}/{filename}"
+ try:
+ with urllib.request.urlopen(url, timeout=10) as response:
+ data = json.loads(response.read().decode("utf-8"))
+ type_id = int(filename.replace(".json", ""), 16)
+
+ tag_types[type_id] = {
+ "version": data.get("version"),
+ "name": data.get("name"),
+ "width": data.get("width"),
+ "height": data.get("height"),
+ }
+ except Exception as e:
+ errors.append(f"Error fetching {filename}: {e}")
+
+ if errors:
+ for error in errors:
+ print(error)
+
+ print(f"Successfully fetched {len(tag_types)} tag type definitions")
+ return tag_types
+
+
+def main():
+ """Fetch tag type definitions and save to a JSON file."""
+ output_file = sys.argv[1] if len(sys.argv) > 1 else "new_tag_types.json"
+
+ try:
+ json_files = fetch_file_list()
+ except Exception as e:
+ print(f"Error fetching file list: {e}")
+ sys.exit(1)
+
+ tag_types = fetch_tag_types(json_files)
+
+ with open(output_file, "w") as f:
+ json.dump(tag_types, f, indent=2)
+
+ print(f"Tag types saved to {output_file}")
+
+
+if __name__ == "__main__":
+ main()
diff --git a/scripts/generate_tag_types.py b/scripts/generate_tag_types.py
new file mode 100644
index 0000000..07b7f60
--- /dev/null
+++ b/scripts/generate_tag_types.py
@@ -0,0 +1,157 @@
+#!/usr/bin/env python3
+"""Generate updated const.py from fetched tag type definitions.
+
+Reads a JSON file of tag type definitions (produced by fetch_tag_types.py),
+compares them against the current fallback definitions in const.py,
+and updates the file if there are changes.
+
+Sets GitHub Actions outputs for downstream workflow steps.
+"""
+
+import json
+import os
+import re
+import sys
+
+
+CONST_PATH = "custom_components/opendisplay/const.py"
+FALLBACK_PATTERN = re.compile(
+ r"(FALLBACK_TAG_DEFINITIONS = \{)\n(.*?)\n(\})", re.DOTALL
+)
+ENTRY_PATTERN = re.compile(r"\s+(\d+):")
+
+
+def load_new_tag_types(input_file):
+ """Load new tag types from JSON, converting keys to integers."""
+ with open(input_file, "r") as f:
+ raw = json.load(f)
+ return {int(k): v for k, v in raw.items()}
+
+
+def parse_current_definitions(content):
+ """Extract current fallback definitions from const.py content."""
+ match = FALLBACK_PATTERN.search(content)
+ if not match:
+ print("Error: Could not find FALLBACK_TAG_DEFINITIONS in const.py")
+ sys.exit(1)
+
+ current_types = {}
+ for line in match.group(2).split("\n"):
+ m = ENTRY_PATTERN.match(line)
+ if m:
+ type_id = int(m.group(1))
+ current_types[type_id] = line.strip()
+
+ return current_types
+
+
+def compute_changes(current_types, new_tag_types):
+ """Compute added, removed, and modified tag types."""
+ added = []
+ removed = []
+ modified = []
+
+ for type_id in sorted(new_tag_types.keys()):
+ if type_id not in current_types:
+ added.append(type_id)
+ else:
+ new_line = f"{type_id}: {json.dumps(new_tag_types[type_id], ensure_ascii=False)},"
+ if new_line != current_types[type_id]:
+ modified.append(type_id)
+
+ for type_id in sorted(current_types.keys()):
+ if type_id not in new_tag_types:
+ removed.append(type_id)
+
+ return added, removed, modified
+
+
+def generate_fallback_content(new_tag_types):
+ """Generate the new FALLBACK_TAG_DEFINITIONS dict content."""
+ lines = []
+ for type_id in sorted(new_tag_types.keys()):
+ type_data = new_tag_types[type_id]
+ line = f" {type_id}: {json.dumps(type_data, ensure_ascii=False)},"
+ lines.append(line)
+ return "\n".join(lines)
+
+
+def update_tag_types_file(content, new_fallback):
+ """Replace FALLBACK_TAG_DEFINITIONS content in const.py."""
+ match = FALLBACK_PATTERN.search(content)
+ if not match:
+ print("Error: Could not find FALLBACK_TAG_DEFINITIONS in const.py")
+ sys.exit(1)
+
+ start = match.start(2)
+ end = match.end(2)
+ return content[:start] + new_fallback + content[end:]
+
+
+def build_summary(added, removed, modified):
+ """Build a human-readable summary of changes."""
+ summary = []
+ if added:
+ ids = ", ".join(map(str, added[:5]))
+ suffix = "..." if len(added) > 5 else ""
+ summary.append(f"Added: {len(added)} types ({ids}{suffix})")
+ if removed:
+ ids = ", ".join(map(str, removed[:5]))
+ suffix = "..." if len(removed) > 5 else ""
+ summary.append(f"Removed: {len(removed)} types ({ids}{suffix})")
+ if modified:
+ ids = ", ".join(map(str, modified[:5]))
+ suffix = "..." if len(modified) > 5 else ""
+ summary.append(f"Modified: {len(modified)} types ({ids}{suffix})")
+ return summary
+
+
+def set_github_output(changed, summary):
+ """Set GitHub Actions step outputs."""
+ github_output = os.environ.get("GITHUB_OUTPUT")
+ if not github_output:
+ return
+
+ with open(github_output, "a") as f:
+ f.write(f"changed={'true' if changed else 'false'}\n")
+ if summary:
+ f.write(f"summary={'|'.join(summary)}\n")
+
+
+def main():
+ """Generate updated const.py from fetched definitions."""
+ input_file = sys.argv[1] if len(sys.argv) > 1 else "new_tag_types.json"
+
+ new_tag_types = load_new_tag_types(input_file)
+
+ with open(CONST_PATH, "r") as f:
+ content = f.read()
+
+ current_types = parse_current_definitions(content)
+
+ print(f"Current definitions: {len(current_types)} types")
+ print(f"New definitions: {len(new_tag_types)} types")
+
+ added, removed, modified = compute_changes(current_types, new_tag_types)
+ changed = bool(added or removed or modified)
+
+ new_fallback = generate_fallback_content(new_tag_types)
+ new_content = update_tag_types_file(content, new_fallback)
+
+ with open(CONST_PATH, "w") as f:
+ f.write(new_content)
+
+ summary = build_summary(added, removed, modified)
+
+ if changed:
+ print("CHANGED=true")
+ print(f"SUMMARY={'|'.join(summary)}")
+ else:
+ print("CHANGED=false")
+ print("No changes detected")
+
+ set_github_output(changed, summary)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/tests/scripts/test_sync_tag_types.py b/tests/scripts/test_sync_tag_types.py
new file mode 100644
index 0000000..71e4475
--- /dev/null
+++ b/tests/scripts/test_sync_tag_types.py
@@ -0,0 +1,377 @@
+"""Tests for the tag type sync scripts."""
+
+import json
+import os
+import re
+import textwrap
+from unittest.mock import MagicMock, patch
+
+import pytest
+
+# Add scripts directory to path so we can import the modules
+import sys
+
+REPO_ROOT = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+sys.path.insert(0, os.path.join(REPO_ROOT, "scripts"))
+
+import fetch_tag_types
+import generate_tag_types
+
+
+# ---------------------------------------------------------------------------
+# Fixtures
+# ---------------------------------------------------------------------------
+
+SAMPLE_CONST_PY = textwrap.dedent("""\
+ DOMAIN = "opendisplay"
+
+ FALLBACK_TAG_DEFINITIONS = {
+ 0: {"version": 4, "name": "M2 1.54\\"", "width": 152, "height": 152},
+ 1: {"version": 5, "name": "M2 2.9\\"", "width": 296, "height": 128},
+ 240: {"version": 2, "name": "SLT\u2010EM007 Segmented", "width": 0, "height": 0},
+ 250: {"version": 1, "name": "ConfigMode", "width": 0, "height": 0},
+ }
+""")
+
+
+@pytest.fixture
+def const_file(tmp_path):
+ """Write a minimal const.py and return its path."""
+ p = tmp_path / "const.py"
+ p.write_text(SAMPLE_CONST_PY)
+ return p
+
+
+@pytest.fixture
+def new_types_json(tmp_path):
+ """Write a new_tag_types.json and return its path."""
+ data = {
+ 0: {"version": 4, "name": 'M2 1.54"', "width": 152, "height": 152},
+ 1: {"version": 5, "name": 'M2 2.9"', "width": 296, "height": 128},
+ 240: {"version": 2, "name": "SLT\u2010EM007 Segmented", "width": 0, "height": 0},
+ 250: {"version": 1, "name": "ConfigMode", "width": 0, "height": 0},
+ }
+ p = tmp_path / "new_tag_types.json"
+ p.write_text(json.dumps(data, indent=2))
+ return p
+
+
+# ---------------------------------------------------------------------------
+# Tests for generate_tag_types – load_new_tag_types
+# ---------------------------------------------------------------------------
+
+class TestLoadNewTagTypes:
+ """Tests for loading and converting JSON tag types."""
+
+ def test_keys_are_integers(self, new_types_json):
+ """JSON string keys must be converted to integers."""
+ result = generate_tag_types.load_new_tag_types(str(new_types_json))
+ assert all(isinstance(k, int) for k in result.keys())
+
+ def test_values_preserved(self, new_types_json):
+ """Tag type data values must be preserved after loading."""
+ result = generate_tag_types.load_new_tag_types(str(new_types_json))
+ assert result[0]["name"] == 'M2 1.54"'
+ assert result[250]["width"] == 0
+
+
+# ---------------------------------------------------------------------------
+# Tests for generate_tag_types – parse_current_definitions
+# ---------------------------------------------------------------------------
+
+class TestParseCurrentDefinitions:
+ """Tests for parsing FALLBACK_TAG_DEFINITIONS from const.py."""
+
+ def test_parses_all_entries(self, const_file):
+ """Should parse all entries from the FALLBACK_TAG_DEFINITIONS block."""
+ content = const_file.read_text()
+ result = generate_tag_types.parse_current_definitions(content)
+ assert len(result) == 4
+ assert set(result.keys()) == {0, 1, 240, 250}
+
+ def test_keys_are_integers(self, const_file):
+ """Parsed keys must be integers."""
+ content = const_file.read_text()
+ result = generate_tag_types.parse_current_definitions(content)
+ assert all(isinstance(k, int) for k in result.keys())
+
+ def test_exits_on_missing_block(self):
+ """Should exit if FALLBACK_TAG_DEFINITIONS block is not found."""
+ with pytest.raises(SystemExit):
+ generate_tag_types.parse_current_definitions("no such block here")
+
+
+# ---------------------------------------------------------------------------
+# Tests for generate_tag_types – compute_changes
+# ---------------------------------------------------------------------------
+
+class TestComputeChanges:
+ """Tests for computing diffs between current and new definitions."""
+
+ def test_no_changes(self):
+ """Identical data should produce no changes."""
+ current = {
+ 0: '0: {"version": 4, "name": "Tag0", "width": 100, "height": 100},',
+ }
+ new = {0: {"version": 4, "name": "Tag0", "width": 100, "height": 100}}
+ added, removed, modified = generate_tag_types.compute_changes(current, new)
+ assert added == []
+ assert removed == []
+ assert modified == []
+
+ def test_added(self):
+ """New type IDs should be detected as added."""
+ current = {}
+ new = {5: {"version": 1, "name": "New", "width": 10, "height": 10}}
+ added, removed, modified = generate_tag_types.compute_changes(current, new)
+ assert added == [5]
+ assert removed == []
+
+ def test_removed(self):
+ """Missing type IDs should be detected as removed."""
+ current = {
+ 5: '5: {"version": 1, "name": "Old", "width": 10, "height": 10},',
+ }
+ new = {}
+ added, removed, modified = generate_tag_types.compute_changes(current, new)
+ assert removed == [5]
+ assert added == []
+
+ def test_modified(self):
+ """Changed values should be detected as modified."""
+ current = {
+ 0: '0: {"version": 1, "name": "Tag0", "width": 100, "height": 100},',
+ }
+ new = {0: {"version": 2, "name": "Tag0", "width": 100, "height": 100}}
+ added, removed, modified = generate_tag_types.compute_changes(current, new)
+ assert modified == [0]
+
+ def test_sorting(self):
+ """Results should be sorted numerically, not lexicographically."""
+ current = {}
+ new = {
+ 100: {"version": 1, "name": "A", "width": 1, "height": 1},
+ 2: {"version": 1, "name": "B", "width": 1, "height": 1},
+ 17: {"version": 1, "name": "C", "width": 1, "height": 1},
+ }
+ added, _, _ = generate_tag_types.compute_changes(current, new)
+ assert added == [2, 17, 100]
+
+
+# ---------------------------------------------------------------------------
+# Tests for generate_tag_types – generate_fallback_content
+# ---------------------------------------------------------------------------
+
class TestGenerateFallbackContent:
    """Tests for rendering the FALLBACK_TAG_DEFINITIONS dict body."""

    def test_format(self):
        """Rendered lines are indented, keyed by type_id, and end with a comma."""
        tag_data = {0: {"version": 1, "name": "Tag", "width": 10, "height": 20}}
        rendered = generate_tag_types.generate_fallback_content(tag_data)
        assert rendered.startswith(" 0:")
        assert rendered.endswith(",")

    def test_sorted_numerically(self):
        """Output entries appear in ascending numeric type_id order."""
        tag_data = {
            100: {"version": 1, "name": "A", "width": 1, "height": 1},
            2: {"version": 1, "name": "B", "width": 1, "height": 1},
            17: {"version": 1, "name": "C", "width": 1, "height": 1},
        }
        rendered = generate_tag_types.generate_fallback_content(tag_data)
        seen_ids = []
        for line in rendered.split("\n"):
            match = re.match(r"\s+(\d+):", line)
            if match:
                seen_ids.append(int(match.group(1)))
        assert seen_ids == [2, 17, 100]

    def test_unicode_chars_preserved(self):
        """ensure_ascii=False keeps Unicode characters literal instead of escaping them."""
        tag_data = {240: {"version": 2, "name": "SLT\u2010EM007", "width": 0, "height": 0}}
        rendered = generate_tag_types.generate_fallback_content(tag_data)
        # The hyphen character itself, not a \uXXXX escape, must survive.
        assert "\u2010" in rendered
+
+
+# ---------------------------------------------------------------------------
+# Tests for generate_tag_types – update_tag_types_file
+# ---------------------------------------------------------------------------
+
class TestUpdateTagTypesFile:
    """Tests for swapping out the FALLBACK_TAG_DEFINITIONS block in const.py text."""

    def test_replaces_content(self, const_file):
        """New fallback lines replace the old ones entirely."""
        original = const_file.read_text()
        replacement = ' 999: {"version": 1, "name": "New", "width": 1, "height": 1},'
        updated = generate_tag_types.update_tag_types_file(original, replacement)
        assert "999:" in updated
        assert "250:" not in updated  # old entries are gone

    def test_preserves_surrounding_code(self, const_file):
        """Everything outside the fallback block survives the rewrite."""
        original = const_file.read_text()
        replacement = ' 999: {"version": 1, "name": "New", "width": 1, "height": 1},'
        updated = generate_tag_types.update_tag_types_file(original, replacement)
        assert "DOMAIN" in updated

    def test_unicode_in_replacement(self, const_file):
        """Replacement text with \\uXXXX sequences must not break the update.

        Regression test for the primary fixed bug: json.dumps() emits
        \\uXXXX escapes, which re.sub() would misread as invalid regex
        escapes in the replacement string.
        """
        original = const_file.read_text()
        # Literal backslash-u sequence — a hostile regex replacement escape.
        replacement = ' 240: {"version": 2, "name": "SLT\\u2010EM007", "width": 0, "height": 0},'
        updated = generate_tag_types.update_tag_types_file(original, replacement)
        assert "\\u2010" in updated

    def test_exits_on_missing_block(self):
        """Input without the FALLBACK_TAG_DEFINITIONS block triggers SystemExit."""
        with pytest.raises(SystemExit):
            generate_tag_types.update_tag_types_file("no such block", "replacement")
+
+
+# ---------------------------------------------------------------------------
+# Tests for generate_tag_types – build_summary
+# ---------------------------------------------------------------------------
+
class TestBuildSummary:
    """Tests for the human-readable change summary lines."""

    def test_empty_on_no_changes(self):
        """No additions, removals or modifications yields an empty summary."""
        assert generate_tag_types.build_summary([], [], []) == []

    def test_added(self):
        """Added IDs collapse into a single line carrying the count."""
        summary = generate_tag_types.build_summary([1, 2], [], [])
        assert len(summary) == 1
        assert "Added: 2" in summary[0]

    def test_truncated(self):
        """Long ID lists are truncated with an ellipsis."""
        summary = generate_tag_types.build_summary(list(range(10)), [], [])
        assert "..." in summary[0]
+
+
+# ---------------------------------------------------------------------------
+# Tests for generate_tag_types – set_github_output
+# ---------------------------------------------------------------------------
+
class TestSetGithubOutput:
    """Tests for writing step outputs to the GITHUB_OUTPUT file."""

    def test_writes_changed(self, tmp_path):
        """Both the changed flag and the summary land in the output file."""
        sink = tmp_path / "output.txt"
        sink.write_text("")
        with patch.dict(os.environ, {"GITHUB_OUTPUT": str(sink)}):
            generate_tag_types.set_github_output(True, ["Added: 1 types (5)"])
        written = sink.read_text()
        assert "changed=true" in written
        assert "summary=" in written

    def test_no_op_without_env(self, tmp_path):
        """With GITHUB_OUTPUT unset the call must simply return, not raise."""
        with patch.dict(os.environ, {}, clear=True):
            generate_tag_types.set_github_output(False, [])
+
+
+# ---------------------------------------------------------------------------
+# Tests for generate_tag_types – full main() integration
+# ---------------------------------------------------------------------------
+
class TestMainIntegration:
    """End-to-end tests of the generate_tag_types.main() flow."""

    def test_no_change_run(self, const_file, new_types_json, tmp_path):
        """Identical incoming data produces changed=false."""
        sink = tmp_path / "output.txt"
        sink.write_text("")
        with patch.object(generate_tag_types, "CONST_PATH", str(const_file)), \
                patch.dict(os.environ, {"GITHUB_OUTPUT": str(sink)}), \
                patch("sys.argv", ["prog", str(new_types_json)]):
            generate_tag_types.main()
        assert "changed=false" in sink.read_text()

    def test_added_type_run(self, const_file, tmp_path):
        """A brand-new type ID flips changed=true and rewrites the const file."""
        tag_data = {
            0: {"version": 4, "name": 'M2 1.54"', "width": 152, "height": 152},
            1: {"version": 5, "name": 'M2 2.9"', "width": 296, "height": 128},
            240: {"version": 2, "name": "SLT\u2010EM007 Segmented", "width": 0, "height": 0},
            250: {"version": 1, "name": "ConfigMode", "width": 0, "height": 0},
            999: {"version": 1, "name": "Brand New", "width": 100, "height": 200},
        }
        source_json = tmp_path / "new.json"
        source_json.write_text(json.dumps(tag_data, indent=2))

        sink = tmp_path / "output.txt"
        sink.write_text("")
        with patch.object(generate_tag_types, "CONST_PATH", str(const_file)), \
                patch.dict(os.environ, {"GITHUB_OUTPUT": str(sink)}), \
                patch("sys.argv", ["prog", str(source_json)]):
            generate_tag_types.main()
        assert "changed=true" in sink.read_text()
        rewritten = const_file.read_text()
        assert "999:" in rewritten
        assert "Brand New" in rewritten
+
+
+# ---------------------------------------------------------------------------
+# Tests for fetch_tag_types
+# ---------------------------------------------------------------------------
+
class TestFetchTagTypes:
    """Tests for the fetch_tag_types module."""

    @staticmethod
    def _urlopen_mock(payload):
        """Build a MagicMock usable as a ``with urlopen(...)`` context manager."""
        response = MagicMock()
        response.read.return_value = payload
        response.__enter__ = MagicMock(return_value=response)
        response.__exit__ = MagicMock(return_value=False)
        return response

    def test_fetch_file_list(self):
        """fetch_file_list extracts only .json filenames from the HTML listing."""
        listing = '00.json 0A.json other.txt'
        fake = self._urlopen_mock(listing.encode("utf-8"))
        with patch("urllib.request.urlopen", return_value=fake):
            names = fetch_tag_types.fetch_file_list()
        assert names == ["00.json", "0A.json"]

    def test_fetch_tag_types_parses_hex_ids(self):
        """Hex filenames map to decimal type IDs in the result dict."""
        payload = json.dumps({
            "version": 1, "name": "Test", "width": 100, "height": 50
        }).encode("utf-8")
        fake = self._urlopen_mock(payload)
        with patch("urllib.request.urlopen", return_value=fake):
            result = fetch_tag_types.fetch_tag_types(["0A.json"])
        # "0A" is hex for decimal 10.
        assert 10 in result
        assert result[10]["name"] == "Test"

    def test_fetch_tag_types_handles_errors(self):
        """A per-file fetch failure yields an empty result instead of raising."""
        with patch("urllib.request.urlopen", side_effect=Exception("Network error")):
            assert fetch_tag_types.fetch_tag_types(["00.json"]) == {}

    def test_main_writes_json(self, tmp_path):
        """main() serialises the fetched definitions to the target JSON file."""
        target = tmp_path / "out.json"
        with patch.object(fetch_tag_types, "fetch_file_list", return_value=["01.json"]), \
                patch.object(fetch_tag_types, "fetch_tag_types", return_value={
                    1: {"version": 1, "name": "X", "width": 10, "height": 10}
                }), \
                patch("sys.argv", ["prog", str(target)]):
            fetch_tag_types.main()

        loaded = json.loads(target.read_text())
        assert "1" in loaded  # json.dump stringifies integer keys
        assert loaded["1"]["name"] == "X"